/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.integrationtests.jbpm;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.drools.core.process.instance.impl.WorkItemImpl;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.kie.api.KieServices;
import org.kie.internal.executor.api.STATUS;
import org.kie.server.api.model.ReleaseId;
import org.kie.server.api.model.instance.JobRequestInstance;
import org.kie.server.api.model.instance.RequestInfoInstance;
import org.kie.server.api.exception.KieServicesException;
import org.kie.server.integrationtests.category.Smoke;
import static org.junit.Assert.*;
import static org.hamcrest.core.AnyOf.*;
import static org.hamcrest.core.IsEqual.*;
import org.kie.server.integrationtests.shared.KieServerDeployer;
import org.kie.server.integrationtests.shared.KieServerReflections;
import org.kie.server.integrationtests.shared.KieServerSynchronization;
public class JobServiceIntegrationTest extends JbpmKieServerBaseIntegrationTest {
private static ReleaseId releaseId = new ReleaseId("org.kie.server.testing", "definition-project",
"1.0.0.Final");
protected static final String BUSINESS_KEY = "test key";
protected static final String PRINT_OUT_COMMAND = "org.jbpm.executor.commands.PrintOutCommand";
protected static final String LOG_CLEANUP_COMMAND = "org.jbpm.executor.commands.LogCleanupCommand";
protected static final String CUSTOM_COMMAND = "org.jbpm.data.CustomCommand";
@BeforeClass
public static void buildAndDeployArtifacts() {
KieServerDeployer.buildAndDeployCommonMavenParent();
KieServerDeployer.buildAndDeployMavenProjectFromResource("/kjars-sources/definition-project");
kieContainer = KieServices.Factory.get().newKieContainer(releaseId);
createContainer(CONTAINER_ID, releaseId);
}
@Before
public void finishAllJobs() throws Exception {
List<String> status = new ArrayList<String>();
status.add(STATUS.QUEUED.toString());
status.add(STATUS.RUNNING.toString());
status.add(STATUS.RETRYING.toString());
List<RequestInfoInstance> requests = jobServicesClient.getRequestsByStatus(status, 0, 100);
for (RequestInfoInstance instance : requests) {
jobServicesClient.cancelRequest(instance.getId());
KieServerSynchronization.waitForJobToFinish(jobServicesClient, instance.getId());
}
}
@Override
protected void addExtraCustomClasses(Map<String, Class<?>> extraClasses) throws Exception {
extraClasses.put(PERSON_CLASS_NAME, Class.forName(PERSON_CLASS_NAME, true, kieContainer.getClassLoader()));
}
@Test
public void testScheduleViewAndCancelJob() {
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DATE, 1);
JobRequestInstance jobRequestInstance = createJobRequestInstance();
jobRequestInstance.setScheduledDate(tomorrow.getTime());
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, false);
RequestInfoInstance expected = createExpectedRequestInfoInstance(jobId, STATUS.QUEUED);
assertRequestInfoInstance(expected, jobRequest);
assertNotNull(jobRequest.getScheduledDate());
jobServicesClient.cancelRequest(jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, false);
expected.setStatus(STATUS.CANCELLED.toString());
assertRequestInfoInstance(expected, jobRequest);
}
@Test
@Category(Smoke.class)
public void testScheduleAndRunJob() throws Exception {
JobRequestInstance jobRequestInstance = createJobRequestInstance();
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertThat(jobRequest.getStatus(),anyOf(
equalTo(STATUS.QUEUED.toString()),
equalTo(STATUS.RUNNING.toString()),
equalTo(STATUS.DONE.toString())));
assertEquals(PRINT_OUT_COMMAND, jobRequest.getCommandName());
KieServerSynchronization.waitForJobToFinish(jobServicesClient, jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertEquals(STATUS.DONE.toString(), jobRequest.getStatus());
assertEquals(PRINT_OUT_COMMAND, jobRequest.getCommandName());
}
@Test
public void testScheduleAndRunJobWithCustomTypeFromContainer() throws Exception {
int currentNumberOfDone = jobServicesClient.getRequestsByContainer(CONTAINER_ID, Collections.singletonList(STATUS.DONE.toString()), 0, 100).size();
Class<?> personClass = Class.forName(PERSON_CLASS_NAME, true, kieContainer.getClassLoader());
Map<String, Object> data = new HashMap<String, Object>();
data.put("businessKey", BUSINESS_KEY);
data.put("person", createPersonInstance(USER_JOHN));
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(PRINT_OUT_COMMAND);
jobRequestInstance.setData(data);
Long jobId = jobServicesClient.scheduleRequest(CONTAINER_ID, jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertThat(jobRequest.getStatus(),anyOf(
equalTo(STATUS.QUEUED.toString()),
equalTo(STATUS.RUNNING.toString()),
equalTo(STATUS.DONE.toString())));
assertEquals(PRINT_OUT_COMMAND, jobRequest.getCommandName());
KieServerSynchronization.waitForJobToFinish(jobServicesClient, jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, true);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertEquals(STATUS.DONE.toString(), jobRequest.getStatus());
assertEquals(PRINT_OUT_COMMAND, jobRequest.getCommandName());
Map<String, Object> requestData = jobRequest.getData();
assertNotNull(requestData);
assertEquals(3, requestData.size());
assertTrue(requestData.containsKey("person"));
assertTrue(requestData.containsKey("businessKey"));
assertTrue(requestData.containsKey("deploymentId"));
assertTrue(personClass.isAssignableFrom(requestData.get("person").getClass()));
assertTrue(String.class.isAssignableFrom(requestData.get("businessKey").getClass()));
assertTrue(String.class.isAssignableFrom(requestData.get("deploymentId").getClass()));
assertEquals(USER_JOHN, KieServerReflections.valueOf(requestData.get("person"), "name"));
assertEquals(CONTAINER_ID, requestData.get("deploymentId"));
assertEquals(BUSINESS_KEY, requestData.get("businessKey"));
Map<String, Object> responseData = jobRequest.getResponseData();
assertNotNull(responseData);
assertEquals(0, responseData.size());
List<RequestInfoInstance> result = jobServicesClient.getRequestsByContainer(CONTAINER_ID, Arrays.asList(STATUS.QUEUED.name()), 0, 100);
assertNotNull(result);
assertEquals(0, result.size());
result = jobServicesClient.getRequestsByContainer(CONTAINER_ID, Arrays.asList(STATUS.DONE.name()), 0, 100);
assertNotNull(result);
assertEquals(1 + currentNumberOfDone, result.size());
}
@Test
public void testScheduleSearchByStatusAndCancelJob() {
int currentNumberOfCancelled = jobServicesClient.getRequestsByStatus(Collections.singletonList(STATUS.CANCELLED.toString()), 0, 100).size();
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DATE, 1);
JobRequestInstance jobRequestInstance = createJobRequestInstance();
jobRequestInstance.setScheduledDate(tomorrow.getTime());
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
List<String> status = new ArrayList<String>();
status.add(STATUS.QUEUED.toString());
List<RequestInfoInstance> result = jobServicesClient.getRequestsByStatus(status, 0, 100);
assertNotNull(result);
assertEquals(1, result.size());
RequestInfoInstance jobRequest = result.get(0);
RequestInfoInstance expected = createExpectedRequestInfoInstance(jobId, STATUS.QUEUED);
assertRequestInfoInstance(expected, jobRequest);
assertNotNull(jobRequest.getScheduledDate());
jobServicesClient.cancelRequest(jobId);
result = jobServicesClient.getRequestsByStatus(status, 0, 100);
assertNotNull(result);
assertEquals(0, result.size());
// clear status to search only for canceled
status.clear();
status.add(STATUS.CANCELLED.toString());
result = jobServicesClient.getRequestsByStatus(status, 0, 100);
assertNotNull(result);
assertEquals(1 + currentNumberOfCancelled, result.size());
}
@Test
public void testScheduleAndRequeueJob() throws Exception {
String command = LOG_CLEANUP_COMMAND;
Map<String, Object> data = new HashMap<String, Object>();
data.put("businessKey", BUSINESS_KEY);
data.put("DateFormat", "wrong-value");
data.put("SkipProcessLog", "true");
data.put("SkipTaskLog", "true");
data.put("SkipExecutorLog", "true");
data.put("SingleRun", "true");
data.put("retries", 0);
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(command);
jobRequestInstance.setData(data);
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertThat(jobRequest.getStatus(),anyOf(
equalTo(STATUS.QUEUED.toString()),
equalTo(STATUS.RUNNING.toString()),
equalTo(STATUS.ERROR.toString())));
assertEquals(command, jobRequest.getCommandName());
KieServerSynchronization.waitForJobToFinish(jobServicesClient, jobId);
RequestInfoInstance expected = createExpectedRequestInfoInstance(jobId, STATUS.ERROR);
expected.setCommandName(command);
jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertRequestInfoInstance(expected, jobRequest);
data.put("DateFormat", "yyyy-MM-dd");
jobServicesClient.updateRequestData(jobId, null, data);
jobServicesClient.requeueRequest(jobId);
KieServerSynchronization.waitForJobToFinish(jobServicesClient, jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(expected.getId(), jobRequest.getId());
assertEquals(expected.getBusinessKey(), jobRequest.getBusinessKey());
assertEquals(STATUS.DONE.toString(), jobRequest.getStatus());
assertEquals(expected.getCommandName(), jobRequest.getCommandName());
}
@Test
public void testScheduleSearchByKeyJob() throws Exception {
int currentNumberOfRequests = jobServicesClient.getRequestsByBusinessKey(BUSINESS_KEY, 0, 100).size();
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DATE, 1);
JobRequestInstance jobRequestInstance = createJobRequestInstance();
jobRequestInstance.setScheduledDate(tomorrow.getTime());
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue(jobId.longValue() > 0);
List<RequestInfoInstance> result = jobServicesClient.getRequestsByBusinessKey(BUSINESS_KEY, 0, 100);
assertNotNull(result);
assertEquals(1 + currentNumberOfRequests, result.size());
List<RequestInfoInstance> queuedJobs = result.stream().
filter(n -> n.getStatus().equals(STATUS.QUEUED.name())).collect(Collectors.toList());
assertNotNull(queuedJobs);
assertEquals(1, queuedJobs.size());
RequestInfoInstance expected = createExpectedRequestInfoInstance(jobId, STATUS.QUEUED);
RequestInfoInstance queuedJob = queuedJobs.get(0);
assertRequestInfoInstance(expected, queuedJob);
result = jobServicesClient.getRequestsByBusinessKey(BUSINESS_KEY, Arrays.asList(STATUS.QUEUED.name()), 0, 100);
assertNotNull(result);
assertEquals(1, result.size());
assertRequestInfoInstance(expected, result.get(0));
jobServicesClient.cancelRequest(jobId);
}
@Test
public void testScheduleSearchByCommandCancelJob() throws Exception {
String firstCommand = PRINT_OUT_COMMAND;
String secondCommand = LOG_CLEANUP_COMMAND;
int originalNumberOfSecondCommands = jobServicesClient.getRequestsByCommand(secondCommand, 0, 100).size();
Map<String, Object> data = new HashMap<String, Object>();
data.put("businessKey", BUSINESS_KEY);
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DATE, 1);
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(firstCommand);
jobRequestInstance.setData(data);
jobRequestInstance.setScheduledDate(tomorrow.getTime());
// Executing first command.
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
// The count of second-command requests should stay the same, since only the first command has been scheduled.
int numberOfSecondCommands = jobServicesClient.getRequestsByCommand(secondCommand, 0, 100).size();
assertEquals(originalNumberOfSecondCommands, numberOfSecondCommands);
jobServicesClient.cancelRequest(jobId);
jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(secondCommand);
jobRequestInstance.setData(data);
jobRequestInstance.setScheduledDate(tomorrow.getTime());
// Executing second command.
jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
// The count of second-command requests should now increase.
numberOfSecondCommands = jobServicesClient.getRequestsByCommand(secondCommand, 0, 100).size();
assertEquals(1 + originalNumberOfSecondCommands, numberOfSecondCommands);
numberOfSecondCommands = jobServicesClient.getRequestsByCommand(secondCommand, Arrays.asList(STATUS.QUEUED.name()), 0, 100).size();
assertEquals(1, numberOfSecondCommands);
jobServicesClient.cancelRequest(jobId);
}
@Test
public void testScheduleViewUpdateDataAndCancelJob() {
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DATE, 1);
Map<String, Object> data = new HashMap<String, Object>();
data.put("businessKey", BUSINESS_KEY);
data.put("customValue", "just a simple value");
data.put("processInstanceId", 1234);
JobRequestInstance jobRequestInstance = createJobRequestInstance();
jobRequestInstance.setScheduledDate(tomorrow.getTime());
jobRequestInstance.setData(data);
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, true);
RequestInfoInstance expected = createExpectedRequestInfoInstance(jobId, STATUS.QUEUED);
assertRequestInfoInstance(expected, jobRequest);
assertNotNull(jobRequest.getScheduledDate());
Map<String, Object> jobsData = jobRequest.getData();
assertNotNull(jobsData);
assertEquals("just a simple value", jobsData.get("customValue"));
assertEquals(1234, jobsData.get("processInstanceId"));
Map<String, Object> updates = new HashMap<>();
updates.put("customValue", "updated string");
jobServicesClient.updateRequestData(jobId, null, updates);
jobRequest = jobServicesClient.getRequestById(jobId, false, true);
jobsData = jobRequest.getData();
assertNotNull(jobsData);
assertEquals("updated string", jobsData.get("customValue"));
assertEquals(1234, jobsData.get("processInstanceId"));
List<RequestInfoInstance> processRequests = jobServicesClient.getRequestsByProcessInstance(1234L, Arrays.asList(STATUS.QUEUED.name()), 0, 100);
assertNotNull(processRequests);
assertEquals(1, processRequests.size());
jobServicesClient.cancelRequest(jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, false);
expected.setStatus(STATUS.CANCELLED.toString());
assertRequestInfoInstance(expected, jobRequest);
}
private void assertRequestInfoInstance(RequestInfoInstance expected, RequestInfoInstance actual) {
assertNotNull(actual);
assertEquals(expected.getId(), actual.getId());
assertEquals(expected.getBusinessKey(), actual.getBusinessKey());
assertEquals(expected.getStatus(), actual.getStatus());
assertEquals(expected.getCommandName(), actual.getCommandName());
}
private RequestInfoInstance createExpectedRequestInfoInstance(Long jobId, STATUS expected) {
return RequestInfoInstance.builder()
.id(jobId)
.businessKey(BUSINESS_KEY)
.status(expected.toString())
.command(PRINT_OUT_COMMAND)
.build();
}
private JobRequestInstance createJobRequestInstance() {
Map<String, Object> data = new HashMap<>();
data.put("businessKey", BUSINESS_KEY);
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(PRINT_OUT_COMMAND);
jobRequestInstance.setData(data);
return jobRequestInstance;
}
@Test
public void testExecutorServiceDisabling() throws Exception {
String command = "invalidCommand";
Map<String, Object> data = new HashMap<String, Object>();
data.put("businessKey", BUSINESS_KEY);
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DATE, 1);
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(command);
jobRequestInstance.setData(data);
jobRequestInstance.setScheduledDate(tomorrow.getTime());
// Scheduling an invalid command should be rejected by the server.
try {
jobServicesClient.scheduleRequest(jobRequestInstance);
fail("Scheduling a request with an invalid command should have thrown KieServicesException");
} catch (Exception e){
assertTrue(e instanceof KieServicesException);
assertTrue(e.getMessage().contains("Invalid command type"));
}
}
@Test
public void testScheduleAndRunJobWithoutData() throws Exception {
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(PRINT_OUT_COMMAND);
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertThat(jobRequest.getStatus(),anyOf(
equalTo(STATUS.QUEUED.toString()),
equalTo(STATUS.RUNNING.toString()),
equalTo(STATUS.DONE.toString())));
assertEquals(PRINT_OUT_COMMAND, jobRequest.getCommandName());
KieServerSynchronization.waitForJobToFinish(jobServicesClient, jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(STATUS.DONE.toString(), jobRequest.getStatus());
assertEquals(PRINT_OUT_COMMAND, jobRequest.getCommandName());
}
@Test
public void testScheduleAndRunJobWithWorkItem() throws Exception {
JobRequestInstance jobRequestInstance = createJobRequestInstance();
final WorkItemImpl workItem = new WorkItemImpl();
workItem.setId(1);
workItem.setName("testWorkItemName");
workItem.setDeploymentId("test-1.0.0");
workItem.setState(1);
jobRequestInstance.getData().put("workItem",
workItem);
Long jobId = jobServicesClient.scheduleRequest(jobRequestInstance);
assertNotNull(jobId);
assertTrue(jobId.longValue() > 0);
KieServerSynchronization.waitForJobToFinish(jobServicesClient,
jobId);
final RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId,
true,
true);
assertNotNull(jobRequest);
assertEquals(jobId,
jobRequest.getId());
assertEquals(STATUS.DONE.toString(),
jobRequest.getStatus());
assertEquals(PRINT_OUT_COMMAND,
jobRequest.getCommandName());
assertNotNull(jobRequest.getData().get("workItem"));
}
@Test
public void testGetNonExistentJob() {
final long jobId = -1L;
assertClientException(() -> jobServicesClient.getRequestById(jobId,
false,
false),
404,
"Request with id: " + jobId + " doesn't exist");
}
@Test
public void testScheduleAndRunJobWithCustomCommandFromContainer() throws Exception {
int currentNumberOfDone = jobServicesClient.getRequestsByContainer(CONTAINER_ID, Collections.singletonList(STATUS.DONE.toString()), 0, 100).size();
Class<?> personClass = Class.forName(PERSON_CLASS_NAME, true, kieContainer.getClassLoader());
Map<String, Object> data = new HashMap<String, Object>();
data.put("businessKey", BUSINESS_KEY);
data.put("person", createPersonInstance(USER_JOHN));
JobRequestInstance jobRequestInstance = new JobRequestInstance();
jobRequestInstance.setCommand(CUSTOM_COMMAND);
jobRequestInstance.setData(data);
Long jobId = jobServicesClient.scheduleRequest(CONTAINER_ID, jobRequestInstance);
assertNotNull(jobId);
assertTrue( jobId.longValue() > 0);
RequestInfoInstance jobRequest = jobServicesClient.getRequestById(jobId, false, false);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertThat(jobRequest.getStatus(),anyOf(
equalTo(STATUS.QUEUED.toString()),
equalTo(STATUS.RUNNING.toString()),
equalTo(STATUS.DONE.toString())));
assertEquals(CUSTOM_COMMAND, jobRequest.getCommandName());
KieServerSynchronization.waitForJobToFinish(jobServicesClient, jobId);
jobRequest = jobServicesClient.getRequestById(jobId, false, true);
assertNotNull(jobRequest);
assertEquals(jobId, jobRequest.getId());
assertEquals(BUSINESS_KEY, jobRequest.getBusinessKey());
assertEquals(STATUS.DONE.toString(), jobRequest.getStatus());
assertEquals(CUSTOM_COMMAND, jobRequest.getCommandName());
Map<String, Object> responseData = jobRequest.getResponseData();
assertNotNull(responseData);
assertEquals(1, responseData.size());
assertTrue(responseData.containsKey("output"));
assertTrue(personClass.isAssignableFrom(responseData.get("output").getClass()));
assertEquals(USER_JOHN, KieServerReflections.valueOf(responseData.get("output"), "name"));
List<RequestInfoInstance> result = jobServicesClient.getRequestsByContainer(CONTAINER_ID, Arrays.asList(STATUS.QUEUED.name()), 0, 100);
assertNotNull(result);
assertEquals(0, result.size());
result = jobServicesClient.getRequestsByContainer(CONTAINER_ID, Arrays.asList(STATUS.DONE.name()), 0, 100);
assertNotNull(result);
assertEquals(1 + currentNumberOfDone, result.size());
}
}
/*-
* Copyright (c) 2002, 2020 Oracle and/or its affiliates. All rights reserved.
*
* See the file LICENSE for license information.
*
*/
package com.sleepycat.persist.test;
import static com.sleepycat.persist.model.Relationship.MANY_TO_ONE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.sleepycat.db.DatabaseException;
import com.sleepycat.db.Environment;
import com.sleepycat.db.EnvironmentConfig;
import com.sleepycat.db.Transaction;
import com.sleepycat.db.util.DualTestCase;
import com.sleepycat.persist.EntityCursor;
import com.sleepycat.persist.EntityStore;
import com.sleepycat.persist.PrimaryIndex;
import com.sleepycat.persist.SecondaryIndex;
import com.sleepycat.persist.StoreConfig;
import com.sleepycat.persist.model.AnnotationModel;
import com.sleepycat.persist.model.Entity;
import com.sleepycat.persist.model.EntityModel;
import com.sleepycat.persist.model.Persistent;
import com.sleepycat.persist.model.PrimaryKey;
import com.sleepycat.persist.model.SecondaryKey;
import com.sleepycat.util.test.SharedTestUtils;
import com.sleepycat.util.test.TestEnv;
public class SubclassIndexTest extends DualTestCase {
private File envHome;
private Environment env;
private EntityStore store;
@Before
public void setUp()
throws Exception {
envHome = SharedTestUtils.getTestDir();
super.setUp();
}
@After
public void tearDown()
throws Exception {
super.tearDown();
envHome = null;
env = null;
}
private void open()
throws DatabaseException {
EnvironmentConfig envConfig = TestEnv.TXN.getConfig();
envConfig.setAllowCreate(true);
env = create(envHome, envConfig);
EntityModel model = new AnnotationModel();
model.registerClass(Manager.class);
model.registerClass(SalariedManager.class);
StoreConfig storeConfig = new StoreConfig();
storeConfig.setModel(model);
storeConfig.setAllowCreate(true);
storeConfig.setTransactional(true);
store = new EntityStore(env, "foo", storeConfig);
}
private void close()
throws DatabaseException {
store.close();
store = null;
close(env);
env = null;
}
@Test
public void testSubclassIndex()
throws DatabaseException {
open();
PrimaryIndex<String, Employee> employeesById =
store.getPrimaryIndex(String.class, Employee.class);
employeesById.put(new Employee("1"));
employeesById.put(new Manager("2", "a"));
employeesById.put(new Manager("3", "a"));
employeesById.put(new Manager("4", "b"));
Employee e;
Manager m;
e = employeesById.get("1");
assertNotNull(e);
assertTrue(!(e instanceof Manager));
/* Ensure DB exists BEFORE calling getSubclassIndex. [#15247] */
PersistTestUtils.assertDbExists
(true, env, "foo", Employee.class.getName(), "dept");
/* Normal use: Subclass index for a key in the subclass. */
SecondaryIndex<String, String, Manager> managersByDept =
store.getSubclassIndex
(employeesById, Manager.class, String.class, "dept");
m = managersByDept.get("a");
assertNotNull(m);
assertEquals("2", m.id);
m = managersByDept.get("b");
assertNotNull(m);
assertEquals("4", m.id);
Transaction txn = env.beginTransaction(null, null);
EntityCursor<Manager> managers = managersByDept.entities(txn, null);
try {
m = managers.next();
assertNotNull(m);
assertEquals("2", m.id);
m = managers.next();
assertNotNull(m);
assertEquals("3", m.id);
m = managers.next();
assertNotNull(m);
assertEquals("4", m.id);
m = managers.next();
assertNull(m);
} finally {
managers.close();
txn.commit();
}
/* Getting a subclass index for the entity class is also allowed. */
store.getSubclassIndex
(employeesById, Employee.class, String.class, "other");
/* Getting a subclass index for a base class key is not allowed. */
try {
store.getSubclassIndex
(employeesById, Manager.class, String.class, "other");
fail();
} catch (IllegalArgumentException expected) {
}
close();
}
/**
* Previously this tested that a secondary key database was added only
* AFTER storing the first instance of the subclass that defines the key.
* Now that we require registering the subclass up front, the database is
* created up front also. So this test is somewhat less useful, but still
* nice to have around. [#16399]
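*
* A sketch of the up-front registration this relies on (it mirrors the open()
* method of this test class):
* <pre>
* EntityModel model = new AnnotationModel();
* model.registerClass(Manager.class);
* model.registerClass(SalariedManager.class);
* StoreConfig storeConfig = new StoreConfig();
* storeConfig.setModel(model);
* storeConfig.setAllowCreate(true);
* storeConfig.setTransactional(true);
* store = new EntityStore(env, "foo", storeConfig);
* </pre>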
*/
@Test
public void testAddSecKey()
throws DatabaseException {
open();
PrimaryIndex<String, Employee> employeesById =
store.getPrimaryIndex(String.class, Employee.class);
employeesById.put(new Employee("1"));
assertTrue(hasEntityKey("dept"));
close();
open();
employeesById = store.getPrimaryIndex(String.class, Employee.class);
assertTrue(hasEntityKey("dept"));
employeesById.put(new Manager("2", "a"));
assertTrue(hasEntityKey("dept"));
close();
open();
assertTrue(hasEntityKey("dept"));
close();
open();
employeesById = store.getPrimaryIndex(String.class, Employee.class);
assertTrue(hasEntityKey("salary"));
employeesById.put(new SalariedManager("3", "a", "111"));
assertTrue(hasEntityKey("salary"));
close();
open();
assertTrue(hasEntityKey("dept"));
assertTrue(hasEntityKey("salary"));
close();
}
private boolean hasEntityKey(String keyName) {
return store.getModel().
getRawType(Employee.class.getName()).
getEntityMetadata().
getSecondaryKeys().
keySet().
contains(keyName);
}
@Entity
private static class Employee {
@PrimaryKey
String id;
@SecondaryKey(relate=MANY_TO_ONE)
String other;
Employee(String id) {
this.id = id;
}
private Employee() {}
}
@Persistent
private static class Manager extends Employee {
@SecondaryKey(relate=MANY_TO_ONE)
String dept;
Manager(String id, String dept) {
super(id);
this.dept = dept;
}
private Manager() {}
}
@Persistent
private static class SalariedManager extends Manager {
@SecondaryKey(relate=MANY_TO_ONE)
String salary;
SalariedManager(String id, String dept, String salary) {
super(id, dept);
this.salary = salary;
}
private SalariedManager() {}
}
}
/*
* Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores
* CA 94065 USA or visit www.oracle.com if you need additional information or
* have any questions.
*/
package com.sun.lwuit.table;
import com.sun.lwuit.Component;
import com.sun.lwuit.Container;
import com.sun.lwuit.Display;
import com.sun.lwuit.Form;
import com.sun.lwuit.Graphics;
import com.sun.lwuit.Label;
import com.sun.lwuit.TextArea;
import com.sun.lwuit.TextField;
import com.sun.lwuit.events.ActionEvent;
import com.sun.lwuit.events.ActionListener;
import com.sun.lwuit.events.DataChangedListener;
import com.sun.lwuit.geom.Rectangle;
import com.sun.lwuit.plaf.Border;
import com.sun.lwuit.plaf.Style;
/**
* The table class represents a grid of data that can be used for rendering a grid
* of components/labels. The table reflects and updates the underlying model data.
*
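* <p>A minimal usage sketch, assuming a Form named {@code form} already exists
* (the column names and cell values below are illustrative only):</p>
* <pre>
* TableModel model = new DefaultTableModel(
*         new String[] {"Name", "Age"},
*         new String[][] {{"Alice", "30"}, {"Bob", "41"}});
* Table table = new Table(model);
* form.addComponent(table);
* </pre>
*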
* @author Shai Almog
*/
public class Table extends Container {
/**
* Constant denoting that inner borders should not be drawn at all
*/
public static final int INNER_BORDERS_NONE = 0;
/**
* Constant denoting that only the inner borders between rows should be drawn
*/
public static final int INNER_BORDERS_ROWS = 1;
/**
* Constant denoting that only the inner borders between columns should be drawn
*/
public static final int INNER_BORDERS_COLS = 2;
/**
* Constant denoting that inner borders should be drawn fully
*/
public static final int INNER_BORDERS_ALL = 3;
private TableModel model;
private Listener listener = new Listener();
private boolean drawBorder = true;
private boolean collapseBorder = true;
private boolean drawEmptyCellsBorder = true;
private int horizontalBorderSpacing = 0;
private int verticalBorderSpacing = 0;
private boolean includeHeader = true;
private int innerBorder = INNER_BORDERS_ALL;
/**
* Indicates the alignment of the title; see label alignment for details
*
* @see com.sun.lwuit.Label#setAlignment(int)
*/
private int titleAlignment = Label.CENTER;
/**
* Indicates the alignment of the cells; see label alignment for details
*
* @see com.sun.lwuit.Label#setAlignment(int)
*/
private int cellAlignment = Label.LEFT;
/**
* This flag allows us to work around issue 275 without incurring too many updateModel calls
*/
private boolean potentiallyDirtyModel;
/**
* Constructor for usage by GUI builder and automated tools, normally one
* should use the version that accepts the model
*/
public Table() {
this(new DefaultTableModel(new String[]{"Col1", "Col2"}, new String[][]{
{"1", "2"},
{"3", "4"}}));
}
/**
* Create a table with a new model
*
* @param model the model underlying this table
*/
public Table(TableModel model) {
this.model = model;
updateModel();
setUIID("Table");
}
/**
* Create a table with a new model
*
* @param model the model underlying this table
* @param includeHeader Indicates whether the table should render a table header as the first row
*/
public Table(TableModel model, boolean includeHeader) {
setUIID("Table");
this.includeHeader = includeHeader;
this.model = model;
updateModel();
}
/**
* Returns the selected row in the table
*
* @return the offset of the selected row in the table if a selection exists
*/
public int getSelectedRow() {
Form f = getComponentForm();
if(f != null) {
Component c = f.getFocused();
if(c != null) {
return getCellRow(c);
}
}
return -1;
}
/**
* Returns the selected column in the table
*
* @return the offset of the selected column in the table if a selection exists
*/
public int getSelectedColumn() {
Form f = getComponentForm();
if(f != null) {
Component c = f.getFocused();
if(c != null) {
return getCellColumn(c);
}
}
return -1;
}
private void updateModel() {
int selectionRow = -1, selectionColumn = -1;
Form f = getComponentForm();
if(f != null) {
Component c = f.getFocused();
if(c != null) {
selectionRow = getCellRow(c);
selectionColumn = getCellColumn(c);
}
}
removeAll();
int columnCount = model.getColumnCount();
// another row for the table header
if(includeHeader) {
setLayout(new TableLayout(model.getRowCount() + 1, columnCount));
for(int iter = 0 ; iter < columnCount ; iter++) {
String name = model.getColumnName(iter);
Component header = createCellImpl(name, -1, iter, false);
TableLayout.Constraint con = createCellConstraint(name, -1, iter);
addComponent(con, header);
}
} else {
setLayout(new TableLayout(model.getRowCount(), columnCount));
}
for(int r = 0 ; r < model.getRowCount() ; r++) {
for(int c = 0 ; c < columnCount ; c++) {
Object value = model.getValueAt(r, c);
// null should be returned for spanned over values
if(value != null) {
boolean e = model.isCellEditable(r, c);
Component cell = createCellImpl(value, r, c, e);
if(cell != null) {
TableLayout.Constraint con = createCellConstraint(value, r, c);
// the next row the table layout will populate
int currentRow = ((TableLayout)getLayout()).getNextRow();
if(currentRow > model.getRowCount()) {
return;
}
addComponent(con, cell);
if(r == selectionRow && c == selectionColumn) {
cell.requestFocus();
}
}
}
}
}
}
/**
* @inheritDoc
*/
protected void paintGlass(Graphics g) {
if ((drawBorder) && (innerBorder!=INNER_BORDERS_NONE)) {
int xPos = getAbsoluteX();
int yPos = getAbsoluteY();
g.translate(xPos, yPos);
int rows = model.getRowCount();
int cols = model.getColumnCount();
if(includeHeader) {
rows++;
}
g.setColor(getStyle().getFgColor());
TableLayout t = (TableLayout)getLayout();
int actualWidth = Math.max(getWidth(), getScrollDimension().getWidth());
int actualHeight = Math.max(getHeight(), getScrollDimension().getHeight());
if ((collapseBorder) || (innerBorder!=INNER_BORDERS_ALL) || // inner borders cols/rows are supported only in collapsed mode
(t.hasHorizontalSpanning()) || (t.hasVerticalSpanning())) { // TODO - We currently don't support separate borders for tables with spanned cells
if ((innerBorder==INNER_BORDERS_ALL) || (innerBorder==INNER_BORDERS_ROWS)) {
if(t.hasVerticalSpanning()) {
// iterate over the components and draw a line below all
// the components other than the ones that are in the last row.
for(int cellRow = 0 ; cellRow < rows - 1; cellRow++) {
for(int cellColumn = 0 ; cellColumn < cols ; cellColumn++) {
// if this isn't the last row
if(cellRow + t.getCellVerticalSpan(cellRow, cellColumn) - 1 != rows - 1) {
// if this is a spanned through cell we don't want to draw a line here
if(t.isCellSpannedThroughHorizontally(cellRow, cellColumn)) {
continue;
}
int x = t.getColumnPosition(cellColumn);
int y = t.getRowPosition(cellRow);
int rowHeight = t.getRowPosition(cellRow + t.getCellVerticalSpan(cellRow, cellColumn)) - y;
int columnWidth;
if(cellColumn < getModel().getColumnCount() - 1) {
columnWidth = t.getColumnPosition(cellColumn + 1) - x;
} else {
columnWidth = getWidth() - x;
}
if ((innerBorder!=INNER_BORDERS_ROWS) || (shouldDrawInnerBorderAfterRow(cellRow))) {
g.drawLine(x, y + rowHeight, x + columnWidth, y + rowHeight);
}
}
}
}
} else {
// this is much faster since we don't need to check spanning
for(int row = 1 ; row < rows; row++) {
int y = t.getRowPosition(row);
if ((innerBorder!=INNER_BORDERS_ROWS) || (shouldDrawInnerBorderAfterRow(row-1))) {
g.drawLine(0, y, actualWidth, y);
}
//g.drawLine(0+2, y+2, actualWidth-2, y+2);
}
}
}
if ((innerBorder==INNER_BORDERS_ALL) || (innerBorder==INNER_BORDERS_COLS)) {
if(t.hasHorizontalSpanning()) {
// iterate over the components and draw a line on the side of all
// the components other than the ones that are at the last column.
for(int cellRow = 0 ; cellRow < rows ; cellRow++) {
for(int cellColumn = 0 ; cellColumn < cols - 1 ; cellColumn++) {
// if this isn't the last column
if(cellColumn + t.getCellHorizontalSpan(cellRow, cellColumn) - 1 != cols - 1) {
// if this is a spanned through cell we don't want to draw a line here
if(t.isCellSpannedThroughVertically(cellRow, cellColumn)) {
continue;
}
int x = t.getColumnPosition(cellColumn);
int y = t.getRowPosition(cellRow);
int rowHeight;
int columnWidth = t.getColumnPosition(cellColumn + t.getCellHorizontalSpan(cellRow, cellColumn)) - x;
if(cellRow < getModel().getRowCount() - 1) {
rowHeight = t.getRowPosition(cellRow + 1) - y;
} else {
rowHeight = getHeight() - y;
}
g.drawLine(x + columnWidth, y, x + columnWidth, y + rowHeight);
}
}
}
} else {
for(int col = 1 ; col < cols ; col++) {
int x = t.getColumnPosition(col);
g.drawLine(x, 0, x, actualHeight);
//g.drawLine(x+2, 0+2, x+2, actualHeight-2);
}
}
}
} else { // separate border
//if ((!t.hasHorizontalSpanning()) && (!t.hasVerticalSpanning())) {
for(int row = 0 ; row < rows; row++) {
int y = t.getRowPosition(row);
int h;
if (row+1<rows) {
h=t.getRowPosition(row+1)-y;
} else {
h=getY()+actualHeight-y-2;
}
for(int col = 0 ; col < cols ; col++) {
int x = t.getColumnPosition(col);
int w;
if (col+1<cols) {
w=t.getColumnPosition(col+1)-x;
} else {
w=getX()+actualWidth-x-2;
}
Component comp=t.getComponentAt(row, col);
if ((comp.isVisible()) &&
((drawEmptyCellsBorder) ||
((comp.getWidth()-comp.getStyle().getPadding(false, Component.RIGHT) - comp.getStyle().getPadding(false, Component.LEFT)>0) &&
(comp.getHeight()-comp.getStyle().getPadding(false, Component.TOP) - comp.getStyle().getPadding(false, Component.BOTTOM)>0)))) {
int rightMargin=comp.getStyle().getMargin(Component.RIGHT);
int bottomMargin=comp.getStyle().getMargin(Component.BOTTOM);
if (col==0) {
rightMargin*=2; // the first cell carries margins on both sides (left/right), so the next cell starts farther away; don't paint the border all the way up to it
}
if (row==0) {
bottomMargin*=2;
}
g.drawRect(x+comp.getStyle().getMargin(Component.LEFT), y+comp.getStyle().getMargin(Component.TOP), w-2-rightMargin, h-2-bottomMargin);
}
}
}
}
g.translate(-xPos, -yPos);
}
}
private Component createCellImpl(Object value, final int row, final int column, boolean editable) {
Component c = createCell(value, row, column, editable);
c.putClientProperty("row", new Integer(row));
c.putClientProperty("column", new Integer(column));
// we do this here to allow subclasses to return a text area or its subclass
if(c instanceof TextArea) {
((TextArea)c).addActionListener(listener);
}
Style s = c.getSelectedStyle();
//s.setMargin(0, 0, 0, 0);
s.setMargin(verticalBorderSpacing, verticalBorderSpacing, horizontalBorderSpacing, horizontalBorderSpacing);
if ((drawBorder) && (innerBorder!=INNER_BORDERS_NONE)) {
s.setBorder(null);
s = c.getUnselectedStyle();
s.setBorder(null);
} else {
s = c.getUnselectedStyle();
}
s.setBgTransparency(0);
//s.setMargin(0, 0, 0, 0);
s.setMargin(verticalBorderSpacing, verticalBorderSpacing, horizontalBorderSpacing, horizontalBorderSpacing);
return c;
}
/**
* Creates a cell based on the given value
*
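* <p>A hedged sketch of overriding this factory method in a subclass to tweak how
* editable cells look (the color choice below is purely illustrative):</p>
* <pre>
* Table table = new Table(model) {
*     protected Component createCell(Object value, int row, int column, boolean editable) {
*         Component cell = super.createCell(value, row, column, editable);
*         if(editable) {
*             cell.getUnselectedStyle().setFgColor(0x0000ff);
*         }
*         return cell;
*     }
* };
* </pre>
*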
* @param value the new value object
* @param row row number, -1 for the header rows
* @param column column number
* @param editable true if the cell is editable
* @return cell component instance
*/
protected Component createCell(Object value, int row, int column, boolean editable) {
if(row == -1) {
Label header = new Label((String)value);
header.setUIID("TableHeader");
header.setAlignment(titleAlignment);
header.setFocusable(true);
return header;
}
if(editable) {
TextField cell = new TextField("" + value, -1);
cell.setLeftAndRightEditingTrigger(false);
cell.setUIID("TableCell");
return cell;
}
Label cell = new Label("" + value);
cell.setUIID("TableCell");
cell.setAlignment(cellAlignment);
cell.setFocusable(true);
return cell;
}
/**
* @inheritDoc
*/
public void initComponent() {
// this can happen if deinitialize is invoked due to a menu command which modifies
// the content of the table while the listener wasn't bound
if(potentiallyDirtyModel) {
updateModel();
potentiallyDirtyModel = false;
}
model.addDataChangeListener(listener);
}
/**
* @inheritDoc
*/
public void deinitialize() {
// we unbind the listener to prevent a memory leak for the use case of keeping
// the model while discarding the component
// Prevent the model listener from being removed when the VKB is shown
if(!Display.getInstance().isVirtualKeyboardShowing()) {
potentiallyDirtyModel = true;
model.removeDataChangeListener(listener);
} else {
potentiallyDirtyModel = false;
}
}
/**
* Replaces the underlying model
*
* @param model the new model
*/
public void setModel(TableModel model) {
this.model = model;
updateModel();
revalidate();
}
/**
* Returns the model instance
*
* @return the model instance
*/
public TableModel getModel() {
return model;
}
/**
* Indicates whether the table border should be drawn
*
* @return the drawBorder
*/
public boolean isDrawBorder() {
return drawBorder;
}
/**
* Indicates whether the table border should be drawn
*
* @param drawBorder the drawBorder to set
*/
public void setDrawBorder(boolean drawBorder) {
this.drawBorder = drawBorder;
repaint();
}
/**
* Sets how to draw the inner border (all of it, only rows, only columns, or none).
* Note that setting any mode other than NONE/ALL will result in the border being drawn as collapsed, whether or not a collapsed border was requested
*
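* <p>For example, assuming a Table instance named {@code table}, to draw only the
* horizontal separators between rows:</p>
* <pre>
* table.setDrawBorder(true);
* table.setInnerBorderMode(Table.INNER_BORDERS_ROWS);
* </pre>
*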
* @param innerBorder one of the INNER_BORDERS_* constants
*/
public void setInnerBorderMode(int innerBorder) {
if ((innerBorder<INNER_BORDERS_NONE) || (innerBorder>INNER_BORDERS_ALL)) {
throw new IllegalArgumentException("Inner border mode must be one of the INNER_BORDERS_* constants");
}
this.innerBorder=innerBorder;
repaint();
}
/**
* Returns the current inner border mode
*
* @return the current inner border mode (one of the INNER_BORDERS_* constants)
*/
public int getInnerBorderMode() {
return innerBorder;
}
/**
* Returns whether an inner border should be drawn after the specified row.
* This allows subclasses to customize the drawing, for example to create the effect of segments in a table: instead of a line after each row, lines are drawn only after "chunks" of rows.
* Note that this is queried only when the inner border mode is set to INNER_BORDERS_ROWS
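*
* <p>For example, a subclass could draw a separator only after every third row
* (a purely illustrative grouping rule):</p>
* <pre>
* protected boolean shouldDrawInnerBorderAfterRow(int row) {
*     return row % 3 == 2;
* }
* </pre>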
*
* @param row The row in question
* @return true to draw inner border, false otherwise
*/
protected boolean shouldDrawInnerBorderAfterRow(int row) {
return true;
}
/**
* Indicates whether the borders of the cells should collapse to form a one line border
*
* @param collapseBorder true to collapse (default), false for separate borders
*/
public void setCollapseBorder(boolean collapseBorder) {
if (this.collapseBorder!=collapseBorder) {
this.collapseBorder = collapseBorder;
if ((horizontalBorderSpacing!=0) || (verticalBorderSpacing!=0)) { // an update is needed only if one of the spacings is non-zero; otherwise the margin is 0 for both collapsed and separate modes
updateMargins();
}
repaint();
}
}
/**
* Indicates whether empty cells should have borders (relevant only for separate borders and not for collapsed)
*
* @param drawEmptyCellsBorder - true to draw (default), false otherwise
*/
public void setDrawEmptyCellsBorder(boolean drawEmptyCellsBorder) {
this.drawEmptyCellsBorder = drawEmptyCellsBorder;
repaint();
}
/**
* Sets the spacing of cells border (relevant only for separate borders and not for collapsed)
*
* @param horizontal - The horizontal spacing
* @param vertical - The vertical spacing
*/
public void setBorderSpacing(int horizontal, int vertical) {
horizontalBorderSpacing=horizontal;
verticalBorderSpacing=vertical;
updateMargins();
}
private void updateMargins() {
TableLayout t = (TableLayout)getLayout();
int hSpace=horizontalBorderSpacing;
int vSpace=verticalBorderSpacing;
if (collapseBorder) { // not relevant for collapse border
hSpace=0;
vSpace=0;
}
if ((!t.hasHorizontalSpanning()) && (!t.hasVerticalSpanning())) {
for(int row = 0 ; row < t.getRows(); row++) {
for(int col = 0 ; col < t.getColumns() ; col++) {
Component cmp=null;
try {
cmp=t.getComponentAt(row, col);
} catch (Exception e) {
// parent of cmp can be null as well - TODO - check why
}
if (cmp!=null) {
int leftMargin=(col==0)?hSpace:0;
int topMargin=(row==0)?vSpace:0;
cmp.getUnselectedStyle().setMargin(topMargin, vSpace, leftMargin, hSpace);
cmp.getSelectedStyle().setMargin(topMargin, vSpace, leftMargin, hSpace);
}
}
}
}
repaint();
}
/**
* Indicates the alignment of the title; see label alignment for details
*
* @return the title alignment
* @see com.sun.lwuit.Label#setAlignment(int)
*/
public int getTitleAlignment() {
return titleAlignment;
}
/**
* Indicates the alignment of the title; see label alignment for details
*
* @param titleAlignment the title alignment
* @see com.sun.lwuit.Label#setAlignment(int)
*/
public void setTitleAlignment(int titleAlignment) {
this.titleAlignment = titleAlignment;
repaint();
}
/**
* Returns the column in which the given cell is placed
*
* @param cell the component representing the cell placed in the table
* @return the column in which the cell was placed in the table
*/
public int getCellColumn(Component cell) {
Integer i = ((Integer)cell.getClientProperty("column"));
if(i != null) {
return i.intValue();
}
return -1;
}
/**
* Returns the row in which the given cell is placed
*
* @param cell the component representing the cell placed in the table
* @return the row in which the cell was placed in the table
*/
public int getCellRow(Component cell) {
Integer i = ((Integer)cell.getClientProperty("row"));
if(i != null) {
return i.intValue();
}
return -1;
}
/**
* Indicates the alignment of the cells; see label alignment for details
*
* @see com.sun.lwuit.Label#setAlignment(int)
* @return the cell alignment
*/
public int getCellAlignment() {
return cellAlignment;
}
/**
* Indicates the alignment of the cells; see label alignment for details
*
* @param cellAlignment the table cell alignment
* @see com.sun.lwuit.Label#setAlignment(int)
*/
public void setCellAlignment(int cellAlignment) {
this.cellAlignment = cellAlignment;
repaint();
}
/**
* Indicates whether the table should render a table header as the first row
*
* @return the includeHeader
*/
public boolean isIncludeHeader() {
return includeHeader;
}
/**
* Indicates whether the table should render a table header as the first row
*
* @param includeHeader the includeHeader to set
*/
public void setIncludeHeader(boolean includeHeader) {
this.includeHeader = includeHeader;
updateModel();
}
/**
* Creates the table cell constraint for the given cell; this method can be overridden for
* the purposes of modifying the table constraints.
*
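* <p>A hedged sketch of an override that widens the first column; the setWidthPercentage
* call and the 60% figure below are illustrative assumptions rather than requirements:</p>
* <pre>
* protected TableLayout.Constraint createCellConstraint(Object value, int row, int column) {
*     TableLayout.Constraint con = super.createCellConstraint(value, row, column);
*     if(column == 0) {
*         con.setWidthPercentage(60);
*     }
*     return con;
* }
* </pre>
*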
* @param value the value of the cell
* @param row the table row
* @param column the table column
* @return the table constraint
*/
protected TableLayout.Constraint createCellConstraint(Object value, int row, int column) {
if(includeHeader) {
row++;
}
TableLayout t = (TableLayout)getLayout();
return t.createConstraint(row, column);
}
/**
* @inheritDoc
*/
public String[] getPropertyNames() {
return new String[] {"data", "header"};
}
/**
* @inheritDoc
*/
public Class[] getPropertyTypes() {
return new Class[] {String[][].class, String[].class};
}
/**
* @inheritDoc
*/
public Object getPropertyValue(String name) {
if(name.equals("data")) {
return ((DefaultTableModel)model).data;
}
if(name.equals("header")) {
return ((DefaultTableModel)model).columnNames;
}
return null;
}
/**
* @inheritDoc
*/
public String setPropertyValue(String name, Object value) {
if(name.equals("data")) {
setModel(new DefaultTableModel(((DefaultTableModel)model).columnNames, (String[][])value));
return null;
}
if(name.equals("header")) {
setModel(new DefaultTableModel((String[])value, ((DefaultTableModel)model).data));
return null;
}
return super.setPropertyValue(name, value);
}
class Listener implements DataChangedListener, ActionListener {
/**
* @inheritDoc
*/
public final void dataChanged(int row, int column) {
Object value = model.getValueAt(row, column);
boolean e = model.isCellEditable(row, column);
Component cell = createCellImpl(value, row, column, e);
TableLayout t = (TableLayout)getLayout();
TableLayout.Constraint con = createCellConstraint(value, row, column);
if(includeHeader) {
row++;
}
Component c = t.getComponentAt(row, column);
removeComponent(c);
// a repaint sent right before this might result in an artifact for some use cases so
// removing visibility essentially cancels repaints
c.setVisible(false);
addComponent(con, cell);
layoutContainer();
cell.requestFocus();
revalidate();
}
public void actionPerformed(ActionEvent evt) {
TextArea t = (TextArea)evt.getSource();
int row = getCellRow(t);
int column = getCellColumn(t);
getModel().setValueAt(row, column, t.getText());
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet.columnreaders;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.vector.VarDecimalVector;
import org.apache.drill.exec.vector.NullableVarDecimalVector;
import org.apache.drill.exec.vector.BigIntVector;
import org.apache.drill.exec.vector.BitVector;
import org.apache.drill.exec.vector.DateVector;
import org.apache.drill.exec.vector.Float4Vector;
import org.apache.drill.exec.vector.Float8Vector;
import org.apache.drill.exec.vector.IntVector;
import org.apache.drill.exec.vector.IntervalVector;
import org.apache.drill.exec.vector.NullableBigIntVector;
import org.apache.drill.exec.vector.NullableBitVector;
import org.apache.drill.exec.vector.NullableDateVector;
import org.apache.drill.exec.vector.NullableFloat4Vector;
import org.apache.drill.exec.vector.NullableFloat8Vector;
import org.apache.drill.exec.vector.NullableIntVector;
import org.apache.drill.exec.vector.NullableIntervalVector;
import org.apache.drill.exec.vector.NullableTimeStampVector;
import org.apache.drill.exec.vector.NullableTimeVector;
import org.apache.drill.exec.vector.NullableVarBinaryVector;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.TimeStampVector;
import org.apache.drill.exec.vector.TimeVector;
import org.apache.drill.exec.vector.UInt4Vector;
import org.apache.drill.exec.vector.UInt8Vector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.VarBinaryVector;
import org.apache.drill.exec.vector.VarCharVector;
import org.apache.drill.exec.vector.VariableWidthVector;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.Encoding;
import org.apache.parquet.format.ConvertedType;
import org.apache.parquet.format.SchemaElement;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.schema.PrimitiveType;
public class ColumnReaderFactory {
/**
* Creates a column reader for a fixed-width Parquet column.
*
* @param recordReader the record reader that owns the created column reader
* @param fixedLength whether the column has a fixed length
* @param descriptor descriptor of the Parquet column
* @param columnChunkMetaData metadata of the column chunk being read
* @param v value vector the column data is written into
* @param schemaElement Parquet schema element describing the column
* @return ColumnReader object instance
* @throws ExecutionSetupException if the column type, converted type or encoding is not supported
*/
static ColumnReader<?> createFixedColumnReader(ParquetRecordReader recordReader, boolean fixedLength, ColumnDescriptor descriptor,
ColumnChunkMetaData columnChunkMetaData, ValueVector v,
SchemaElement schemaElement)
throws Exception {
ConvertedType convertedType = schemaElement.getConverted_type();
// if the column is required or repeated (in which case we just want to use this to generate the appropriate
// ColumnReader for actually transferring data into the data vector inside of our repeated vector)
if (descriptor.getMaxDefinitionLevel() == 0 || descriptor.getMaxRepetitionLevel() > 0) {
if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN){
return new BitReader(recordReader, descriptor, columnChunkMetaData,
fixedLength, (BitVector) v, schemaElement);
} else if (!columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY) && (
columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY
|| columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT96)) {
if (convertedType == null) {
return new FixedByteAlignedReader.FixedBinaryReader(recordReader, descriptor,
columnChunkMetaData, (VariableWidthVector) v, schemaElement);
}
switch (convertedType) {
case DECIMAL:
return new FixedByteAlignedReader.VarDecimalReader(recordReader, descriptor,
columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
case INTERVAL:
return new FixedByteAlignedReader.IntervalReader(recordReader, descriptor,
columnChunkMetaData, fixedLength, (IntervalVector) v, schemaElement);
default:
return new FixedByteAlignedReader.FixedBinaryReader(recordReader, descriptor,
columnChunkMetaData, (VariableWidthVector) v, schemaElement);
}
} else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT32 && convertedType == ConvertedType.DATE){
switch(recordReader.getDateCorruptionStatus()) {
case META_SHOWS_CORRUPTION:
return new FixedByteAlignedReader.CorruptDateReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (DateVector) v, schemaElement);
case META_SHOWS_NO_CORRUPTION:
return new FixedByteAlignedReader.DateReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (DateVector) v, schemaElement);
case META_UNCLEAR_TEST_VALUES:
return new FixedByteAlignedReader.CorruptionDetectingDateReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (DateVector) v, schemaElement);
default:
throw new ExecutionSetupException(
String.format("Issue setting up parquet reader for date type, " +
"unrecognized date corruption status %s. See DRILL-4203 for more info.",
recordReader.getDateCorruptionStatus()));
}
} else {
if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) {
switch (columnChunkMetaData.getType()) {
case INT32:
if (convertedType == null) {
return new ParquetFixedWidthDictionaryReaders.DictionaryIntReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (IntVector) v, schemaElement);
}
switch (convertedType) {
case DECIMAL:
return new ParquetFixedWidthDictionaryReaders.DictionaryVarDecimalReader(recordReader,
descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
case TIME_MILLIS:
return new ParquetFixedWidthDictionaryReaders.DictionaryTimeReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (TimeVector) v, schemaElement);
case INT_8:
case INT_16:
case INT_32:
return new ParquetFixedWidthDictionaryReaders.DictionaryIntReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (IntVector) v, schemaElement);
case UINT_8:
case UINT_16:
case UINT_32:
return new ParquetFixedWidthDictionaryReaders.DictionaryUInt4Reader(recordReader, descriptor, columnChunkMetaData, fixedLength, (UInt4Vector) v, schemaElement);
default:
throw new ExecutionSetupException("Unsupported dictionary converted type " + convertedType + " for primitive type INT32");
}
case INT64:
if (convertedType == null) {
return new ParquetFixedWidthDictionaryReaders.DictionaryBigIntReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (BigIntVector) v, schemaElement);
}
switch (convertedType) {
// DRILL-6670: handle TIMESTAMP_MICROS as INT64 with no logical type
case INT_64:
case TIMESTAMP_MICROS:
return new ParquetFixedWidthDictionaryReaders.DictionaryBigIntReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (BigIntVector) v, schemaElement);
case UINT_64:
return new ParquetFixedWidthDictionaryReaders.DictionaryUInt8Reader(recordReader, descriptor, columnChunkMetaData, fixedLength, (UInt8Vector) v, schemaElement);
case DECIMAL:
return new ParquetFixedWidthDictionaryReaders.DictionaryVarDecimalReader(recordReader,
descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
case TIMESTAMP_MILLIS:
return new ParquetFixedWidthDictionaryReaders.DictionaryTimeStampReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (TimeStampVector) v, schemaElement);
default:
throw new ExecutionSetupException("Unsupported dictionary converted type " + convertedType + " for primitive type INT64");
}
case FLOAT:
return new ParquetFixedWidthDictionaryReaders.DictionaryFloat4Reader(recordReader, descriptor, columnChunkMetaData, fixedLength, (Float4Vector) v, schemaElement);
case DOUBLE:
return new ParquetFixedWidthDictionaryReaders.DictionaryFloat8Reader(recordReader, descriptor, columnChunkMetaData, fixedLength, (Float8Vector) v, schemaElement);
case FIXED_LEN_BYTE_ARRAY:
return new ParquetFixedWidthDictionaryReaders.DictionaryFixedBinaryReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (VarBinaryVector) v, schemaElement);
case INT96:
if (recordReader.getFragmentContext().getOptions().getOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP).bool_val) {
return new ParquetFixedWidthDictionaryReaders.DictionaryBinaryAsTimeStampReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (TimeStampVector) v, schemaElement);
} else {
return new ParquetFixedWidthDictionaryReaders.DictionaryFixedBinaryReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (VarBinaryVector) v, schemaElement);
}
default:
throw new ExecutionSetupException("Unsupported dictionary column type " + descriptor.getType().name() );
}
} else if (convertedType == ConvertedType.DECIMAL) {
return new FixedByteAlignedReader.VarDecimalReader(recordReader,
descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
} else {
return new FixedByteAlignedReader<>(recordReader, descriptor, columnChunkMetaData,
fixedLength, v, schemaElement);
}
}
} else { // if the column is nullable
if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN) {
return new NullableBitReader(recordReader, descriptor, columnChunkMetaData,
fixedLength, (NullableBitVector) v, schemaElement);
} else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT32 && convertedType == ConvertedType.DATE) {
switch(recordReader.getDateCorruptionStatus()) {
case META_SHOWS_CORRUPTION:
return new NullableFixedByteAlignedReaders.NullableCorruptDateReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (NullableDateVector)v, schemaElement);
case META_SHOWS_NO_CORRUPTION:
return new NullableFixedByteAlignedReaders.NullableDateReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (NullableDateVector) v, schemaElement);
case META_UNCLEAR_TEST_VALUES:
return new NullableFixedByteAlignedReaders.CorruptionDetectingNullableDateReader(recordReader, descriptor, columnChunkMetaData, fixedLength, (NullableDateVector) v, schemaElement);
default:
throw new ExecutionSetupException(
String.format("Issue setting up parquet reader for date type, " +
"unrecognized date corruption status %s. See DRILL-4203 for more info.",
recordReader.getDateCorruptionStatus()));
}
} else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) {
if (convertedType == ConvertedType.DECIMAL) {
return new NullableFixedByteAlignedReaders.NullableVarDecimalReader(recordReader,
descriptor, columnChunkMetaData, fixedLength, (NullableVarDecimalVector) v, schemaElement);
} else if (convertedType == ConvertedType.INTERVAL) {
return new NullableFixedByteAlignedReaders.NullableIntervalReader(recordReader, descriptor,
columnChunkMetaData, fixedLength, (NullableIntervalVector) v, schemaElement);
}
} else {
return getNullableColumnReader(recordReader, descriptor,
columnChunkMetaData, fixedLength, v, schemaElement);
}
}
throw new Exception("Unexpected parquet metadata configuration.");
}
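  /**
   * Creates a variable-length column reader for the given column. Required columns
   * (max definition level 0) get the non-nullable readers, all other columns get the
   * nullable variants; UTF8/ENUM columns are read as VarChar, DECIMAL columns as
   * VarDecimal when the supplied vector supports it, and everything else as VarBinary.
   */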
static VarLengthValuesColumn<?> getReader(ParquetRecordReader parentReader, ColumnDescriptor descriptor,
ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, ValueVector v,
SchemaElement schemaElement
) throws ExecutionSetupException {
ConvertedType convertedType = schemaElement.getConverted_type();
switch (descriptor.getMaxDefinitionLevel()) {
case 0:
if (convertedType == null) {
return new VarLengthColumnReaders.VarBinaryColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (VarBinaryVector) v, schemaElement);
}
switch (convertedType) {
case UTF8:
case ENUM:
return new VarLengthColumnReaders.VarCharColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (VarCharVector) v, schemaElement);
case DECIMAL:
if (v instanceof VarDecimalVector) {
return new VarLengthColumnReaders.VarDecimalColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (VarDecimalVector) v, schemaElement);
}
default:
return new VarLengthColumnReaders.VarBinaryColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (VarBinaryVector) v, schemaElement);
}
default:
if (convertedType == null) {
return new VarLengthColumnReaders.NullableVarBinaryColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (NullableVarBinaryVector) v, schemaElement);
}
switch (convertedType) {
case UTF8:
case ENUM:
return new VarLengthColumnReaders.NullableVarCharColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (NullableVarCharVector) v, schemaElement);
case DECIMAL:
if (v instanceof NullableVarDecimalVector) {
return new VarLengthColumnReaders.NullableVarDecimalColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (NullableVarDecimalVector) v, schemaElement);
}
default:
return new VarLengthColumnReaders.NullableVarBinaryColumn(parentReader, descriptor, columnChunkMetaData, fixedLength, (NullableVarBinaryVector) v, schemaElement);
}
}
}
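  /**
   * Creates a reader for a nullable fixed-width column. Columns without dictionary encoding
   * are handled first (INT96 optionally read as a timestamp, DECIMAL as variable-width decimal,
   * everything else via the generic byte-aligned reader); dictionary-encoded columns are
   * dispatched on their primitive and converted types.
   */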
public static NullableColumnReader<?> getNullableColumnReader(ParquetRecordReader parentReader,
ColumnDescriptor columnDescriptor,
ColumnChunkMetaData columnChunkMetaData,
boolean fixedLength,
ValueVector valueVec,
SchemaElement schemaElement) throws ExecutionSetupException {
ConvertedType convertedType = schemaElement.getConverted_type();
if (! columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) {
if (columnDescriptor.getType() == PrimitiveType.PrimitiveTypeName.INT96) {
// TODO: check convertedType once parquet support TIMESTAMP_NANOS type annotation.
if (parentReader.getFragmentContext().getOptions().getOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP).bool_val) {
return new NullableFixedByteAlignedReaders.NullableFixedBinaryAsTimeStampReader(parentReader, columnDescriptor, columnChunkMetaData, true, (NullableTimeStampVector) valueVec, schemaElement);
} else {
return new NullableFixedByteAlignedReaders.NullableFixedBinaryReader(parentReader, columnDescriptor, columnChunkMetaData, true, (NullableVarBinaryVector) valueVec, schemaElement);
}
} else if (convertedType == ConvertedType.DECIMAL) {
// NullableVarDecimalVector allows storing of values with different width,
// so every time when the value is added, offset vector should be updated.
// Therefore NullableVarDecimalReader is used here instead of NullableFixedByteAlignedReader.
return new NullableFixedByteAlignedReaders.NullableVarDecimalReader(parentReader,
columnDescriptor, columnChunkMetaData, fixedLength, (NullableVarDecimalVector) valueVec, schemaElement);
} else {
return new NullableFixedByteAlignedReaders.NullableFixedByteAlignedReader<>(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, valueVec, schemaElement);
}
} else {
switch (columnDescriptor.getType()) {
case INT32:
if (convertedType == null) {
return new NullableFixedByteAlignedReaders.NullableDictionaryIntReader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableIntVector) valueVec, schemaElement);
}
switch (convertedType) {
case DECIMAL:
return new NullableFixedByteAlignedReaders.NullableDictionaryVarDecimalReader(parentReader,
columnDescriptor, columnChunkMetaData, fixedLength, (NullableVarDecimalVector) valueVec, schemaElement);
case TIME_MILLIS:
return new NullableFixedByteAlignedReaders.NullableDictionaryTimeReader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableTimeVector)valueVec, schemaElement);
default:
throw new ExecutionSetupException("Unsupported nullable converted type " + convertedType + " for primitive type INT32");
}
case INT64:
if (convertedType == null) {
return new NullableFixedByteAlignedReaders.NullableDictionaryBigIntReader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableBigIntVector)valueVec, schemaElement);
}
switch (convertedType) {
case DECIMAL:
return new NullableFixedByteAlignedReaders.NullableDictionaryVarDecimalReader(parentReader,
columnDescriptor, columnChunkMetaData, fixedLength, (NullableVarDecimalVector) valueVec, schemaElement);
case TIMESTAMP_MILLIS:
return new NullableFixedByteAlignedReaders.NullableDictionaryTimeStampReader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableTimeStampVector)valueVec, schemaElement);
// DRILL-6670: handle TIMESTAMP_MICROS as INT64 with no logical type
case TIMESTAMP_MICROS:
return new NullableFixedByteAlignedReaders.NullableDictionaryBigIntReader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableBigIntVector)valueVec, schemaElement);
default:
throw new ExecutionSetupException("Unsupported nullable converted type " + convertedType + " for primitive type INT64");
}
case INT96:
// TODO: check convertedType once parquet support TIMESTAMP_NANOS type annotation.
if (parentReader.getFragmentContext().getOptions().getOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP).bool_val) {
return new NullableFixedByteAlignedReaders.NullableFixedBinaryAsTimeStampReader(parentReader, columnDescriptor, columnChunkMetaData, true, (NullableTimeStampVector) valueVec, schemaElement);
} else {
return new NullableFixedByteAlignedReaders.NullableFixedBinaryReader(parentReader, columnDescriptor, columnChunkMetaData, true, (NullableVarBinaryVector) valueVec, schemaElement);
}
case FLOAT:
return new NullableFixedByteAlignedReaders.NullableDictionaryFloat4Reader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableFloat4Vector)valueVec, schemaElement);
case DOUBLE:
return new NullableFixedByteAlignedReaders.NullableDictionaryFloat8Reader(parentReader, columnDescriptor, columnChunkMetaData, fixedLength, (NullableFloat8Vector)valueVec, schemaElement);
default:
throw new ExecutionSetupException("Unsupported nullable column type " + columnDescriptor.getType().name() );
}
}
}
}
|
|
/**
* $Id: mxGdCodec.java,v 1.1 2010-08-25 08:36:59 gaudenz Exp $
* Copyright (c) 2010, Gaudenz Alder, David Benson
*/
package com.mxgraph.io;
import com.mxgraph.io.gd.mxGdDocument;
import com.mxgraph.io.gd.mxGdEdge;
import com.mxgraph.io.gd.mxGdNode;
import com.mxgraph.model.mxCell;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxPoint;
import com.mxgraph.view.mxGraph;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Parses a GD .txt file and imports it into the given graph.<br/>
 * This class depends on the classes contained in
 * com.mxgraph.io.gd.
*/
public class mxGdCodec
{
/**
* Map with the vertex cells added in the addNode method.
*/
private static HashMap<String, Object> cellsMap = new HashMap<String, Object>();
/**
	 * Returns the coordinates of the top left corner of the node.
	 * @param node Node whose top left corner is calculated.
	 * @return mxPoint that represents the coordinates.
*/
private static mxPoint getOriginPoint(mxGdNode node)
{
mxPoint coord = node.getCoordinates();
mxPoint dim = node.getDimentions();
double x = coord.getX() - dim.getX() / 2;
double y = coord.getY() - dim.getY() / 2;
return new mxPoint(x, y);
}
/**
* Adds a new vertex to the graph.
* @param graph Graph where the vertex is added.
* @param parent Parent of the vertex to add.
* @param node Node
* @return Returns the vertex added.
*/
private static mxCell addNode(mxGraph graph, Object parent, mxGdNode node)
{
		mxPoint coordinates = getOriginPoint(node);
		mxPoint dimensions = node.getDimentions();
		//Set the node name as label.
		String label = node.getName();
		//Set the node name as ID.
		String id = node.getName();
		//Insert a new vertex in the graph
		mxCell v1 = (mxCell) graph.insertVertex(parent, id, label,
				coordinates.getX(), coordinates.getY(), dimensions.getX(),
				dimensions.getY());
cellsMap.put(node.getName(), v1);
return v1;
}
/**
	 * Returns the string that represents the content of a given style map.
	 * @param styleMap Map with the style values.
	 * @param asig Assignment symbol inserted between each key and value (e.g. "=").
	 * @return String that represents the style.
*/
private static String getStyleString(Map<String, Object> styleMap,
String asig)
{
		StringBuilder style = new StringBuilder();
		for (Map.Entry<String, Object> entry : styleMap.entrySet())
		{
			style.append(entry.getKey()).append(asig).append(entry.getValue()).append(";");
		}
		return style.toString();
}
/**
	 * Builds the default style used for edges and returns it as a string.
	 * @return Style string applied to edges.
*/
private static String getEdgeStyle()
{
Hashtable<String, Object> styleMap = new Hashtable<String, Object>();
//Defines Edge Style
//Defines if line is rounding
styleMap.put(mxConstants.STYLE_ROUNDED, false);
return getStyleString(styleMap, "=");
}
/**
* Adds a new edge to the graph.
* @param graph Graph where the edge is added.
* @param parent Parent of the edge to add.
	 * @param edge Edge to be added.
* @return Returns the edge added.
*/
private static mxCell addEdge(mxGraph graph, Object parent, mxGdEdge edge)
{
//Get source and target vertex
Object source = cellsMap.get(edge.getSourceName());
Object target = cellsMap.get(edge.getTargetName());
//Defines style of the edge.
String style = getEdgeStyle();
//Insert new edge and set constraints.
mxCell e = (mxCell) graph.insertEdge(parent, null, "", source, target,
style);
return e;
}
/**
	 * Receives an mxGdDocument, parses it and inserts the resulting nodes and edges into the given graph.
* @param document GD to be parsed
* @param graph Graph where the parsed graph is included.
*/
public static void decode(mxGdDocument document, mxGraph graph)
{
Object parent = graph.getDefaultParent();
graph.getModel().beginUpdate();
//Add nodes.
List<mxGdNode> nodes = document.getNodes();
for (mxGdNode node : nodes)
{
addNode(graph, parent, node);
}
//Add Edges.
List<mxGdEdge> edges = document.getEdges();
for (mxGdEdge edge : edges)
{
addEdge(graph, parent, edge);
}
graph.getModel().endUpdate();
}
/**
	 * Returns a GD document with the data of the vertices and edges in the graph.
	 * @param document GD document where the elements are put.
	 * @param parent Parent cell of the vertices and edges to be added.
	 * @param graph Graph that contains the vertices and edges.
	 * @param parentCoord Absolute coordinates of the parent cell's origin, added to the child geometries.
* @return Returns the document with the elements added.
*/
private static mxGdDocument encodeNodesAndEdges(mxGdDocument document,
Object parent, mxGraph graph, mxPoint parentCoord)
{
Object[] vertexes = graph.getChildVertices(parent);
List<mxGdEdge> GDedges = document.getEdges();
GDedges = encodeEdges(GDedges, parent, graph);
document.setEdges(GDedges);
for (Object vertex : vertexes)
{
List<mxGdNode> GDnodes = document.getNodes();
mxCell v = (mxCell) vertex;
mxGeometry geom = v.getGeometry();
String id = v.getId();
mxPoint coord = new mxPoint(parentCoord.getX() + geom.getCenterX(),
parentCoord.getY() + geom.getCenterY());
mxPoint dim = new mxPoint(geom.getWidth(), geom.getHeight());
mxPoint cornerCoord = new mxPoint(parentCoord.getX() + geom.getX(),
parentCoord.getY() + geom.getY());
mxGdNode GDnode = new mxGdNode(id, coord, dim);
GDnodes.add(GDnode);
document.setNodes(GDnodes);
document = encodeNodesAndEdges(document, vertex, graph, cornerCoord);
}
return document;
}
/**
	 * Returns a list of mxGdEdge with the data of the edges in the graph.
* @param GDedges List where the elements are put.
* @param parent Parent cell of the edges to be added.
* @param graph Graph that contains the edges.
* @return Returns the list GDedges with the elements added.
*/
private static List<mxGdEdge> encodeEdges(List<mxGdEdge> GDedges,
Object parent, mxGraph graph)
{
Object[] edges = graph.getChildEdges(parent);
for (Object edge : edges)
{
mxCell e = (mxCell) edge;
mxCell source = (mxCell) e.getSource();
mxCell target = (mxCell) e.getTarget();
String sourceName = "";
String targetName = "";
sourceName = source.getId();
targetName = target.getId();
mxGdEdge GDedge = new mxGdEdge(sourceName, targetName);
GDedges.add(GDedge);
}
return GDedges;
}
/**
* Generates a GD document with the cells in the graph.
* The actual implementation only uses the cells located in the first level.
* @param graph Graph with the cells.
* @return The GD document generated.
*/
public static mxGdDocument encode(mxGraph graph)
{
Object parent = graph.getDefaultParent();
mxGdDocument document = new mxGdDocument();
//Adds Nodes and Edges.
document = encodeNodesAndEdges(document, parent, graph, new mxPoint(0,
0));
return document;
}
}
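/*
 * Usage sketch (not part of the original file). How the mxGdDocument is obtained is left out,
 * since its loading API is not shown here; only methods defined above are used.
 *
 *   mxGdDocument document = ...;                      // assumed to be already parsed from a GD .txt file
 *   mxGraph graph = new mxGraph();
 *   mxGdCodec.decode(document, graph);                // imports the GD nodes and edges into the graph
 *   mxGdDocument exported = mxGdCodec.encode(graph);  // exports the graph back to a GD document
 */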
|
|
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser.expressions;
import gw.internal.gosu.parser.*;
import gw.lang.parser.MemberAccessKind;
import gw.lang.parser.IExpressionRuntime;
import gw.lang.parser.expressions.IBeanMethodCallExpression;
import gw.lang.reflect.*;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.gs.IGosuMethodInfo;
import gw.lang.reflect.java.JavaTypes;
/**
* An expression representing a bean method call:
* <pre>
* <i>bean-method-call-expression</i>
* <member-access> <b>(</b> [<argument-list>] <b>)</b>
* <p/>
* <i>member-access</i>
* <root-expression>.<member>
* <root-expression>*.<member>
* <root-expression>[member-name]
* <p/>
* <i>root-expression</i>
* <bean-reference>
* <type-literal>
* <p/>
* <i>member</i>
* <member-access>
* <identifier>
* <p/>
* <i>bean-reference</i>
* <primary-expression>
* <p/>
* <i>member-name</i>
* <expression>
* </pre>
*
* @see gw.lang.parser.IGosuParser
*/
public final class BeanMethodCallExpression extends Expression implements IBeanMethodCallExpression, IHasOperatorLineNumber {
private Expression _rootExpression;
private IType[] _argTypes;
private String _accessPath;
private Expression[] _args;
private IMethodInfo _md;
private IFunctionType _funcType;
private MemberAccessKind _kind;
private int[] _namedArgOrder;
private int _iArgPos;
/**
   * Start offset of the member access path (without the leading '.')
*/
protected int _startOffset;
private static final IType[] EMPTY_ARG_TYPES = new IType[0];
private IExpressionRuntime _expressionRuntime;
private int _opLineNum;
public IFunctionType getFunctionType() {
return _funcType;
}
public void setFunctionType(IFunctionType funcType) {
_funcType = funcType;
}
public Expression getRootExpression() {
return _rootExpression;
}
public void setRootExpression(Expression rootExpression) {
_rootExpression = rootExpression;
}
/**
   * @return An array of IType representing the argument types of the method call.
*/
public IType[] getArgTypes() {
return _argTypes;
}
/**
   * @param argTypes An array of IType for the argument types of the method call.
*/
public void setArgTypes(IType[] argTypes) {
_argTypes = argTypes.length == 0 ? EMPTY_ARG_TYPES : argTypes;
}
/**
   * @return A String representing the member access path. Note the
   * member access path for the expression Root.foo.bar() is {foo, bar}.
*/
public String getMemberName() {
return _accessPath;
}
/**
   * @param accessPath A String representing the member access path.
*/
public void setAccessPath(String accessPath) {
assert accessPath != null;
_accessPath = StringCache.get(accessPath);
}
public String getAccessPath() {
return _accessPath;
}
public int getStartOffset() {
return _startOffset;
}
public void setExpressionRuntime(IExpressionRuntime expressionRuntime) {
_expressionRuntime = expressionRuntime;
}
@Override
public IPropertyInfo getPropertyInfo()
{
    return null; // No property info is associated with a bean method call expression.
}
public IExpressionRuntime getExpressionRuntime() {
return _expressionRuntime;
}
public void setStartOffset(int startOffset) {
_startOffset = startOffset;
}
/**
   * @return An array of expressions corresponding to the arguments of the
   * expression.
*/
public Expression[] getArgs() {
return _args;
}
/**
   * @param args An array of expressions corresponding to the arguments of
   *             the expression.
*/
public void setArgs(Expression[] args) {
_args = args == null || args.length == 0 ? null : args;
}
public int[] getNamedArgOrder()
{
return _namedArgOrder;
}
public void setNamedArgOrder( int[] namedArgOrder )
{
_namedArgOrder = namedArgOrder;
}
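  /**
   * Sets the method descriptor for this call. If the resolved method is owned by the Gosu
   * class backing JavaTypes.IGOSU_OBJECT(), the descriptor is remapped to the corresponding
   * method on the IGosuObject type itself, looked up by display name and parameter types.
   */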
public void setMethodDescriptor(IMethodInfo md) {
_md = md;
if (md != null) {
IType type = JavaTypes.IGOSU_OBJECT();
if (_md.getOwnersType() == IGosuClassInternal.Util.getGosuClassFrom(type)) {
_md = type.getTypeInfo().getMethod(_md.getDisplayName(), ((FunctionType) ((IGosuMethodInfo) _md).getDfs().getType()).getParameterTypes());
}
}
}
public IMethodInfo getMethodDescriptor() {
return _md;
}
public IMethodInfo getGenericMethodDescriptor() {
if (_md instanceof GosuMethodInfo) {
ReducedDynamicFunctionSymbol dfs = ((GosuMethodInfo) _md).getDfs();
if (dfs instanceof ReducedParameterizedDynamicFunctionSymbol) {
return (IMethodInfo) ((ReducedParameterizedDynamicFunctionSymbol) dfs).getBackingDfs().getMethodOrConstructorInfo();
}
}
return _md;
}
  /**
   * @return The type of the root expression; if the root is a Gosu proxy class, the
   *         proxied Java type is returned instead.
   */
public IType getRootType() {
IType rootType = getRootExpression().getType();
rootType = IGosuClass.ProxyUtil.isProxy(rootType) && rootType instanceof IGosuClass ? ((IGosuClass) rootType).getJavaType() : rootType;
return rootType;
}
public MemberAccessKind getMemberAccessKind()
{
return _kind;
}
public void setMemberAccessKind( MemberAccessKind kind )
{
if( kind == MemberAccessKind.NORMAL &&
_md != null &&
GosuClassProxyFactory.isPropertyGetter( _md ) )
{
// getter call null-safety treatment must behave the same way as property member access
kind = MemberAccessKind.NULL_SAFE;
}
_kind = kind;
}
@Override
public boolean isNullSafe()
{
return getMemberAccessKind() == MemberAccessKind.NULL_SAFE || isExpansion();
}
public boolean isExpansion() {
return _kind == MemberAccessKind.EXPANSION;
}
/**
* Evaluates the bean method call.
*
* @return The value of the expression.
*/
public Object evaluate() {
if (!isCompileTimeConstant() ) {
return super.evaluate();
}
throw new CannotExecuteGosuException();
}
@Override
public String toString() {
String strOut = getRootExpression().toString();
if (_accessPath != null) {
strOut += "." + _accessPath;
}
strOut += "(";
if (_args != null && _args.length > 0) {
strOut += " ";
for (int i = 0; i < _args.length; i++) {
if (i != 0) {
strOut += ", ";
}
strOut += _args[i].toString();
}
strOut += " ";
}
return strOut += ")";
}
public int getArgPosition() {
return _iArgPos;
}
public void setArgPosition(int iArgPos) {
_iArgPos = iArgPos;
}
@Override
public int getOperatorLineNumber()
{
return _opLineNum;
}
@Override
public void setOperatorLineNumber( int operatorLineNumber )
{
_opLineNum = operatorLineNumber;
}
}
|
|
/**
*/
package gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl;
import gluemodel.CIM.IEC61970.Generation.GenerationDynamics.GenerationDynamicsPackage;
import gluemodel.CIM.IEC61970.Generation.GenerationDynamics.PWRSteamSupply;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>PWR Steam Supply</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getHotLegToColdLegGain <em>Hot Leg To Cold Leg Gain</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getPressureCG <em>Pressure CG</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getHotLegLagTC <em>Hot Leg Lag TC</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getSteamPressureDropLagTC <em>Steam Pressure Drop Lag TC</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getThrottlePressureSP <em>Throttle Pressure SP</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getColdLegFBLeadTC2 <em>Cold Leg FB Lead TC2</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getColdLegFBLeadTC1 <em>Cold Leg FB Lead TC1</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getColdLegLagTC <em>Cold Leg Lag TC</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getSteamFlowFG <em>Steam Flow FG</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getCoreHTLagTC2 <em>Core HT Lag TC2</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getCoreHTLagTC1 <em>Core HT Lag TC1</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getCoreNeutronicsEffTC <em>Core Neutronics Eff TC</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getSteamPressureFG <em>Steam Pressure FG</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getFeedbackFactor <em>Feedback Factor</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getHotLegSteamGain <em>Hot Leg Steam Gain</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getCoreNeutronicsHT <em>Core Neutronics HT</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getColdLegFG1 <em>Cold Leg FG1</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getColdLegFG2 <em>Cold Leg FG2</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getThrottlePressureFactor <em>Throttle Pressure Factor</em>}</li>
* <li>{@link gluemodel.CIM.IEC61970.Generation.GenerationDynamics.impl.PWRSteamSupplyImpl#getColdLegFBLagTC <em>Cold Leg FB Lag TC</em>}</li>
* </ul>
*
* @generated
*/
public class PWRSteamSupplyImpl extends SteamSupplyImpl implements PWRSteamSupply {
/**
* The default value of the '{@link #getHotLegToColdLegGain() <em>Hot Leg To Cold Leg Gain</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHotLegToColdLegGain()
* @generated
* @ordered
*/
protected static final float HOT_LEG_TO_COLD_LEG_GAIN_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getHotLegToColdLegGain() <em>Hot Leg To Cold Leg Gain</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHotLegToColdLegGain()
* @generated
* @ordered
*/
protected float hotLegToColdLegGain = HOT_LEG_TO_COLD_LEG_GAIN_EDEFAULT;
/**
* The default value of the '{@link #getPressureCG() <em>Pressure CG</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPressureCG()
* @generated
* @ordered
*/
protected static final float PRESSURE_CG_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getPressureCG() <em>Pressure CG</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPressureCG()
* @generated
* @ordered
*/
protected float pressureCG = PRESSURE_CG_EDEFAULT;
/**
* The default value of the '{@link #getHotLegLagTC() <em>Hot Leg Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHotLegLagTC()
* @generated
* @ordered
*/
protected static final float HOT_LEG_LAG_TC_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getHotLegLagTC() <em>Hot Leg Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHotLegLagTC()
* @generated
* @ordered
*/
protected float hotLegLagTC = HOT_LEG_LAG_TC_EDEFAULT;
/**
* The default value of the '{@link #getSteamPressureDropLagTC() <em>Steam Pressure Drop Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSteamPressureDropLagTC()
* @generated
* @ordered
*/
protected static final float STEAM_PRESSURE_DROP_LAG_TC_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getSteamPressureDropLagTC() <em>Steam Pressure Drop Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSteamPressureDropLagTC()
* @generated
* @ordered
*/
protected float steamPressureDropLagTC = STEAM_PRESSURE_DROP_LAG_TC_EDEFAULT;
/**
* The default value of the '{@link #getThrottlePressureSP() <em>Throttle Pressure SP</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getThrottlePressureSP()
* @generated
* @ordered
*/
protected static final float THROTTLE_PRESSURE_SP_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getThrottlePressureSP() <em>Throttle Pressure SP</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getThrottlePressureSP()
* @generated
* @ordered
*/
protected float throttlePressureSP = THROTTLE_PRESSURE_SP_EDEFAULT;
/**
* The default value of the '{@link #getColdLegFBLeadTC2() <em>Cold Leg FB Lead TC2</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFBLeadTC2()
* @generated
* @ordered
*/
protected static final float COLD_LEG_FB_LEAD_TC2_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getColdLegFBLeadTC2() <em>Cold Leg FB Lead TC2</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFBLeadTC2()
* @generated
* @ordered
*/
protected float coldLegFBLeadTC2 = COLD_LEG_FB_LEAD_TC2_EDEFAULT;
/**
* The default value of the '{@link #getColdLegFBLeadTC1() <em>Cold Leg FB Lead TC1</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFBLeadTC1()
* @generated
* @ordered
*/
protected static final float COLD_LEG_FB_LEAD_TC1_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getColdLegFBLeadTC1() <em>Cold Leg FB Lead TC1</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFBLeadTC1()
* @generated
* @ordered
*/
protected float coldLegFBLeadTC1 = COLD_LEG_FB_LEAD_TC1_EDEFAULT;
/**
* The default value of the '{@link #getColdLegLagTC() <em>Cold Leg Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegLagTC()
* @generated
* @ordered
*/
protected static final float COLD_LEG_LAG_TC_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getColdLegLagTC() <em>Cold Leg Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegLagTC()
* @generated
* @ordered
*/
protected float coldLegLagTC = COLD_LEG_LAG_TC_EDEFAULT;
/**
* The default value of the '{@link #getSteamFlowFG() <em>Steam Flow FG</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSteamFlowFG()
* @generated
* @ordered
*/
protected static final float STEAM_FLOW_FG_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getSteamFlowFG() <em>Steam Flow FG</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSteamFlowFG()
* @generated
* @ordered
*/
protected float steamFlowFG = STEAM_FLOW_FG_EDEFAULT;
/**
* The default value of the '{@link #getCoreHTLagTC2() <em>Core HT Lag TC2</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreHTLagTC2()
* @generated
* @ordered
*/
protected static final float CORE_HT_LAG_TC2_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getCoreHTLagTC2() <em>Core HT Lag TC2</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreHTLagTC2()
* @generated
* @ordered
*/
protected float coreHTLagTC2 = CORE_HT_LAG_TC2_EDEFAULT;
/**
* The default value of the '{@link #getCoreHTLagTC1() <em>Core HT Lag TC1</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreHTLagTC1()
* @generated
* @ordered
*/
protected static final float CORE_HT_LAG_TC1_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getCoreHTLagTC1() <em>Core HT Lag TC1</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreHTLagTC1()
* @generated
* @ordered
*/
protected float coreHTLagTC1 = CORE_HT_LAG_TC1_EDEFAULT;
/**
* The default value of the '{@link #getCoreNeutronicsEffTC() <em>Core Neutronics Eff TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreNeutronicsEffTC()
* @generated
* @ordered
*/
protected static final float CORE_NEUTRONICS_EFF_TC_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getCoreNeutronicsEffTC() <em>Core Neutronics Eff TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreNeutronicsEffTC()
* @generated
* @ordered
*/
protected float coreNeutronicsEffTC = CORE_NEUTRONICS_EFF_TC_EDEFAULT;
/**
* The default value of the '{@link #getSteamPressureFG() <em>Steam Pressure FG</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSteamPressureFG()
* @generated
* @ordered
*/
protected static final float STEAM_PRESSURE_FG_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getSteamPressureFG() <em>Steam Pressure FG</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getSteamPressureFG()
* @generated
* @ordered
*/
protected float steamPressureFG = STEAM_PRESSURE_FG_EDEFAULT;
/**
* The default value of the '{@link #getFeedbackFactor() <em>Feedback Factor</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getFeedbackFactor()
* @generated
* @ordered
*/
protected static final float FEEDBACK_FACTOR_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getFeedbackFactor() <em>Feedback Factor</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getFeedbackFactor()
* @generated
* @ordered
*/
protected float feedbackFactor = FEEDBACK_FACTOR_EDEFAULT;
/**
* The default value of the '{@link #getHotLegSteamGain() <em>Hot Leg Steam Gain</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHotLegSteamGain()
* @generated
* @ordered
*/
protected static final float HOT_LEG_STEAM_GAIN_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getHotLegSteamGain() <em>Hot Leg Steam Gain</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHotLegSteamGain()
* @generated
* @ordered
*/
protected float hotLegSteamGain = HOT_LEG_STEAM_GAIN_EDEFAULT;
/**
* The default value of the '{@link #getCoreNeutronicsHT() <em>Core Neutronics HT</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreNeutronicsHT()
* @generated
* @ordered
*/
protected static final float CORE_NEUTRONICS_HT_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getCoreNeutronicsHT() <em>Core Neutronics HT</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCoreNeutronicsHT()
* @generated
* @ordered
*/
protected float coreNeutronicsHT = CORE_NEUTRONICS_HT_EDEFAULT;
/**
* The default value of the '{@link #getColdLegFG1() <em>Cold Leg FG1</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFG1()
* @generated
* @ordered
*/
protected static final float COLD_LEG_FG1_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getColdLegFG1() <em>Cold Leg FG1</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFG1()
* @generated
* @ordered
*/
protected float coldLegFG1 = COLD_LEG_FG1_EDEFAULT;
/**
* The default value of the '{@link #getColdLegFG2() <em>Cold Leg FG2</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFG2()
* @generated
* @ordered
*/
protected static final float COLD_LEG_FG2_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getColdLegFG2() <em>Cold Leg FG2</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFG2()
* @generated
* @ordered
*/
protected float coldLegFG2 = COLD_LEG_FG2_EDEFAULT;
/**
* The default value of the '{@link #getThrottlePressureFactor() <em>Throttle Pressure Factor</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getThrottlePressureFactor()
* @generated
* @ordered
*/
protected static final float THROTTLE_PRESSURE_FACTOR_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getThrottlePressureFactor() <em>Throttle Pressure Factor</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getThrottlePressureFactor()
* @generated
* @ordered
*/
protected float throttlePressureFactor = THROTTLE_PRESSURE_FACTOR_EDEFAULT;
/**
* The default value of the '{@link #getColdLegFBLagTC() <em>Cold Leg FB Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFBLagTC()
* @generated
* @ordered
*/
protected static final float COLD_LEG_FB_LAG_TC_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getColdLegFBLagTC() <em>Cold Leg FB Lag TC</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getColdLegFBLagTC()
* @generated
* @ordered
*/
protected float coldLegFBLagTC = COLD_LEG_FB_LAG_TC_EDEFAULT;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected PWRSteamSupplyImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return GenerationDynamicsPackage.Literals.PWR_STEAM_SUPPLY;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getHotLegToColdLegGain() {
return hotLegToColdLegGain;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setHotLegToColdLegGain(float newHotLegToColdLegGain) {
float oldHotLegToColdLegGain = hotLegToColdLegGain;
hotLegToColdLegGain = newHotLegToColdLegGain;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_TO_COLD_LEG_GAIN, oldHotLegToColdLegGain, hotLegToColdLegGain));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getPressureCG() {
return pressureCG;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setPressureCG(float newPressureCG) {
float oldPressureCG = pressureCG;
pressureCG = newPressureCG;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__PRESSURE_CG, oldPressureCG, pressureCG));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getHotLegLagTC() {
return hotLegLagTC;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setHotLegLagTC(float newHotLegLagTC) {
float oldHotLegLagTC = hotLegLagTC;
hotLegLagTC = newHotLegLagTC;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_LAG_TC, oldHotLegLagTC, hotLegLagTC));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getSteamPressureDropLagTC() {
return steamPressureDropLagTC;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setSteamPressureDropLagTC(float newSteamPressureDropLagTC) {
float oldSteamPressureDropLagTC = steamPressureDropLagTC;
steamPressureDropLagTC = newSteamPressureDropLagTC;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_DROP_LAG_TC, oldSteamPressureDropLagTC, steamPressureDropLagTC));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getThrottlePressureSP() {
return throttlePressureSP;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setThrottlePressureSP(float newThrottlePressureSP) {
float oldThrottlePressureSP = throttlePressureSP;
throttlePressureSP = newThrottlePressureSP;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_SP, oldThrottlePressureSP, throttlePressureSP));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getColdLegFBLeadTC2() {
return coldLegFBLeadTC2;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setColdLegFBLeadTC2(float newColdLegFBLeadTC2) {
float oldColdLegFBLeadTC2 = coldLegFBLeadTC2;
coldLegFBLeadTC2 = newColdLegFBLeadTC2;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC2, oldColdLegFBLeadTC2, coldLegFBLeadTC2));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getColdLegFBLeadTC1() {
return coldLegFBLeadTC1;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setColdLegFBLeadTC1(float newColdLegFBLeadTC1) {
float oldColdLegFBLeadTC1 = coldLegFBLeadTC1;
coldLegFBLeadTC1 = newColdLegFBLeadTC1;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC1, oldColdLegFBLeadTC1, coldLegFBLeadTC1));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getColdLegLagTC() {
return coldLegLagTC;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setColdLegLagTC(float newColdLegLagTC) {
float oldColdLegLagTC = coldLegLagTC;
coldLegLagTC = newColdLegLagTC;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_LAG_TC, oldColdLegLagTC, coldLegLagTC));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getSteamFlowFG() {
return steamFlowFG;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setSteamFlowFG(float newSteamFlowFG) {
float oldSteamFlowFG = steamFlowFG;
steamFlowFG = newSteamFlowFG;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_FLOW_FG, oldSteamFlowFG, steamFlowFG));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getCoreHTLagTC2() {
return coreHTLagTC2;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setCoreHTLagTC2(float newCoreHTLagTC2) {
float oldCoreHTLagTC2 = coreHTLagTC2;
coreHTLagTC2 = newCoreHTLagTC2;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC2, oldCoreHTLagTC2, coreHTLagTC2));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getCoreHTLagTC1() {
return coreHTLagTC1;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setCoreHTLagTC1(float newCoreHTLagTC1) {
float oldCoreHTLagTC1 = coreHTLagTC1;
coreHTLagTC1 = newCoreHTLagTC1;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC1, oldCoreHTLagTC1, coreHTLagTC1));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getCoreNeutronicsEffTC() {
return coreNeutronicsEffTC;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setCoreNeutronicsEffTC(float newCoreNeutronicsEffTC) {
float oldCoreNeutronicsEffTC = coreNeutronicsEffTC;
coreNeutronicsEffTC = newCoreNeutronicsEffTC;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_EFF_TC, oldCoreNeutronicsEffTC, coreNeutronicsEffTC));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getSteamPressureFG() {
return steamPressureFG;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setSteamPressureFG(float newSteamPressureFG) {
float oldSteamPressureFG = steamPressureFG;
steamPressureFG = newSteamPressureFG;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_FG, oldSteamPressureFG, steamPressureFG));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getFeedbackFactor() {
return feedbackFactor;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setFeedbackFactor(float newFeedbackFactor) {
float oldFeedbackFactor = feedbackFactor;
feedbackFactor = newFeedbackFactor;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__FEEDBACK_FACTOR, oldFeedbackFactor, feedbackFactor));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getHotLegSteamGain() {
return hotLegSteamGain;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setHotLegSteamGain(float newHotLegSteamGain) {
float oldHotLegSteamGain = hotLegSteamGain;
hotLegSteamGain = newHotLegSteamGain;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_STEAM_GAIN, oldHotLegSteamGain, hotLegSteamGain));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getCoreNeutronicsHT() {
return coreNeutronicsHT;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setCoreNeutronicsHT(float newCoreNeutronicsHT) {
float oldCoreNeutronicsHT = coreNeutronicsHT;
coreNeutronicsHT = newCoreNeutronicsHT;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_HT, oldCoreNeutronicsHT, coreNeutronicsHT));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getColdLegFG1() {
return coldLegFG1;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setColdLegFG1(float newColdLegFG1) {
float oldColdLegFG1 = coldLegFG1;
coldLegFG1 = newColdLegFG1;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG1, oldColdLegFG1, coldLegFG1));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getColdLegFG2() {
return coldLegFG2;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setColdLegFG2(float newColdLegFG2) {
float oldColdLegFG2 = coldLegFG2;
coldLegFG2 = newColdLegFG2;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG2, oldColdLegFG2, coldLegFG2));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getThrottlePressureFactor() {
return throttlePressureFactor;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setThrottlePressureFactor(float newThrottlePressureFactor) {
float oldThrottlePressureFactor = throttlePressureFactor;
throttlePressureFactor = newThrottlePressureFactor;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_FACTOR, oldThrottlePressureFactor, throttlePressureFactor));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public float getColdLegFBLagTC() {
return coldLegFBLagTC;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setColdLegFBLagTC(float newColdLegFBLagTC) {
float oldColdLegFBLagTC = coldLegFBLagTC;
coldLegFBLagTC = newColdLegFBLagTC;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LAG_TC, oldColdLegFBLagTC, coldLegFBLagTC));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_TO_COLD_LEG_GAIN:
return getHotLegToColdLegGain();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__PRESSURE_CG:
return getPressureCG();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_LAG_TC:
return getHotLegLagTC();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_DROP_LAG_TC:
return getSteamPressureDropLagTC();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_SP:
return getThrottlePressureSP();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC2:
return getColdLegFBLeadTC2();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC1:
return getColdLegFBLeadTC1();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_LAG_TC:
return getColdLegLagTC();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_FLOW_FG:
return getSteamFlowFG();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC2:
return getCoreHTLagTC2();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC1:
return getCoreHTLagTC1();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_EFF_TC:
return getCoreNeutronicsEffTC();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_FG:
return getSteamPressureFG();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__FEEDBACK_FACTOR:
return getFeedbackFactor();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_STEAM_GAIN:
return getHotLegSteamGain();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_HT:
return getCoreNeutronicsHT();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG1:
return getColdLegFG1();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG2:
return getColdLegFG2();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_FACTOR:
return getThrottlePressureFactor();
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LAG_TC:
return getColdLegFBLagTC();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_TO_COLD_LEG_GAIN:
setHotLegToColdLegGain((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__PRESSURE_CG:
setPressureCG((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_LAG_TC:
setHotLegLagTC((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_DROP_LAG_TC:
setSteamPressureDropLagTC((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_SP:
setThrottlePressureSP((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC2:
setColdLegFBLeadTC2((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC1:
setColdLegFBLeadTC1((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_LAG_TC:
setColdLegLagTC((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_FLOW_FG:
setSteamFlowFG((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC2:
setCoreHTLagTC2((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC1:
setCoreHTLagTC1((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_EFF_TC:
setCoreNeutronicsEffTC((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_FG:
setSteamPressureFG((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__FEEDBACK_FACTOR:
setFeedbackFactor((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_STEAM_GAIN:
setHotLegSteamGain((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_HT:
setCoreNeutronicsHT((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG1:
setColdLegFG1((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG2:
setColdLegFG2((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_FACTOR:
setThrottlePressureFactor((Float)newValue);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LAG_TC:
setColdLegFBLagTC((Float)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_TO_COLD_LEG_GAIN:
setHotLegToColdLegGain(HOT_LEG_TO_COLD_LEG_GAIN_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__PRESSURE_CG:
setPressureCG(PRESSURE_CG_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_LAG_TC:
setHotLegLagTC(HOT_LEG_LAG_TC_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_DROP_LAG_TC:
setSteamPressureDropLagTC(STEAM_PRESSURE_DROP_LAG_TC_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_SP:
setThrottlePressureSP(THROTTLE_PRESSURE_SP_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC2:
setColdLegFBLeadTC2(COLD_LEG_FB_LEAD_TC2_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC1:
setColdLegFBLeadTC1(COLD_LEG_FB_LEAD_TC1_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_LAG_TC:
setColdLegLagTC(COLD_LEG_LAG_TC_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_FLOW_FG:
setSteamFlowFG(STEAM_FLOW_FG_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC2:
setCoreHTLagTC2(CORE_HT_LAG_TC2_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC1:
setCoreHTLagTC1(CORE_HT_LAG_TC1_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_EFF_TC:
setCoreNeutronicsEffTC(CORE_NEUTRONICS_EFF_TC_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_FG:
setSteamPressureFG(STEAM_PRESSURE_FG_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__FEEDBACK_FACTOR:
setFeedbackFactor(FEEDBACK_FACTOR_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_STEAM_GAIN:
setHotLegSteamGain(HOT_LEG_STEAM_GAIN_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_HT:
setCoreNeutronicsHT(CORE_NEUTRONICS_HT_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG1:
setColdLegFG1(COLD_LEG_FG1_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG2:
setColdLegFG2(COLD_LEG_FG2_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_FACTOR:
setThrottlePressureFactor(THROTTLE_PRESSURE_FACTOR_EDEFAULT);
return;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LAG_TC:
setColdLegFBLagTC(COLD_LEG_FB_LAG_TC_EDEFAULT);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_TO_COLD_LEG_GAIN:
return hotLegToColdLegGain != HOT_LEG_TO_COLD_LEG_GAIN_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__PRESSURE_CG:
return pressureCG != PRESSURE_CG_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_LAG_TC:
return hotLegLagTC != HOT_LEG_LAG_TC_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_DROP_LAG_TC:
return steamPressureDropLagTC != STEAM_PRESSURE_DROP_LAG_TC_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_SP:
return throttlePressureSP != THROTTLE_PRESSURE_SP_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC2:
return coldLegFBLeadTC2 != COLD_LEG_FB_LEAD_TC2_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LEAD_TC1:
return coldLegFBLeadTC1 != COLD_LEG_FB_LEAD_TC1_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_LAG_TC:
return coldLegLagTC != COLD_LEG_LAG_TC_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_FLOW_FG:
return steamFlowFG != STEAM_FLOW_FG_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC2:
return coreHTLagTC2 != CORE_HT_LAG_TC2_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_HT_LAG_TC1:
return coreHTLagTC1 != CORE_HT_LAG_TC1_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_EFF_TC:
return coreNeutronicsEffTC != CORE_NEUTRONICS_EFF_TC_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__STEAM_PRESSURE_FG:
return steamPressureFG != STEAM_PRESSURE_FG_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__FEEDBACK_FACTOR:
return feedbackFactor != FEEDBACK_FACTOR_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__HOT_LEG_STEAM_GAIN:
return hotLegSteamGain != HOT_LEG_STEAM_GAIN_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__CORE_NEUTRONICS_HT:
return coreNeutronicsHT != CORE_NEUTRONICS_HT_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG1:
return coldLegFG1 != COLD_LEG_FG1_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FG2:
return coldLegFG2 != COLD_LEG_FG2_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__THROTTLE_PRESSURE_FACTOR:
return throttlePressureFactor != THROTTLE_PRESSURE_FACTOR_EDEFAULT;
case GenerationDynamicsPackage.PWR_STEAM_SUPPLY__COLD_LEG_FB_LAG_TC:
return coldLegFBLagTC != COLD_LEG_FB_LAG_TC_EDEFAULT;
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (hotLegToColdLegGain: ");
result.append(hotLegToColdLegGain);
result.append(", pressureCG: ");
result.append(pressureCG);
result.append(", hotLegLagTC: ");
result.append(hotLegLagTC);
result.append(", steamPressureDropLagTC: ");
result.append(steamPressureDropLagTC);
result.append(", throttlePressureSP: ");
result.append(throttlePressureSP);
result.append(", coldLegFBLeadTC2: ");
result.append(coldLegFBLeadTC2);
result.append(", coldLegFBLeadTC1: ");
result.append(coldLegFBLeadTC1);
result.append(", coldLegLagTC: ");
result.append(coldLegLagTC);
result.append(", steamFlowFG: ");
result.append(steamFlowFG);
result.append(", coreHTLagTC2: ");
result.append(coreHTLagTC2);
result.append(", coreHTLagTC1: ");
result.append(coreHTLagTC1);
result.append(", coreNeutronicsEffTC: ");
result.append(coreNeutronicsEffTC);
result.append(", steamPressureFG: ");
result.append(steamPressureFG);
result.append(", feedbackFactor: ");
result.append(feedbackFactor);
result.append(", hotLegSteamGain: ");
result.append(hotLegSteamGain);
result.append(", coreNeutronicsHT: ");
result.append(coreNeutronicsHT);
result.append(", coldLegFG1: ");
result.append(coldLegFG1);
result.append(", coldLegFG2: ");
result.append(coldLegFG2);
result.append(", throttlePressureFactor: ");
result.append(throttlePressureFactor);
result.append(", coldLegFBLagTC: ");
result.append(coldLegFBLagTC);
result.append(')');
return result.toString();
}
} //PWRSteamSupplyImpl
|
|
package org.jimmutable.cloud;
import java.net.InetAddress;
import java.net.UnknownHostException;
//import org.apache.logging.log4j.Level;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.jimmutable.cloud.cache.CacheRedis;
import org.jimmutable.cloud.cache.CacheStub;
import org.jimmutable.cloud.cache.ICache;
import org.jimmutable.cloud.cache.redis.LowLevelRedisDriver;
import org.jimmutable.cloud.elasticsearch.ElasticSearchEndpoint;
import org.jimmutable.cloud.elasticsearch.ElasticSearchRESTClient;
import org.jimmutable.cloud.elasticsearch.ISearch;
import org.jimmutable.cloud.elasticsearch.StubSearch;
import org.jimmutable.cloud.email.EmailStub;
import org.jimmutable.cloud.email.IEmail;
import org.jimmutable.cloud.email.SESClient;
import org.jimmutable.cloud.messaging.queue.IQueue;
import org.jimmutable.cloud.messaging.queue.QueueRedis;
import org.jimmutable.cloud.messaging.queue.QueueStub;
import org.jimmutable.cloud.messaging.signal.ISignal;
import org.jimmutable.cloud.messaging.signal.SignalRedis;
import org.jimmutable.cloud.messaging.signal.SignalStub;
import org.jimmutable.cloud.storage.IStorage;
import org.jimmutable.cloud.storage.StandardImmutableObjectCache;
import org.jimmutable.cloud.storage.StorageDevLocalFileSystem;
import org.jimmutable.cloud.storage.StubStorage;
import org.jimmutable.cloud.storage.s3.RegionSpecificAmazonS3ClientFactory;
import org.jimmutable.cloud.storage.s3.StorageS3;
import org.jimmutable.cloud.utils.ApplicationHeartbeatUtils;
import org.jimmutable.core.serialization.JimmutableTypeNameRegister;
/**
* Configures environment and application specific settings, to be used by other
* classes. This includes EnvironmentType, ApplicationId, and logging settings.
*
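* Typical usage (an illustrative sketch only; the ApplicationId values below are
* placeholders, not real application names):
*
* <pre>{@code
* CloudExecutionEnvironment.startup(new ApplicationId("my-app"), new ApplicationId("my-app-web"), EnvironmentType.DEV);
* IStorage storage = CloudExecutionEnvironment.getSimpleCurrent().getSimpleStorage();
* }</pre>
*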
* @author trevorbox
*
*/
public class CloudExecutionEnvironment
{
private static Logger logger = LoggerFactory.getLogger(CloudExecutionEnvironment.class);
private static CloudExecutionEnvironment CURRENT;
private ISearch search;
private IStorage storage;
private IQueue queue_service;
private ISignal signal_service;
private IEmail email_service;
private ICache cache_service;
// System properties
private static final String ENV_TYPE_VARIABLE_NAME = "JIMMUTABLE_ENV_TYPE";
private static final String DISABLE_STANDARD_IMMUTABLE_OBJECT_CACHE = "DISABLE_STANDARD_IMMUTABLE_OBJECT_CACHE";
private static EnvironmentType ENV_TYPE;
private static ApplicationId APPLICATION_ID;
private static ApplicationId APPLICATION_SUB_SERVICE_ID;
private static StandardImmutableObjectCache STANDARD_IMMUTABLE_OBJECT_CACHE;
public static final String STAGING_POST_FIX = "-staging";
private CloudExecutionEnvironment( ISearch search, IStorage storage, IQueue queue_service, ISignal signal_service, IEmail email_service, ICache cache_service )
{
this.search = search;
this.storage = storage;
this.queue_service = queue_service;
this.signal_service = signal_service;
this.email_service = email_service;
this.cache_service = cache_service;
}
public EnvironmentType getSimpleEnvironmentType()
{
return ENV_TYPE;
}
public ApplicationId getSimpleApplicationId()
{
return APPLICATION_ID;
}
public ApplicationId getSimpleApplicationServiceId()
{
return APPLICATION_SUB_SERVICE_ID;
}
public static void validate()
{
if ( APPLICATION_ID == null )
{
throw new RuntimeException("No Passed in APPLICATION_ID! Add the application ID to startup to handle it correctly.");
}
if ( APPLICATION_SUB_SERVICE_ID == null )
{
throw new RuntimeException("No Passed in APPLICATION_SUB_SERVICE_ID! Add the application's service ID to startup to handle it correctly.");
}
if ( ENV_TYPE == null )
{
throw new RuntimeException("No Passed in EnvironmentType! Add the environment to startup to handle it correctly.");
}
}
/**
* Search instance used for document upsert and searching of indices
*
* @return The Search instance
*/
public ISearch getSimpleSearch()
{
return search;
}
/**
* Storage system the application uses to store objects
*
* @return Storage The storage system
*/
public IStorage getSimpleStorage()
{
return storage;
}
public IQueue getSimpleQueueService()
{
return queue_service;
}
public ISignal getSimpleSignalService()
{
return signal_service;
}
public IEmail getSimpleEmailService()
{
return email_service;
}
public ICache getSimpleCacheService()
{
return cache_service;
}
private static SESClient getSESClient()
{
try
{
return new SESClient(SESClient.getClient());
}
catch ( Exception e )
{
logger.error("Failed to created email client!", e);
throw new RuntimeException("Failed to created email client!");
}
}
/**
*
* ONLY CALL THIS METHOD ONCE
*
* Startup must be called within the main method of the application, before any
* other use of this class, and wires up search, storage, messaging, email and
* caching for the given environment type. Callers that want to derive the
* environment type from the JIMMUTABLE_ENV_TYPE system property can use
* getEnvironmentTypeFromSystemProperty(EnvironmentType) to obtain the value to
* pass in.
*
* @param application_id
* the overall application name that all services that make up the
* application can share
* @param application_sub_service_id
* a service that is a part of the application_id. For instance, if we
* have an application_id of "AdRocket", we may then also have sub
* services within "AdRocket" like "AdRocket-Web" and
* "AdRocket-Ad-Processor".
*
* @param env_type
* the EnvironmentType (DEV, STAGING, PRODUCTION or STUB) to start up in
*/
@SuppressWarnings("resource")
public static void startup( ApplicationId application_id, ApplicationId application_sub_service_id, EnvironmentType env_type )
{
if ( CURRENT != null )
{
throw new RuntimeException("Startup has already been called!");
}
// register objects
ENV_TYPE = env_type;
APPLICATION_ID = application_id;
APPLICATION_SUB_SERVICE_ID = application_sub_service_id;
validate();
logger.info(String.format("ApplicationID=%s APPLICATION_SUB_SERVICE_ID:%s Environment=%s", APPLICATION_ID, APPLICATION_SUB_SERVICE_ID, ENV_TYPE));
LowLevelRedisDriver redis_driver = new LowLevelRedisDriver();
CacheRedis redis = new CacheRedis(APPLICATION_ID, redis_driver);
// Pulls from system property DISABLE_STANDARD_IMMUTABLE_OBJECT_CACHE, default
// is to leave it enabled unless flag passed in.
boolean should_disable_sio_cache = shouldDisableSIOCacheFromSystemProperty();
STANDARD_IMMUTABLE_OBJECT_CACHE = new StandardImmutableObjectCache(redis, "storagecache", StandardImmutableObjectCache.DEFAULT_ALLOWED_ENTRY_AGE_IN_MS, should_disable_sio_cache);
switch ( env_type )
{
case DEV:
checkOs();
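// The transport client below appears to exist only to fail fast when the configured
// Elasticsearch host cannot be resolved; actual search calls go through ElasticSearchRESTClient.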
TransportClient dev_client = null;
try
{
dev_client = new PreBuiltTransportClient(Settings.EMPTY).addTransportAddress(new TransportAddress(InetAddress.getByName(ElasticSearchEndpoint.CURRENT.getSimpleHost()), ElasticSearchEndpoint.CURRENT.getSimplePort()));
}
catch ( UnknownHostException e )
{
logger.error("Failed to instantiate the elasticsearch client!", e);
}
if ( dev_client == null )
{
throw new RuntimeException("Failed to instantiate the elasticsearch client!");
}
CURRENT = new CloudExecutionEnvironment(new ElasticSearchRESTClient(), new StorageDevLocalFileSystem(false, APPLICATION_ID, STANDARD_IMMUTABLE_OBJECT_CACHE), new QueueRedis(APPLICATION_ID, redis_driver), new SignalRedis(APPLICATION_ID, redis_driver), getSESClient(), redis);
break;
// For now, staging is the same as dev
case STAGING:
checkOs();
TransportClient staging_client = null;
try
{
staging_client = new PreBuiltTransportClient(Settings.EMPTY).addTransportAddress(new TransportAddress(InetAddress.getByName(ElasticSearchEndpoint.CURRENT.getSimpleHost()), ElasticSearchEndpoint.CURRENT.getSimplePort()));
}
catch ( UnknownHostException e )
{
logger.error("Failed to instantiate the elasticsearch client!", e);
}
if ( staging_client == null )
{
throw new RuntimeException("Failed to instantiate the elasticsearch client!");
}
/*
* For our staging mode we still use S3, but we use a different bucket that is
* meant to be synced up nightly
*/
ApplicationId staging_application_id = createStagingApplicationIDComplex(APPLICATION_ID, null);
if ( staging_application_id == null )
{
throw new RuntimeException("Failed to create staging application ID for Storage!");
}
StorageS3 staging_storage = new StorageS3(RegionSpecificAmazonS3ClientFactory.defaultFactory(), staging_application_id, STANDARD_IMMUTABLE_OBJECT_CACHE, false);
staging_storage.upsertBucketIfNeeded();
CURRENT = new CloudExecutionEnvironment(new ElasticSearchRESTClient(), staging_storage, new QueueRedis(APPLICATION_ID, redis_driver), new SignalRedis(APPLICATION_ID, redis_driver), getSESClient(), redis);
break;
case PRODUCTION:
checkOs();
logger.info("Starting production environment");
StorageS3 production_storage = new StorageS3(RegionSpecificAmazonS3ClientFactory.defaultFactory(), APPLICATION_ID, STANDARD_IMMUTABLE_OBJECT_CACHE, false);
production_storage.upsertBucketIfNeeded();
CURRENT = new CloudExecutionEnvironment(new ElasticSearchRESTClient(), production_storage, new QueueRedis(APPLICATION_ID, redis_driver), new SignalRedis(APPLICATION_ID, redis_driver), getSESClient(), new CacheRedis(APPLICATION_ID, redis_driver));
break;
case STUB:
checkOs();
CURRENT = new CloudExecutionEnvironment(new StubSearch(), new StubStorage(), new QueueStub(), new SignalStub(), new EmailStub(), new CacheStub());
break;
default:
throw new RuntimeException(String.format("Unhandled EnvironmentType: %s! Add the environment to startup to handle it correctly.", env_type));
}
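// Shared post-startup wiring for every environment type: type registration, heartbeat,
// uncaught-exception handling and cache listeners.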
JimmutableTypeNameRegister.registerAllTypes();
JimmutableCloudTypeNameRegister.registerAllTypes();
ApplicationHeartbeatUtils.setupHeartbeat(application_id, application_sub_service_id);
setDefaultUncaughtExceptionHandler();
STANDARD_IMMUTABLE_OBJECT_CACHE.createListeners();
}
/*
* This is an implementation of an uncaught exception handler so that these
* types of exceptions bubble up into our cloud logging solution as we expect
* rather than going to sys.err.
*/
private static void setDefaultUncaughtExceptionHandler()
{
try
{
Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler()
{
@Override
public void uncaughtException( Thread t, Throwable e )
{
logger.error("Uncaught exception detected in thread " + t, e);
}
});
}
catch ( SecurityException e )
{
logger.error("Could not set the Default Uncaught Exception Handler", e);
}
}
/**
* This creates a staging-specific application ID for Storage.
*/
public static ApplicationId createStagingApplicationIDComplex( ApplicationId application_id, ApplicationId default_value )
{
try
{
return new ApplicationId(application_id.getSimpleValue() + STAGING_POST_FIX);
}
catch ( Exception e )
{
logger.error(String.format("Could not create staging post fix for application ID %s. Dying now.", application_id.getSimpleValue()), e);
return default_value;
}
}
/**
* Use this for running unit tests. Search, Storage, and Messaging are just stub
* classes and throw Runtime Exceptions if their methods are called.
*
* @param application_id
* The ApplicationId
*/
public static void startupStubTest( ApplicationId application_id )
{
startup(application_id, new ApplicationId("stub"), EnvironmentType.STUB);
}
/**
* Use this for running integration tests. Search, Storage, and Messaging are
* initialized to the Dev instances. Any local clients needed for Search,
* Storage or Messaging should be running.
*
* @param application_id
* The ApplicationId
*/
public static void startupIntegrationTest( ApplicationId application_id )
{
startup(application_id, new ApplicationId("integration"), EnvironmentType.DEV);
}
/**
* Get the environment type from the system property JIMMUTABLE_ENV_TYPE. If the
* property is missing or invalid, the default value is returned.
*
* @param default_value The value to return when the property is missing or invalid
* @return The EnvironmentType from the system property, or the default_value
*/
public static EnvironmentType getEnvironmentTypeFromSystemProperty( EnvironmentType default_value )
{
String env_level = System.getProperty(ENV_TYPE_VARIABLE_NAME);
if ( env_level == null )
env_level = System.getProperty(ENV_TYPE_VARIABLE_NAME.toLowerCase());
if ( env_level != null )
{
EnvironmentType tmp_type = null;
try
{
env_level = env_level.toUpperCase();
tmp_type = EnvironmentType.valueOf(env_level);
}
catch ( Exception e )
{
logger.error(String.format("Invalid Environment type %s using default type %s", env_level, tmp_type));
return default_value;
}
return tmp_type;
}
return default_value;
}
/**
* Returns whether the user wants the standard immutable object cache fully
* disabled at startup. When disabled, Redis stores no standard immutable object
* data and every fetch goes directly to Storage. This exists mostly to ease
* development; the cache should always be ON by default.
*
* Pass in "-DDISABLE_STANDARD_IMMUTABLE_OBJECT_CACHE=true" to turn it off.
*/
private static boolean shouldDisableSIOCacheFromSystemProperty()
{
String disable_cache_prop = System.getProperty(DISABLE_STANDARD_IMMUTABLE_OBJECT_CACHE);
if ( disable_cache_prop == null )
disable_cache_prop = System.getProperty(DISABLE_STANDARD_IMMUTABLE_OBJECT_CACHE.toLowerCase());
if ( disable_cache_prop == null )
{
return false;
}
return "TRUE".equalsIgnoreCase(disable_cache_prop);
}
private static void checkOs()
{
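// Logs an error for unrecognized operating systems but does not abort startup.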
String operating_system_property = System.getProperty("os.name");
if ( operating_system_property != null )
{
String os = operating_system_property.toLowerCase();
if ( os.indexOf("win") < 0 && os.indexOf("mac") < 0 && os.indexOf("linux") < 0)
{
logger.error(String.format("Unexpected operating system (%s) detected for %s environment!", os, ENV_TYPE));
}
}
else
{
logger.error("Failed to detect operating system!");
}
}
public StandardImmutableObjectCache getSimpleCache()
{
return STANDARD_IMMUTABLE_OBJECT_CACHE;
}
/**
* MAKE SURE YOU CALLED STARTUP ONCE BEFORE
*
* Make sure your application calls startup first to set up the singleton. If
* CURRENT is not set, a RuntimeException is thrown.
*
* @return the cloud execution environment
*/
static public CloudExecutionEnvironment getSimpleCurrent()
{
if ( CURRENT == null )
{
throw new RuntimeException("The startup method was never called first to setup the singleton!");
}
return CURRENT;
}
}
|
|
/*
* Copyright 2014-2019 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron;
import io.aeron.logbuffer.LogBufferDescriptor;
import org.agrona.CloseHelper;
import org.agrona.IoUtil;
import org.agrona.ManagedResource;
import org.agrona.concurrent.UnsafeBuffer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.util.EnumSet;
import static io.aeron.logbuffer.LogBufferDescriptor.*;
import static java.nio.channels.FileChannel.MapMode.READ_WRITE;
import static java.nio.file.StandardOpenOption.READ;
import static java.nio.file.StandardOpenOption.SPARSE;
import static java.nio.file.StandardOpenOption.WRITE;
/**
* Takes a log file name and maps the file into memory and wraps it with {@link UnsafeBuffer}s as appropriate.
*
* @see io.aeron.logbuffer.LogBufferDescriptor
*/
public class LogBuffers implements AutoCloseable, ManagedResource
{
private static final EnumSet<StandardOpenOption> FILE_OPTIONS = EnumSet.of(READ, WRITE, SPARSE);
private static final FileAttribute<?>[] NO_ATTRIBUTES = new FileAttribute[0];
private long timeOfLastStateChangeNs;
private int refCount;
private final int termLength;
private final FileChannel fileChannel;
private final ByteBuffer[] termBuffers = new ByteBuffer[PARTITION_COUNT];
private final UnsafeBuffer logMetaDataBuffer;
private final MappedByteBuffer[] mappedByteBuffers;
/**
* Construct the log buffers for a given log file.
*
* @param logFileName to be mapped.
*/
public LogBuffers(final String logFileName)
{
try
{
fileChannel = FileChannel.open(Paths.get(logFileName), FILE_OPTIONS, NO_ATTRIBUTES);
final long logLength = fileChannel.size();
if (logLength < Integer.MAX_VALUE)
{
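// The whole log fits in a single mapping: map it once, wrap the trailing metadata
// section, then slice one term buffer per partition.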
final MappedByteBuffer mappedBuffer = fileChannel.map(READ_WRITE, 0, logLength);
mappedBuffer.order(ByteOrder.LITTLE_ENDIAN);
mappedByteBuffers = new MappedByteBuffer[]{ mappedBuffer };
logMetaDataBuffer = new UnsafeBuffer(
mappedBuffer, (int)(logLength - LOG_META_DATA_LENGTH), LOG_META_DATA_LENGTH);
final int termLength = LogBufferDescriptor.termLength(logMetaDataBuffer);
final int pageSize = LogBufferDescriptor.pageSize(logMetaDataBuffer);
checkTermLength(termLength);
checkPageSize(pageSize);
this.termLength = termLength;
for (int i = 0; i < PARTITION_COUNT; i++)
{
final int offset = i * termLength;
mappedBuffer.limit(offset + termLength).position(offset);
termBuffers[i] = mappedBuffer.slice();
}
}
else
{
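// The log is too large for a single mapping, so map the metadata section and each term
// buffer separately, assuming the maximum term length and validating it against the
// metadata below.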
mappedByteBuffers = new MappedByteBuffer[PARTITION_COUNT + 1];
final int assumedTermLength = TERM_MAX_LENGTH;
final long metaDataSectionOffset = assumedTermLength * (long)PARTITION_COUNT;
final long metaDataMappingLength = logLength - metaDataSectionOffset;
final MappedByteBuffer metaDataMappedBuffer = fileChannel.map(
READ_WRITE, metaDataSectionOffset, metaDataMappingLength);
metaDataMappedBuffer.order(ByteOrder.LITTLE_ENDIAN);
mappedByteBuffers[LOG_META_DATA_SECTION_INDEX] = metaDataMappedBuffer;
logMetaDataBuffer = new UnsafeBuffer(
metaDataMappedBuffer,
(int)metaDataMappingLength - LOG_META_DATA_LENGTH,
LOG_META_DATA_LENGTH);
final int metaDataTermLength = LogBufferDescriptor.termLength(logMetaDataBuffer);
final int pageSize = LogBufferDescriptor.pageSize(logMetaDataBuffer);
checkPageSize(pageSize);
if (metaDataTermLength != assumedTermLength)
{
throw new IllegalStateException(
"assumed term length " + assumedTermLength +
" does not match metadata: termLength=" + metaDataTermLength);
}
this.termLength = assumedTermLength;
for (int i = 0; i < PARTITION_COUNT; i++)
{
final long position = assumedTermLength * (long)i;
final MappedByteBuffer mappedBuffer = fileChannel.map(READ_WRITE, position, assumedTermLength);
mappedBuffer.order(ByteOrder.LITTLE_ENDIAN);
mappedByteBuffers[i] = mappedBuffer;
termBuffers[i] = mappedBuffer;
}
}
}
catch (final IOException ex)
{
throw new RuntimeException(ex);
}
catch (final IllegalStateException ex)
{
close();
throw ex;
}
}
/**
* Duplicate the underlying {@link ByteBuffer}s and wrap them for thread local access.
*
* @return duplicates of the wrapped underlying {@link ByteBuffer}s.
*/
public UnsafeBuffer[] duplicateTermBuffers()
{
final UnsafeBuffer[] buffers = new UnsafeBuffer[PARTITION_COUNT];
for (int i = 0; i < PARTITION_COUNT; i++)
{
buffers[i] = new UnsafeBuffer(termBuffers[i].duplicate().order(ByteOrder.LITTLE_ENDIAN));
}
return buffers;
}
/**
* Get the buffer which holds the log metadata.
*
* @return the buffer which holds the log metadata.
*/
public UnsafeBuffer metaDataBuffer()
{
return logMetaDataBuffer;
}
/**
* The {@link FileChannel} for the mapped log.
*
* @return the {@link FileChannel} for the mapped log.
*/
public FileChannel fileChannel()
{
return fileChannel;
}
public void close()
{
CloseHelper.close(fileChannel);
for (final MappedByteBuffer buffer : mappedByteBuffers)
{
IoUtil.unmap(buffer);
}
}
/**
* The length of the term buffer in each log partition.
*
* @return length of the term buffer in each log partition.
*/
public int termLength()
{
return termLength;
}
public int incRef()
{
return ++refCount;
}
public int decRef()
{
return --refCount;
}
public void timeOfLastStateChange(final long timeNs)
{
timeOfLastStateChangeNs = timeNs;
}
public long timeOfLastStateChange()
{
return timeOfLastStateChangeNs;
}
public void delete()
{
close();
}
}
|
|
package com.example.streetrats.genie;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.afollestad.materialdialogs.MaterialDialog;
import com.example.streetrats.genie.rest.BuyProductRequest;
import com.example.streetrats.genie.rest.GenieService;
import com.example.streetrats.genie.rest.Product;
import com.example.streetrats.genie.rest.RestClient;
import com.facebook.Session;
import com.google.analytics.tracking.android.EasyTracker;
import com.google.analytics.tracking.android.MapBuilder;
import com.nispok.snackbar.Snackbar;
import com.nispok.snackbar.SnackbarManager;
import com.nispok.snackbar.enums.SnackbarType;
import com.squareup.picasso.Picasso;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.DecimalFormat;
import java.util.List;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;
//import me.drakeet.materialdialog.MaterialDialog;
public class ProductsFriendAdapter extends RecyclerView.Adapter<ProductsFriendAdapter.ProductFriendViewHolder> {
private Context context;
private List<Product> productList;
private static final int RESULT_OK = 1;
private static final int RESULT_CANCELED = 0;
RestClient restClient;
GenieService genieService;
public ProductsFriendAdapter(Context context, List<Product> productList) {
this.productList = productList;
this.context = context;
restClient = new RestClient();
genieService = restClient.getGenieService();
}
@Override
public int getItemCount() {
return productList.size();
}
@Override
public void onBindViewHolder(ProductFriendViewHolder productViewHolder, int i) {
final int position = i;
final Product p = productList.get(i);
productViewHolder.vName.setText(p.name);
Picasso.with(productViewHolder.context)
.load(p.image)
.placeholder(R.drawable.product)
.error(R.drawable.product)
.into(productViewHolder.vImage);
if (p.bought) {
productViewHolder.vBought.setVisibility(View.VISIBLE);
} else {
productViewHolder.vBought.setVisibility(View.GONE);
}
productViewHolder.vDelete.setVisibility(View.GONE);
productViewHolder.vImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// May return null if an EasyTracker has not yet been initialized with a
// property ID.
EasyTracker easyTracker = EasyTracker.getInstance(context);
// MapBuilder.createEvent().build() returns a Map of event fields and values
// that are set and sent with the hit.
easyTracker.send(MapBuilder
.createEvent("ui_action", // Event category (required)
"Product Image Clicked", // Event action (required)
"list_product_image", // Event label
null) // Event value
.build()
);
LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View view = inflater.inflate(R.layout.item_image, null);
ImageView itemImage = (ImageView) view.findViewById(R.id.item_image);
Picasso.with(view.getContext())
.load(p.image)
.placeholder(R.drawable.product)
.error(R.drawable.product)
.into(itemImage);
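// The fully qualified name below avoids a clash with com.afollestad.materialdialogs.MaterialDialog,
// which is imported above for the product-info dialog.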
final me.drakeet.materialdialog.MaterialDialog dialog = new me.drakeet.materialdialog.MaterialDialog(context);
dialog.setView(view);
dialog.setNegativeButton("Close", new View.OnClickListener() {
@Override
public void onClick(View v) {
dialog.dismiss();
}
});
dialog.setCanceledOnTouchOutside(true);
dialog.show();
}
});
productViewHolder.vName.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// May return null if an EasyTracker has not yet been initialized with a
// property ID.
EasyTracker easyTracker = EasyTracker.getInstance(context);
// MapBuilder.createEvent().build() returns a Map of event fields and values
// that are set and sent with the hit.
easyTracker.send(MapBuilder
.createEvent("ui_action", // Event category (required)
"Product Info Pressed", // Event action (required)
"list_product_name", // Event label
null) // Event value
.build()
);
JSONObject features = null;
StringBuilder result = new StringBuilder();
result.append("Wished By: " + p.owner_first_name + " " + p.owner_last_name + '\n');
DecimalFormat df = new DecimalFormat("0.00");
String price = df.format(p.price);
result.append('\n' + "Price: $" + price + '\n');
result.append('\n' + "Item Information:" + '\n');
try {
features = new JSONObject(p.features);
for(int i = 0; i < features.names().length(); i++) {
result.append(features.names().getString(i) + " : " + features.get(features.names().getString(i)) + '\n');
}
} catch (JSONException e) {
// Features could not be parsed; show the dialog without the feature details.
}
final MaterialDialog.Builder dialog = new MaterialDialog.Builder(context)
.title(p.name)
.content(result)
.positiveText("Grant")
.negativeText("Close")
.callback(new MaterialDialog.ButtonCallback() {
@Override
public void onPositive(MaterialDialog dialog) {
// May return null if an EasyTracker has not yet been initialized with a
// property ID.
EasyTracker easyTracker = EasyTracker.getInstance(context);
// MapBuilder.createEvent().build() returns a Map of event fields and values
// that are set and sent with the hit.
easyTracker.send(MapBuilder
.createEvent("ui_action", // Event category (required)
"Product Bought", // Event action (required)
"Grant", // Event label
null) // Event value
.build()
);
buyProduct(p._id, position, p.owner_first_name, p.owner);
}
});
dialog.build();
dialog.show();
}
});
}
@Override
public ProductFriendViewHolder onCreateViewHolder(ViewGroup viewGroup, int i) {
View productView = LayoutInflater.
from(viewGroup.getContext()).
inflate(R.layout.list_products, viewGroup, false);
ProductFriendViewHolder vh = new ProductFriendViewHolder(productView);
return vh;
}
public static class ProductFriendViewHolder extends RecyclerView.ViewHolder {
protected View vView;
protected TextView vName;
protected ImageView vImage;
protected ImageView vDelete;
protected ImageView vBought;
protected Context context;
public ProductFriendViewHolder(View v) {
super(v);
vView = v;
vName = (TextView) v.findViewById(R.id.list_product_name);
vImage = (ImageView) v.findViewById(R.id.list_product_image);
vDelete = (ImageView) v.findViewById(R.id.product_delete_btn);
vBought = (ImageView) v.findViewById(R.id.product_bought);
context = v.getContext();
}
}
public void buyProduct(final String product_id, int _position, String _owner, String _owner_id) {
if(restClient == null || genieService == null) {
return;
}
final int position = _position;
final String owner = _owner;
final String owner_id = _owner_id;
genieService.buyProduct(new BuyProductRequest(product_id, Session.getActiveSession().getAccessToken().toString()), new Callback<Product>() {
@Override
public void success(Product product, Response response) {
SnackbarManager.show(
Snackbar.with(context) // context
.type(SnackbarType.MULTI_LINE) // Set it as a multi-line snackbar
.text("You Granted " + owner + "'s Wish") // text to be displayed
.duration(Snackbar.SnackbarDuration.LENGTH_SHORT) // make it shorter
.animation(false) // don't animate it
, (HomeActivity) context); // where it is displayed
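// Mark the product as bought, drop it from this friend's visible wish list and refresh the adapter.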
productList.get(position).bought = true;
productList.remove(position);
notifyDataSetChanged();
/*ParsePush push = new ParsePush();
//push.setChannel("channel54fbd7f5e09f1f030039fb75");
push.setChannel("channel" + owner_id);
push.setMessage("Your Wish Has Been Granted!");
push.sendInBackground(new SendCallback() {
@Override
public void done(ParseException e) {
if(e == null) {
Log.d("push", "success");
}
else {
Log.d("push", "failure");
}
}
});*/
}
@Override
public void failure(RetrofitError retrofitError) {
SnackbarManager.show(
Snackbar.with(context) // context
.type(SnackbarType.MULTI_LINE) // Set it as a multi-line snackbar
.text("Could Not Buy Item. Something Went Wrong.") // text to be displayed
.duration(Snackbar.SnackbarDuration.LENGTH_SHORT) // make it shorter
.animation(false) // don't animate it
, (HomeActivity) context); // where it is displayed
System.out.println(retrofitError.getMessage());
}
});
}
}
|
|
package org.apollo.game.model;
/**
* Represents a single movement direction.
*
* @author Graham
*/
public enum Direction {
/**
* No movement.
*/
NONE(-1),
/**
* North west movement.
*/
NORTH_WEST(0),
/**
* North movement.
*/
NORTH(1),
/**
* North east movement.
*/
NORTH_EAST(2),
/**
* West movement.
*/
WEST(3),
/**
* East movement.
*/
EAST(4),
/**
* South west movement.
*/
SOUTH_WEST(5),
/**
* South movement.
*/
SOUTH(6),
/**
* South east movement.
*/
SOUTH_EAST(7);
/**
* An empty direction array.
*/
public static final Direction[] EMPTY_DIRECTION_ARRAY = new Direction[0];
/**
* An array of directions without any diagonal directions.
*/
public final static Direction[] NESW = { NORTH, EAST, SOUTH, WEST };
/**
* An array of directions without any diagonal directions, shifted one step counter-clockwise, as used
* by the client's collision mapping.
*/
public final static Direction[] WNES = { WEST, NORTH, EAST, SOUTH };
/**
* An array of diagonal directions, shifted one step counter-clockwise, as used by the client's
* collision mapping.
*/
public final static Direction[] WNES_DIAGONAL = { NORTH_WEST, NORTH_EAST, SOUTH_EAST, SOUTH_WEST};
/**
* Gets the Direction between the two {@link Position}s.
*
* @param current The current position.
* @param next The next position.
* @return The direction.
*/
public static Direction between(Position current, Position next) {
int deltaX = next.getX() - current.getX();
int deltaY = next.getY() - current.getY();
return fromDeltas(deltaX, deltaY);
}
/**
* Creates a direction from the differences between X and Y.
*
* @param deltaX The difference between two X coordinates.
* @param deltaY The difference between two Y coordinates.
* @return The direction.
*/
public static Direction fromDeltas(int deltaX, int deltaY) {
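// deltaX and deltaY are each expected to be -1, 0 or 1; any other combination falls
// through to the IllegalArgumentException below.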
if (deltaY == 1) {
if (deltaX == 1) {
return NORTH_EAST;
} else if (deltaX == 0) {
return NORTH;
} else if (deltaX == -1) {
return NORTH_WEST;
}
} else if (deltaY == -1) {
if (deltaX == 1) {
return SOUTH_EAST;
} else if (deltaX == 0) {
return SOUTH;
} else if (deltaX == -1) {
return SOUTH_WEST;
}
} else if (deltaY == 0) {
if (deltaX == 1) {
return EAST;
} else if (deltaX == 0) {
return NONE;
} else if (deltaX == -1) {
return WEST;
}
}
throw new IllegalArgumentException("Difference between Positions must be [-1, 1].");
}
/**
* Get the two directions which make up a diagonal direction (e.g., NORTH and EAST for NORTH_EAST).
*
* @param direction The direction to get the components for.
* @return The components for the given direction.
*/
public static Direction[] diagonalComponents(Direction direction) {
switch (direction) {
case NORTH_EAST:
return new Direction[] { NORTH, EAST };
case NORTH_WEST:
return new Direction[] { NORTH, WEST };
case SOUTH_EAST:
return new Direction[] { SOUTH, EAST };
case SOUTH_WEST:
return new Direction[] { SOUTH, WEST };
}
throw new IllegalArgumentException("Must provide a diagonal direction.");
}
/**
* The direction as an integer.
*/
private final int intValue;
/**
* Creates the direction.
*
* @param intValue The direction as an integer.
*/
Direction(int intValue) {
this.intValue = intValue;
}
/**
* Gets the opposite of this direction.
*
* @return The opposite direction.
*/
public Direction opposite() {
switch (this) {
case NORTH:
return SOUTH;
case SOUTH:
return NORTH;
case EAST:
return WEST;
case WEST:
return EAST;
case NORTH_WEST:
return SOUTH_EAST;
case NORTH_EAST:
return SOUTH_WEST;
case SOUTH_EAST:
return NORTH_WEST;
case SOUTH_WEST:
return NORTH_EAST;
}
return NONE;
}
/**
* Gets the X delta from a {@link Position} of (0, 0).
*
* @return The delta of X from (0, 0).
*/
public int deltaX() {
switch (this) {
case SOUTH_EAST:
case NORTH_EAST:
case EAST:
return 1;
case SOUTH_WEST:
case NORTH_WEST:
case WEST:
return -1;
}
return 0;
}
/**
* Gets the Y delta from a {@link Position} of (0, 0).
*
* @return The delta of Y from (0, 0).
*/
public int deltaY() {
switch (this) {
case NORTH_WEST:
case NORTH_EAST:
case NORTH:
return 1;
case SOUTH_WEST:
case SOUTH_EAST:
case SOUTH:
return -1;
}
return 0;
}
/**
* Check if this direction is a diagonal direction.
*
* @return {@code true} if this direction is a diagonal direction, {@code false} otherwise.
*/
public boolean isDiagonal() {
return this == SOUTH_EAST || this == SOUTH_WEST || this == NORTH_EAST || this == NORTH_WEST;
}
/**
* Gets the direction as an integer which the client can understand.
*
* @return The movement as an integer.
*/
public int toInteger() {
return intValue;
}
/**
* Gets the direction as an orientation integer, as used in the client maps (WNES as opposed to NESW).
*
* @return The direction as an integer.
*/
public int toOrientationInteger() {
switch(this) {
case WEST:
case NORTH_WEST:
return 0;
case NORTH:
case NORTH_EAST:
return 1;
case EAST:
case SOUTH_EAST:
return 2;
case SOUTH:
case SOUTH_WEST:
return 3;
default:
throw new IllegalStateException("Only a valid direction can have an orientation value");
}
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.options.colors.pages;
import com.intellij.application.options.colors.InspectionColorSettingsPage;
import com.intellij.codeHighlighting.RainbowHighlighter;
import com.intellij.ide.highlighter.JavaFileHighlighter;
import com.intellij.ide.highlighter.JavaHighlightingColors;
import com.intellij.lang.Language;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.options.OptionsBundle;
import com.intellij.openapi.options.colors.AttributesDescriptor;
import com.intellij.openapi.options.colors.ColorDescriptor;
import com.intellij.openapi.options.colors.RainbowColorSettingsPage;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.codeStyle.DisplayPriority;
import com.intellij.psi.codeStyle.DisplayPrioritySortable;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Map;
public class JavaColorSettingsPage implements RainbowColorSettingsPage, InspectionColorSettingsPage, DisplayPrioritySortable {
private static final AttributesDescriptor[] ourDescriptors = {
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.keyword"), JavaHighlightingColors.KEYWORD),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.number"), JavaHighlightingColors.NUMBER),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.string"), JavaHighlightingColors.STRING),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.valid.escape.in.string"), JavaHighlightingColors.VALID_STRING_ESCAPE),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.invalid.escape.in.string"), JavaHighlightingColors.INVALID_STRING_ESCAPE),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.operator.sign"), JavaHighlightingColors.OPERATION_SIGN),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.parentheses"), JavaHighlightingColors.PARENTHESES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.braces"), JavaHighlightingColors.BRACES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.brackets"), JavaHighlightingColors.BRACKETS),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.comma"), JavaHighlightingColors.COMMA),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.semicolon"), JavaHighlightingColors.JAVA_SEMICOLON),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.dot"), JavaHighlightingColors.DOT),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.line.comment"), JavaHighlightingColors.LINE_COMMENT),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.block.comment"), JavaHighlightingColors.JAVA_BLOCK_COMMENT),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.javadoc.comment"), JavaHighlightingColors.DOC_COMMENT),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.javadoc.tag"), JavaHighlightingColors.DOC_COMMENT_TAG),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.javadoc.tag.value"), JavaHighlightingColors.DOC_COMMENT_TAG_VALUE),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.javadoc.markup"), JavaHighlightingColors.DOC_COMMENT_MARKUP),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.class"), JavaHighlightingColors.CLASS_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.anonymous.class"), JavaHighlightingColors.ANONYMOUS_CLASS_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.type.parameter"), JavaHighlightingColors.TYPE_PARAMETER_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.abstract.class"), JavaHighlightingColors.ABSTRACT_CLASS_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.interface"), JavaHighlightingColors.INTERFACE_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.enum"), JavaHighlightingColors.ENUM_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.local.variable"), JavaHighlightingColors.LOCAL_VARIABLE_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.reassigned.local.variable"), JavaHighlightingColors.REASSIGNED_LOCAL_VARIABLE_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.reassigned.parameter"), JavaHighlightingColors.REASSIGNED_PARAMETER_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.implicit.anonymous.parameter"), JavaHighlightingColors.IMPLICIT_ANONYMOUS_CLASS_PARAMETER_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.instance.field"), JavaHighlightingColors.INSTANCE_FIELD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.instance.final.field"), JavaHighlightingColors.INSTANCE_FINAL_FIELD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.static.field"), JavaHighlightingColors.STATIC_FIELD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.static.final.field"), JavaHighlightingColors.STATIC_FINAL_FIELD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.parameter"), JavaHighlightingColors.PARAMETER_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.lambda.parameter"), JavaHighlightingColors.LAMBDA_PARAMETER_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.method.call"), JavaHighlightingColors.METHOD_CALL_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.method.declaration"), JavaHighlightingColors.METHOD_DECLARATION_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.constructor.call"), JavaHighlightingColors.CONSTRUCTOR_CALL_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.constructor.declaration"), JavaHighlightingColors.CONSTRUCTOR_DECLARATION_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.static.method"), JavaHighlightingColors.STATIC_METHOD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.abstract.method"), JavaHighlightingColors.ABSTRACT_METHOD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.inherited.method"), JavaHighlightingColors.INHERITED_METHOD_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.annotation.name"), JavaHighlightingColors.ANNOTATION_NAME_ATTRIBUTES),
new AttributesDescriptor(OptionsBundle.message("options.java.attribute.descriptor.annotation.attribute.name"), JavaHighlightingColors.ANNOTATION_ATTRIBUTE_NAME_ATTRIBUTES)
};
@NonNls private static final Map<String, TextAttributesKey> ourTags = RainbowHighlighter.createRainbowHLM();
static {
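// Maps the tag names used in the getDemoText() markup to the text attributes they preview
// (returned from getAdditionalHighlightingTagToDescriptorMap()).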
ourTags.put("field", JavaHighlightingColors.INSTANCE_FIELD_ATTRIBUTES);
ourTags.put("unusedField", CodeInsightColors.NOT_USED_ELEMENT_ATTRIBUTES);
ourTags.put("error", CodeInsightColors.ERRORS_ATTRIBUTES);
ourTags.put("warning", CodeInsightColors.WARNINGS_ATTRIBUTES);
ourTags.put("weak_warning", CodeInsightColors.WEAK_WARNING_ATTRIBUTES);
ourTags.put("server_problems", CodeInsightColors.GENERIC_SERVER_ERROR_OR_WARNING);
ourTags.put("server_duplicate", CodeInsightColors.DUPLICATE_FROM_SERVER);
ourTags.put("unknownType", CodeInsightColors.WRONG_REFERENCES_ATTRIBUTES);
ourTags.put("localVar", JavaHighlightingColors.LOCAL_VARIABLE_ATTRIBUTES);
ourTags.put("reassignedLocalVar", JavaHighlightingColors.REASSIGNED_LOCAL_VARIABLE_ATTRIBUTES);
ourTags.put("reassignedParameter", JavaHighlightingColors.REASSIGNED_PARAMETER_ATTRIBUTES);
ourTags.put("implicitAnonymousParameter", JavaHighlightingColors.IMPLICIT_ANONYMOUS_CLASS_PARAMETER_ATTRIBUTES);
ourTags.put("static", JavaHighlightingColors.STATIC_FIELD_ATTRIBUTES);
ourTags.put("static_final", JavaHighlightingColors.STATIC_FINAL_FIELD_ATTRIBUTES);
ourTags.put("deprecated", CodeInsightColors.DEPRECATED_ATTRIBUTES);
ourTags.put("constructorCall", JavaHighlightingColors.CONSTRUCTOR_CALL_ATTRIBUTES);
ourTags.put("constructorDeclaration", JavaHighlightingColors.CONSTRUCTOR_DECLARATION_ATTRIBUTES);
ourTags.put("methodCall", JavaHighlightingColors.METHOD_CALL_ATTRIBUTES);
ourTags.put("methodDeclaration", JavaHighlightingColors.METHOD_DECLARATION_ATTRIBUTES);
ourTags.put("static_method", JavaHighlightingColors.STATIC_METHOD_ATTRIBUTES);
ourTags.put("abstract_method", JavaHighlightingColors.ABSTRACT_METHOD_ATTRIBUTES);
ourTags.put("inherited_method", JavaHighlightingColors.INHERITED_METHOD_ATTRIBUTES);
ourTags.put("param", JavaHighlightingColors.PARAMETER_ATTRIBUTES);
ourTags.put("lambda_param", JavaHighlightingColors.LAMBDA_PARAMETER_ATTRIBUTES);
ourTags.put("class", JavaHighlightingColors.CLASS_NAME_ATTRIBUTES);
ourTags.put("anonymousClass", JavaHighlightingColors.ANONYMOUS_CLASS_NAME_ATTRIBUTES);
ourTags.put("typeParameter", JavaHighlightingColors.TYPE_PARAMETER_NAME_ATTRIBUTES);
ourTags.put("abstractClass", JavaHighlightingColors.ABSTRACT_CLASS_NAME_ATTRIBUTES);
ourTags.put("interface", JavaHighlightingColors.INTERFACE_NAME_ATTRIBUTES);
ourTags.put("enum", JavaHighlightingColors.ENUM_NAME_ATTRIBUTES);
ourTags.put("annotationName", JavaHighlightingColors.ANNOTATION_NAME_ATTRIBUTES);
ourTags.put("annotationAttributeName", JavaHighlightingColors.ANNOTATION_ATTRIBUTE_NAME_ATTRIBUTES);
ourTags.put("javadocTagValue", JavaHighlightingColors.DOC_COMMENT_TAG_VALUE);
ourTags.put("instanceFinalField", JavaHighlightingColors.INSTANCE_FINAL_FIELD_ATTRIBUTES);
}
@Override
@NotNull
public String getDisplayName() {
return OptionsBundle.message("options.java.display.name");
}
@Override
public Icon getIcon() {
return StdFileTypes.JAVA.getIcon();
}
@Override
@NotNull
public AttributesDescriptor[] getAttributeDescriptors() {
return ourDescriptors;
}
@Override
@NotNull
public ColorDescriptor[] getColorDescriptors() {
return ColorDescriptor.EMPTY_ARRAY;
}
@Override
@NotNull
public SyntaxHighlighter getHighlighter() {
return new JavaFileHighlighter(LanguageLevel.HIGHEST);
}
@Override
@NotNull
public String getDemoText() {
return
"/* Block comment */\n" +
"import <class>java.util.Date</class>;\n" +
"/**\n" +
" * Doc comment here for <code>SomeClass</code>\n" +
" * @param <javadocTagValue>T</javadocTagValue> type parameter\n" +
" * @see <class>Math</class>#<methodCall>sin</methodCall>(double)\n" +
" */\n" +
"<annotationName>@Annotation</annotationName> (<annotationAttributeName>name</annotationAttributeName>=value)\n" +
"public class <class>SomeClass</class><<typeParameter>T</typeParameter> extends <interface>Runnable</interface>> { // some comment\n" +
" private <typeParameter>T</typeParameter> <field>field</field> = null;\n" +
" private double <unusedField>unusedField</unusedField> = 12345.67890;\n" +
" private <unknownType>UnknownType</unknownType> <field>anotherString</field> = \"Another\\nStrin\\g\";\n" +
" public static int <static>staticField</static> = 0;\n" +
" public final int <instanceFinalField>instanceFinalField</instanceFinalField> = 0;\n" +
"\n" +
" /**\n" +
" * " + ApplicationBundle.message("rainbow.option.panel.display.name") + ":\n" +
" * " + RainbowHighlighter.generatePaletteExample() + "\n" +
" * @param <javadocTagValue>param1</javadocTagValue>\n" +
" * @param <javadocTagValue>reassignedParam</javadocTagValue>\n" +
" * @param <javadocTagValue>param2</javadocTagValue>\n" +
" * @param <javadocTagValue>param3</javadocTagValue>\n" +
" */\n" +
" public <constructorDeclaration>SomeClass</constructorDeclaration>(<interface>AnInterface</interface> <param>param1</param>, int[] <reassignedParameter>reassignedParam</reassignedParameter>,\n" +
" int <param>param2</param>\n" +
" int <param>param3</param>) {\n" +
" int <reassignedLocalVar>reassignedValue</reassignedLocalVar> = this.<warning>staticField</warning> + <param>param2</param> + <param>param3</param>;\n" +
" long <localVar>localVar1</localVar>, <localVar>localVar2</localVar>, <localVar>localVar3</localVar>, <localVar>localVar4</localVar>;\n" +
" <error>int <localVar>localVar</localVar> = \"IntelliJ\"</error>; // Error, incompatible types\n" +
" <class>System</class>.<static>out</static>.<methodCall>println</methodCall>(<field>anotherString</field> + <inherited_method>toString</inherited_method>() + <localVar>localVar</localVar>);\n" +
" long <localVar>time</localVar> = <class>Date</class>.<static_method><deprecated>parse</deprecated></static_method>(\"1.2.3\"); // Method is deprecated\n" +
" <reassignedLocalVar>reassignedValue</reassignedLocalVar> ++; \n" +
" <field>field</field>.<abstract_method>run</abstract_method>(); \n" +
" new <anonymousClass>SomeClass</anonymousClass>() {\n" +
" {\n" +
" int <localVar>a</localVar> = <implicitAnonymousParameter>localVar</implicitAnonymousParameter>;\n" +
" }\n" +
" };\n" +
" <reassignedParameter>reassignedParam</reassignedParameter> = new <constructorCall>ArrayList</constructorCall><<class>String</class>>().toArray(new int[0]);\n" +
" }\n" +
"}\n" +
"enum <enum>AnEnum</enum> { <static_final>CONST1</static_final>, <static_final>CONST2</static_final> }\n" +
"interface <interface>AnInterface</interface> {\n" +
" int <static_final>CONSTANT</static_final> = 2;\n" +
" void <methodDeclaration>method</methodDeclaration>();\n" +
"}\n" +
"abstract class <abstractClass>SomeAbstractClass</abstractClass> {\n" +
"}";
}
@Override
public Map<String,TextAttributesKey> getAdditionalHighlightingTagToDescriptorMap() {
return ourTags;
}
@Override
public DisplayPriority getPriority() {
return DisplayPriority.KEY_LANGUAGE_SETTINGS;
}
@Override
public boolean isRainbowType(TextAttributesKey type) {
return JavaHighlightingColors.LOCAL_VARIABLE_ATTRIBUTES.equals(type)
|| JavaHighlightingColors.REASSIGNED_LOCAL_VARIABLE_ATTRIBUTES.equals(type)
|| JavaHighlightingColors.PARAMETER_ATTRIBUTES.equals(type)
|| JavaHighlightingColors.REASSIGNED_PARAMETER_ATTRIBUTES.equals(type)
|| JavaHighlightingColors.DOC_COMMENT_TAG_VALUE.equals(type);
}
@Nullable
@Override
public Language getLanguage() {
return JavaLanguage.INSTANCE;
}
}
|
|
/**
* Copyright 2012 multibit.org
*
* Licensed under the MIT license (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.multibit.viewsystem.swing.action;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.IOException;
import java.nio.CharBuffer;
import javax.swing.Action;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import javax.swing.JPasswordField;
import javax.swing.SwingWorker;
import org.bitcoinj.wallet.Protos.Wallet.EncryptionType;
import org.multibit.controller.Controller;
import org.multibit.controller.bitcoin.BitcoinController;
import org.multibit.file.PrivateKeysHandler;
import org.multibit.file.Verification;
import org.multibit.model.bitcoin.WalletData;
import org.multibit.model.bitcoin.WalletBusyListener;
import org.multibit.utils.ImageLoader;
import org.multibit.viewsystem.swing.MultiBitFrame;
import org.multibit.viewsystem.swing.view.panels.ExportPrivateKeysPanel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.Arrays;
import com.google.bitcoin.core.MultiBitBlockChain;
import com.google.bitcoin.crypto.KeyCrypterException;
/**
* This {@link Action} exports the active wallet's private keys.
*/
public class ExportPrivateKeysSubmitAction extends MultiBitSubmitAction implements WalletBusyListener {
private static final Logger log = LoggerFactory.getLogger(ExportPrivateKeysSubmitAction.class);
private static final long serialVersionUID = 1923492460598757765L;
private ExportPrivateKeysPanel exportPrivateKeysPanel;
private MultiBitFrame mainFrame;
private PrivateKeysHandler privateKeysHandler;
private JPasswordField walletPassword;
private JPasswordField exportFilePassword;
private JPasswordField exportFileRepeatPassword;
/**
* Creates a new {@link ExportPrivateKeysSubmitAction}.
*/
public ExportPrivateKeysSubmitAction(BitcoinController bitcoinController, ExportPrivateKeysPanel exportPrivateKeysPanel,
ImageIcon icon, JPasswordField walletPassword, JPasswordField exportFilePassword, JPasswordField exportFileRepeatPassword, MultiBitFrame mainFrame) {
super(bitcoinController, "showExportPrivateKeysAction.text.camel", "showExportPrivateKeysAction.tooltip", "showExportPrivateKeysAction.mnemonicKey", icon);
this.exportPrivateKeysPanel = exportPrivateKeysPanel;
this.walletPassword = walletPassword;
this.exportFilePassword = exportFilePassword;
this.exportFileRepeatPassword = exportFileRepeatPassword;
this.mainFrame = mainFrame;
// This action is a WalletBusyListener.
super.bitcoinController.registerWalletBusyListener(this);
walletBusyChange(super.bitcoinController.getModel().getActivePerWalletModelData().isBusy());
}
/**
* Export the private keys to a file.
*/
@Override
public void actionPerformed(ActionEvent e) {
if (abort()) {
return;
}
exportPrivateKeysPanel.clearMessages();
// See if a wallet password is required and present.
if (super.bitcoinController.getModel().getActiveWallet() != null
&& super.bitcoinController.getModel().getActiveWallet().getEncryptionType() == EncryptionType.ENCRYPTED_SCRYPT_AES) {
if (walletPassword.getPassword() == null || walletPassword.getPassword().length == 0) {
exportPrivateKeysPanel.setMessage1(controller.getLocaliser().getString(
"showExportPrivateKeysAction.youMustEnterTheWalletPassword"));
return;
}
try {
// See if the password is the correct wallet password.
if (!super.bitcoinController.getModel().getActiveWallet().checkPassword(CharBuffer.wrap(walletPassword.getPassword()))) {
// The password supplied is incorrect.
exportPrivateKeysPanel.setMessage1(controller.getLocaliser().getString(
"createNewReceivingAddressSubmitAction.passwordIsIncorrect"));
exportPrivateKeysPanel.setMessage2(" ");
return;
}
} catch (KeyCrypterException kce) {
exportPrivateKeysPanel.setMessage1(controller.getLocaliser().getString(
"createNewReceivingAddressSubmitAction.passwordIsIncorrect"));
exportPrivateKeysPanel.setMessage2(" ");
}
}
// Get the required output file.
String exportPrivateKeysFilename = exportPrivateKeysPanel.getOutputFilename();
// Check an output file was selected.
if (exportPrivateKeysFilename == null || "".equals(exportPrivateKeysFilename)) {
exportPrivateKeysPanel.setMessage1(controller.getLocaliser().getString(
"showExportPrivateKeysAction.youMustSelectAnOutputFile"));
return;
}
File exportPrivateKeysFile = new File(exportPrivateKeysFilename);
privateKeysHandler = new PrivateKeysHandler(super.bitcoinController.getModel().getNetworkParameters());
boolean performEncryptionOfExportFile = false;
CharSequence exportPasswordToUse = null;
if (exportPrivateKeysPanel.requiresEncryption()) {
// Get the passwords on the export file password fields.
if (exportFilePassword.getPassword() == null || exportFilePassword.getPassword().length == 0) {
// Notify must enter a password.
exportPrivateKeysPanel.setMessage1(controller.getLocaliser()
.getString("showExportPrivateKeysAction.enterPasswords"));
return;
} else {
if (!Arrays.areEqual(exportFilePassword.getPassword(), exportFileRepeatPassword.getPassword())) {
// Notify user passwords are different.
exportPrivateKeysPanel.setMessage1(controller.getLocaliser().getString(
"showExportPrivateKeysAction.passwordsAreDifferent"));
return;
} else {
// Perform encryption.
performEncryptionOfExportFile = true;
exportPasswordToUse = CharBuffer.wrap(exportFilePassword.getPassword());
}
}
}
// Check on file overwrite.
if (exportPrivateKeysFile.exists()) {
String yesText = controller.getLocaliser().getString("showOpenUriView.yesText");
String noText = controller.getLocaliser().getString("showOpenUriView.noText");
String questionText = controller.getLocaliser().getString("showExportPrivateKeysAction.thisFileExistsOverwrite",
new Object[] { exportPrivateKeysFile.getName() });
String questionTitle = controller.getLocaliser().getString("showExportPrivateKeysAction.thisFileExistsOverwriteTitle");
int selection = JOptionPane.showOptionDialog(mainFrame, questionText, questionTitle, JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE, ImageLoader.createImageIcon(ImageLoader.QUESTION_MARK_ICON_FILE), new String[] {
yesText, noText }, noText);
if (selection != JOptionPane.YES_OPTION) {
return;
}
}
// Double check wallet is not busy then declare that the active wallet
// is busy with the task
WalletData perWalletModelData = super.bitcoinController.getModel().getActivePerWalletModelData();
if (!perWalletModelData.isBusy()) {
perWalletModelData.setBusy(true);
perWalletModelData.setBusyTaskKey("showExportPrivateKeysAction.text.camel");
exportPrivateKeysPanel.setMessage1(controller.getLocaliser().getString(
"exportPrivateKeysSubmitAction.exportingPrivateKeys"));
exportPrivateKeysPanel.setMessage2("");
super.bitcoinController.fireWalletBusyChange(true);
CharSequence walletPasswordToUse = null;
if (walletPassword.getPassword() != null) {
walletPasswordToUse = CharBuffer.wrap(walletPassword.getPassword());
}
exportPrivateKeysInBackground(exportPrivateKeysFile, performEncryptionOfExportFile, exportPasswordToUse,
walletPasswordToUse);
}
}
/**
* Export the private keys in a background Swing worker thread.
*/
private void exportPrivateKeysInBackground(final File exportPrivateKeysFile, final boolean performEncryptionOfExportFile,
final CharSequence exportPasswordToUse, final CharSequence walletPassword) {
final WalletData finalPerWalletModelData = super.bitcoinController.getModel().getActivePerWalletModelData();
final ExportPrivateKeysPanel finalExportPanel = exportPrivateKeysPanel;
final BitcoinController finalBitcoinController = super.bitcoinController;
SwingWorker<Boolean, Void> worker = new SwingWorker<Boolean, Void>() {
private String uiMessage1 = null;
private String uiMessage2 = null;
@Override
protected Boolean doInBackground() throws Exception {
Boolean successMeasure = Boolean.FALSE;
MultiBitBlockChain blockChain = null;
if (finalBitcoinController.getMultiBitService() != null) {
blockChain = finalBitcoinController.getMultiBitService().getChain();
}
try {
privateKeysHandler.exportPrivateKeys(exportPrivateKeysFile, finalBitcoinController.getModel().getActivePerWalletModelData()
.getWallet(), blockChain, performEncryptionOfExportFile, exportPasswordToUse, walletPassword);
// Success.
uiMessage1 = controller.getLocaliser().getString("showExportPrivateKeysAction.privateKeysExportSuccess");
// Perform a verification on the exported file to see if it
// is correct.
Verification verification = privateKeysHandler.verifyExportFile(exportPrivateKeysFile, finalBitcoinController.getModel()
.getActivePerWalletModelData().getWallet(), blockChain, performEncryptionOfExportFile,
exportPasswordToUse, walletPassword);
uiMessage2 = controller.getLocaliser().getString(verification.getMessageKey(), verification.getMessageData());
successMeasure = true;
} catch (IOException ioe) {
logError(ioe);
}
return successMeasure;
}
private void logError(Exception e) {
log.error(e.getClass().getName() + " " + e.getMessage());
e.printStackTrace();
uiMessage1 = controller.getLocaliser().getString("importPrivateKeysSubmitAction.privateKeysImportFailure",
new Object[] { e.getMessage() });
uiMessage2 = "";
}
@Override
protected void done() {
try {
Boolean wasSuccessful = get();
if (finalExportPanel != null && uiMessage1 != null) {
finalExportPanel.setMessage1(uiMessage1);
}
if (finalExportPanel != null && uiMessage2 != null) {
finalExportPanel.setMessage2(uiMessage2);
}
// Clear the passwords if the export was successful and the user is still
// looking at the same wallet as at start.
if (wasSuccessful && finalPerWalletModelData.getWalletFilename().equals(finalBitcoinController.getModel().getActiveWalletFilename())) {
finalExportPanel.clearPasswords();
}
} catch (Exception e) {
// Not really used but caught so that SwingWorker shuts down cleanly.
log.error(e.getClass() + " " + e.getMessage());
} finally {
// Declare that wallet is no longer busy with the task.
finalPerWalletModelData.setBusyTaskKey(null);
finalPerWalletModelData.setBusy(false);
finalBitcoinController.fireWalletBusyChange(false);
}
}
};
log.debug("Exporting private keys in background SwingWorker thread");
worker.execute();
}
@Override
public void walletBusyChange(boolean newWalletIsBusy) {
// Update the enable status of the action to match the wallet busy status.
if (super.bitcoinController.getModel().getActivePerWalletModelData().isBusy()) {
// Wallet is busy with another operation that may change the private keys - Action is disabled.
putValue(SHORT_DESCRIPTION, controller.getLocaliser().getString("multiBitSubmitAction.walletIsBusy",
new Object[]{controller.getLocaliser().getString(this.bitcoinController.getModel().getActivePerWalletModelData().getBusyTaskKey())}));
setEnabled(false);
} else {
// Enable unless wallet has been modified by another process.
if (!super.bitcoinController.getModel().getActivePerWalletModelData().isFilesHaveBeenChangedByAnotherProcess()) {
putValue(SHORT_DESCRIPTION, controller.getLocaliser().getString("exportPrivateKeysSubmitAction.text"));
setEnabled(true);
}
}
}
}
|
|
package net.ripe.db.whois.api.whois.rdap;
import com.google.common.collect.Lists;
import net.ripe.db.whois.api.whois.rdap.domain.Domain;
import net.ripe.db.whois.api.whois.rdap.domain.Entity;
import net.ripe.db.whois.api.whois.rdap.domain.Event;
import net.ripe.db.whois.api.whois.rdap.domain.Ip;
import net.ripe.db.whois.api.whois.rdap.domain.Link;
import net.ripe.db.whois.api.whois.rdap.domain.Nameserver;
import net.ripe.db.whois.api.whois.rdap.domain.Notice;
import net.ripe.db.whois.api.whois.rdap.domain.Remark;
import net.ripe.db.whois.api.whois.rdap.domain.Role;
import net.ripe.db.whois.api.whois.rdap.domain.vcard.VCard;
import org.junit.Test;
import javax.xml.datatype.XMLGregorianCalendar;
import static com.google.common.collect.Maps.immutableEntry;
import static net.ripe.db.whois.api.whois.rdap.VCardHelper.createAddress;
import static net.ripe.db.whois.api.whois.rdap.VCardHelper.createHonorifics;
import static net.ripe.db.whois.api.whois.rdap.VCardHelper.createMap;
import static net.ripe.db.whois.api.whois.rdap.VCardHelper.createName;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
public class RdapResponseJsonTest {
private static final String DATE_TIME = "2013-06-26T04:48:44Z";
private static final XMLGregorianCalendar LOCAL_DATE_TIME = RdapObjectMapper.dtf.newXMLGregorianCalendar(DATE_TIME);
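// All expected "eventDate" values in the tests below use this fixed timestamp.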
@Test
public void entity() throws Exception {
VCardBuilder builder = new VCardBuilder();
VCard vcard = builder
.addVersion()
.addFn("Joe User")
.addN(Lists.newArrayList("User", "Joe", "", Lists.newArrayList("ing. jr", "M.Sc.")))
.addGender("M")
.addLang(createMap(immutableEntry("pref", "1")), "fr")
.build();
Entity entity = new Entity();
entity.setHandle("XXXX");
entity.setVcardArray(vcard);
entity.setPort43("whois.example.com");
assertThat(RdapHelperUtils.marshal(entity), equalTo("" +
"{\n" +
" \"handle\" : \"XXXX\",\n" +
" \"vcardArray\" : [ \"vcard\", [ [ \"version\", { }, \"text\", \"4.0\" ], [ \"fn\", { }, \"text\", \"Joe User\" ], [ \"n\", { }, \"text\", [ \"User\", \"Joe\", \"\", [ \"ing. jr\", \"M.Sc.\" ] ] ], [ \"gender\", { }, \"text\", \"M\" ], [ \"lang\", {\n" +
" \"pref\" : \"1\"\n" +
" }, \"language-tag\", \"fr\" ] ] ],\n" +
" \"objectClassName\" : \"entity\",\n" +
" \"port43\" : \"whois.example.com\"\n" +
"}"));
}
@Test
public void entity_vcard_serialization_test() throws Exception {
final VCardBuilder builder = new VCardBuilder();
builder.addVersion()
.addFn("Joe User")
.addN(createName("User", "Joe", "", "", createHonorifics("ing. jr", "M.Sc.")))
.addBday("--02-03")
.addAnniversary("20130101")
.addGender("M")
.addKind("individual")
.addLang(createMap(immutableEntry("pref", "1")), "fr")
.addLang(createMap(immutableEntry("pref", "2")), "en")
.addOrg("Example")
.addTitle("Research Scientist")
.addRole("Project Lead")
.addAdr(createMap(immutableEntry("type", "work")), VCardHelper.createAddress("", "Suite 1234", "4321 Rue Somewhere", "Quebec", "QC", "G1V 2M2", "Canada"))
.addAdr(createMap(immutableEntry("pref", "1")), VCardHelper.createAddress("", "", "", "", "", "", ""))
.addTel(createMap(immutableEntry("type", new String[]{"work", "voice"})), "tel:+1-555-555-1234;ext=102")
.addTel(createMap(immutableEntry("type", new String[]{"work", "cell", "voice", "video", "text"})), "tel:+1-555-555-4321")
.addEmail(createMap(immutableEntry("type", "work")), "[email protected]")
.addGeo(createMap(immutableEntry("type", "work")), "geo:46.772673,-71.282945")
.addKey(createMap(immutableEntry("type", "work")), "http://www.example.com/joe.user/joe.asc")
.addTz("-05:00")
.addKey(createMap(immutableEntry("type", "work")), "http://example.org");
assertThat(RdapHelperUtils.marshal(builder.build()), equalTo("" +
"{\n" +
" \"vcard\" : [ [ \"version\", { }, \"text\", \"4.0\" ], [ \"fn\", { }, \"text\", \"Joe User\" ], [ \"n\", { }, \"text\", [ \"User\", \"Joe\", \"\", \"\", [ \"ing. jr\", \"M.Sc.\" ] ] ], [ \"bday\", { }, \"date-and-or-time\", \"--02-03\" ], [ \"anniversary\", { }, \"date-and-or-time\", \"20130101\" ], [ \"gender\", { }, \"text\", \"M\" ], [ \"kind\", { }, \"text\", \"individual\" ], [ \"lang\", {\n" +
" \"pref\" : \"1\"\n" +
" }, \"language-tag\", \"fr\" ], [ \"lang\", {\n" +
" \"pref\" : \"2\"\n" +
" }, \"language-tag\", \"en\" ], [ \"org\", { }, \"text\", \"Example\" ], [ \"title\", { }, \"text\", \"Research Scientist\" ], [ \"role\", { }, \"text\", \"Project Lead\" ], [ \"adr\", {\n" +
" \"type\" : \"work\"\n" +
" }, \"text\", [ \"\", \"Suite 1234\", \"4321 Rue Somewhere\", \"Quebec\", \"QC\", \"G1V 2M2\", \"Canada\" ] ], [ \"adr\", {\n" +
" \"pref\" : \"1\"\n" +
" }, \"text\", [ \"\", \"\", \"\", \"\", \"\", \"\", \"\" ] ], [ \"tel\", {\n" +
" \"type\" : [ \"work\", \"voice\" ]\n" +
" }, \"uri\", \"tel:+1-555-555-1234;ext=102\" ], [ \"tel\", {\n" +
" \"type\" : [ \"work\", \"cell\", \"voice\", \"video\", \"text\" ]\n" +
" }, \"uri\", \"tel:+1-555-555-4321\" ], [ \"email\", {\n" +
" \"type\" : \"work\"\n" +
" }, \"text\", \"[email protected]\" ], [ \"geo\", {\n" +
" \"type\" : \"work\"\n" +
" }, \"uri\", \"geo:46.772673,-71.282945\" ], [ \"key\", {\n" +
" \"type\" : \"work\"\n" +
" }, \"text\", \"http://www.example.com/joe.user/joe.asc\" ], [ \"tz\", { }, \"utc-offset\", \"-05:00\" ], [ \"key\", {\n" +
" \"type\" : \"work\"\n" +
" }, \"text\", \"http://example.org\" ] ]\n" +
"}"));
}
@Test
public void nameserver_serialization_test() throws Exception {
final Nameserver nameserver = new Nameserver();
nameserver.setHandle("handle");
nameserver.setLdhName("ns1.xn--fo-5ja.example");
nameserver.setUnicodeName("foo.example");
nameserver.getStatus().add("active");
final Nameserver.IpAddresses ipAddresses = new Nameserver.IpAddresses();
ipAddresses.getIpv4().add("192.0.2.1");
ipAddresses.getIpv4().add("192.0.2.2");
ipAddresses.getIpv6().add("2001:db8::123");
nameserver.setIpAddresses(ipAddresses);
final Remark remark = new Remark();
remark.getDescription().addAll(Lists.newArrayList("She sells sea shells down by the sea shore.", "Originally written by Terry Sullivan."));
nameserver.getRemarks().add(remark);
nameserver.getLinks().add(RdapObjectMapper.createLink("self", "http://example.net/nameserver/xxxx","http://example.net/nameserver/xxxx"));
nameserver.setPort43("whois.example.net");
final Event registrationEvent = new Event();
registrationEvent.setEventAction("registration");
registrationEvent.setEventDate(LOCAL_DATE_TIME);
nameserver.getEvents().add(registrationEvent);
final Event lastChangedEvent = new Event();
lastChangedEvent.setEventAction("last changed");
lastChangedEvent.setEventDate(LOCAL_DATE_TIME);
lastChangedEvent.setEventActor("[email protected]");
nameserver.getEvents().add(lastChangedEvent);
assertThat(RdapHelperUtils.marshal(nameserver), equalTo("" +
"{\n" +
" \"handle\" : \"handle\",\n" +
" \"ldhName\" : \"ns1.xn--fo-5ja.example\",\n" +
" \"unicodeName\" : \"foo.example\",\n" +
" \"ipAddresses\" : {\n" +
" \"ipv4\" : [ \"192.0.2.1\", \"192.0.2.2\" ],\n" +
" \"ipv6\" : [ \"2001:db8::123\" ]\n" +
" },\n" +
" \"objectClassName\" : \"nameserver\",\n" +
" \"status\" : [ \"active\" ],\n" +
" \"remarks\" : [ {\n" +
" \"description\" : [ \"She sells sea shells down by the sea shore.\", \"Originally written by Terry Sullivan.\" ]\n" +
" } ],\n" +
" \"links\" : [ {\n" +
" \"value\" : \"http://example.net/nameserver/xxxx\",\n" +
" \"rel\" : \"self\",\n" +
" \"href\" : \"http://example.net/nameserver/xxxx\",\n" +
" \"type\" : \"application/rdap+json\"\n" +
" } ],\n" +
" \"events\" : [ {\n" +
" \"eventAction\" : \"registration\",\n" +
" \"eventDate\" : \"2013-06-26T04:48:44Z\"\n" +
" }, {\n" +
" \"eventAction\" : \"last changed\",\n" +
" \"eventDate\" : \"2013-06-26T04:48:44Z\",\n" +
" \"eventActor\" : \"[email protected]\"\n" +
" } ],\n" +
" \"port43\" : \"whois.example.net\"\n" +
"}"));
}
@Test
public void domain_serialization_test() throws Exception {
/* Since this doesn't use RdapObjectMapper, the links don't have "type"
* attributes. This is purposeful. */
final Domain domain = new Domain();
domain.setHandle("XXXX");
domain.setLdhName("192.in-addr.arpa");
final Nameserver nameserver1 = new Nameserver();
nameserver1.setLdhName("ns1.rir.example");
domain.getNameservers().add(nameserver1);
final Nameserver nameserver2 = new Nameserver();
nameserver2.setLdhName("ns2.rir.example");
domain.getNameservers().add(nameserver2);
final Remark remark = new Remark();
remark.getDescription().addAll(Lists.newArrayList("She sells sea shells down by the sea shore.", "Originally written by Terry Sullivan."));
domain.getRemarks().add(remark);
domain.setPort43("whois.example.com");
final Link link = new Link();
link.setHref("http://example.net/domain/XXXXX");
link.setValue("http://example.net/domain/XXXX");
link.setRel("self");
domain.getLinks().add(link);
final Event registrationEvent = new Event();
registrationEvent.setEventAction("registration");
registrationEvent.setEventDate(LOCAL_DATE_TIME);
domain.getEvents().add(registrationEvent);
final Event lastChangedEvent = new Event();
lastChangedEvent.setEventAction("last changed");
lastChangedEvent.setEventDate(LOCAL_DATE_TIME);
lastChangedEvent.setEventActor("[email protected]");
domain.getEvents().add(lastChangedEvent);
final Entity entity = new Entity();
entity.setHandle("XXXX");
entity.getRoles().add(Role.REGISTRANT);
entity.getRemarks().add(remark);
entity.getEvents().add(registrationEvent);
entity.getEvents().add(lastChangedEvent);
final Link entityLink = new Link();
entityLink.setHref("http://example.net/entity/xxxx");
entityLink.setValue("http://example.net/entity/xxxx");
entityLink.setRel("self");
entity.getLinks().add(entityLink);
entity.setPort43("whois.example.com");
domain.getEntities().add(entity);
final VCardBuilder builder = new VCardBuilder();
builder.addVersion()
.addFn("Joe User")
.addKind("individual")
.addOrg("Example")
.addTitle("Research Scientist")
.addRole("Project Lead")
.addAdr(VCardHelper.createAddress("", "Suite 1234", "4321 Rue Somewhere", "Quebec", "QC", "G1V 2M2", "Canada"))
.addTel("tel:+1-555-555-1234;ext=102")
.addEmail("[email protected]");
entity.setVcardArray(builder.build());
final Domain.SecureDNS secureDNS = new Domain.SecureDNS();
secureDNS.setDelegationSigned(Boolean.TRUE);
final Domain.SecureDNS.DsData dsData = new Domain.SecureDNS.DsData();
dsData.setKeyTag(12345L);
dsData.setAlgorithm((short) 3);
dsData.setDigestType(1);
dsData.setDigest("49FD46E6C4B45C55D4AC");
secureDNS.getDsData().add(dsData);
domain.setSecureDNS(secureDNS);
assertThat(RdapHelperUtils.marshal(domain), equalTo("" +
"{\n" +
" \"handle\" : \"XXXX\",\n" +
" \"ldhName\" : \"192.in-addr.arpa\",\n" +
" \"nameservers\" : [ {\n" +
" \"ldhName\" : \"ns1.rir.example\",\n" +
" \"objectClassName\" : \"nameserver\"\n" +
" }, {\n" +
" \"ldhName\" : \"ns2.rir.example\",\n" +
" \"objectClassName\" : \"nameserver\"\n" +
" } ],\n" +
" \"secureDNS\" : {\n" +
" \"delegationSigned\" : true,\n" +
" \"dsData\" : [ {\n" +
" \"keyTag\" : 12345,\n" +
" \"algorithm\" : 3,\n" +
" \"digest\" : \"49FD46E6C4B45C55D4AC\",\n" +
" \"digestType\" : 1\n" +
" } ]\n" +
" },\n" +
" \"objectClassName\" : \"domain\",\n" +
" \"entities\" : [ {\n" +
" \"handle\" : \"XXXX\",\n" +
" \"vcardArray\" : [ \"vcard\", [ [ \"version\", { }, \"text\", \"4.0\" ], [ \"fn\", { }, \"text\", \"Joe User\" ], [ \"kind\", { }, \"text\", \"individual\" ], [ \"org\", { }, \"text\", \"Example\" ], [ \"title\", { }, \"text\", \"Research Scientist\" ], [ \"role\", { }, \"text\", \"Project Lead\" ], [ \"adr\", { }, \"text\", [ \"\", \"Suite 1234\", \"4321 Rue Somewhere\", \"Quebec\", \"QC\", \"G1V 2M2\", \"Canada\" ] ], [ \"tel\", { }, \"uri\", \"tel:+1-555-555-1234;ext=102\" ], [ \"email\", { }, \"text\", \"[email protected]\" ] ] ],\n" +
" \"roles\" : [ \"registrant\" ],\n" +
" \"objectClassName\" : \"entity\",\n" +
" \"remarks\" : [ {\n" +
" \"description\" : [ \"She sells sea shells down by the sea shore.\", \"Originally written by Terry Sullivan.\" ]\n" +
" } ],\n" +
" \"links\" : [ {\n" +
" \"value\" : \"http://example.net/entity/xxxx\",\n" +
" \"rel\" : \"self\",\n" +
" \"href\" : \"http://example.net/entity/xxxx\"\n" +
" } ],\n" +
" \"events\" : [ {\n" +
" \"eventAction\" : \"registration\",\n" +
" \"eventDate\" : \"2013-06-26T04:48:44Z\"\n" +
" }, {\n" +
" \"eventAction\" : \"last changed\",\n" +
" \"eventDate\" : \"2013-06-26T04:48:44Z\",\n" +
" \"eventActor\" : \"[email protected]\"\n" +
" } ],\n" +
" \"port43\" : \"whois.example.com\"\n" +
" } ],\n" +
" \"remarks\" : [ {\n" +
" \"description\" : [ \"She sells sea shells down by the sea shore.\", \"Originally written by Terry Sullivan.\" ]\n" +
" } ],\n" +
" \"links\" : [ {\n" +
" \"value\" : \"http://example.net/domain/XXXX\",\n" +
" \"rel\" : \"self\",\n" +
" \"href\" : \"http://example.net/domain/XXXXX\"\n" +
" } ],\n" +
" \"events\" : [ {\n" +
" \"eventAction\" : \"registration\",\n" +
" \"eventDate\" : \"2013-06-26T04:48:44Z\"\n" +
" }, {\n" +
" \"eventAction\" : \"last changed\",\n" +
" \"eventDate\" : \"2013-06-26T04:48:44Z\",\n" +
" \"eventActor\" : \"[email protected]\"\n" +
" } ],\n" +
" \"port43\" : \"whois.example.com\"\n" +
"}"));
}
@Test
public void ip_serialization_test() throws Exception {
Ip ip = new Ip();
ip.setHandle("XXXX-RIR");
ip.setParentHandle("YYYY-RIR");
ip.setStartAddress("2001:db8::0");
ip.setEndAddress("2001:db8::0:FFFF:FFFF:FFFF:FFFF:FFFF");
ip.setIpVersion("v6");
ip.setName("NET-RTR-1");
ip.setType("DIRECT ALLOCATION");
ip.setCountry("AU");
ip.getStatus().add("allocated");
final Remark remark = new Remark();
remark.getDescription().addAll(Lists.newArrayList("She sells sea shells down by the sea shore.", "Originally written by Terry Sullivan."));
ip.getRemarks().add(remark);
ip.getLinks().add(RdapObjectMapper.createLink("self", "http://example.net/ip/2001:db8::/48", "http://example.net/ip/2001:db8::/48"));
ip.getLinks().add(RdapObjectMapper.createLink("up", "http://example.net/ip/2001:db8::/48", "http://example.net/ip/2001:C00::/23"));
final Event registrationEvent = new Event();
registrationEvent.setEventAction("registration");
registrationEvent.setEventDate(LOCAL_DATE_TIME);
ip.getEvents().add(registrationEvent);
final Event lastChangedEvent = new Event();
lastChangedEvent.setEventAction("last changed");
lastChangedEvent.setEventDate(LOCAL_DATE_TIME);
lastChangedEvent.setEventActor("[email protected]");
ip.getEvents().add(lastChangedEvent);
ip.setPort43("whois.example.com");
final Entity entity = new Entity();
entity.setHandle("XXXX");
final VCardBuilder builder = new VCardBuilder();
builder.addVersion()
.addFn("Joe User")
.addKind("individual")
.addOrg("Example")
.addTitle("Research Scientist")
.addRole("Project Lead")
.addAdr(createAddress("", "Suite 1234", "4321 Rue Somewhere", "Quebec", "QC", "G1V 2M2", "Canada"))
.addTel("tel:+1-555-555-1234;ext=102")
.addEmail("[email protected]");
entity.setVcardArray(builder.build());
entity.getRoles().add(Role.REGISTRANT);
entity.getRemarks().add(remark);
entity.getEvents().add(registrationEvent);
entity.getEvents().add(lastChangedEvent);
entity.getLinks().add(RdapObjectMapper.createLink("self", "http://example.net/entity/xxxx", "http://example.net/entity/xxxx"));
entity.setPort43("whois.example.com");
ip.getEntities().add(entity);
assertThat(RdapHelperUtils.marshal(ip), equalTo("" +
"{\n" +
" \"handle\" : \"XXXX-RIR\",\n" +
" \"startAddress\" : \"2001:db8::0\",\n" +
" \"endAddress\" : \"2001:db8::0:FFFF:FFFF:FFFF:FFFF:FFFF\",\n" +
" \"ipVersion\" : \"v6\",\n" +
" \"name\" : \"NET-RTR-1\",\n" +
" \"type\" : \"DIRECT ALLOCATION\",\n" +
" \"country\" : \"AU\",\n" +
" \"parentHandle\" : \"YYYY-RIR\",\n" +
" \"objectClassName\" : \"ip network\",\n" +
" \"status\" : [ \"allocated\" ],\n" +
" \"entities\" : [ {\n" +
" \"handle\" : \"XXXX\",\n" +
" \"vcardArray\" : [ \"vcard\", [ [ \"version\", { }, \"text\", \"4.0\" ], [ \"fn\", { }, \"text\", \"Joe User\" ], [ \"kind\", { }, \"text\", \"individual\" ], [ \"org\", { }, \"text\", \"Example\" ], [ \"title\", { }, \"text\", \"Research Scientist\" ], [ \"role\", { }, \"text\", \"Project Lead\" ], [ \"adr\", { }, \"text\", [ \"\", \"Suite 1234\", \"4321 Rue Somewhere\", \"Quebec\", \"QC\", \"G1V 2M2\", \"Canada\" ] ], [ \"tel\", { }, \"uri\", \"tel:+1-555-555-1234;ext=102\" ], [ \"email\", { }, \"text\", \"[email protected]\" ] ] ],\n" +
" \"roles\" : [ \"registrant\" ],\n" +
" \"objectClassName\" : \"entity\",\n" +
" \"remarks\" : [ {\n" +
" \"description\" : [ \"She sells sea shells down by the sea shore.\", \"Originally written by Terry Sullivan.\" ]\n" +
" } ],\n" +
" \"links\" : [ {\n" +
" \"value\" : \"http://example.net/entity/xxxx\",\n" +
" \"rel\" : \"self\",\n" +
" \"href\" : \"http://example.net/entity/xxxx\",\n" +
" \"type\" : \"application/rdap+json\"\n" +
" } ],\n" +
" \"events\" : [ {\n" +
" \"eventAction\" : \"registration\",\n" +
" \"eventDate\" : \"" + DATE_TIME + "\"\n" +
" }, {\n" +
" \"eventAction\" : \"last changed\",\n" +
" \"eventDate\" : \"" + DATE_TIME + "\",\n" +
" \"eventActor\" : \"[email protected]\"\n" +
" } ],\n" +
" \"port43\" : \"whois.example.com\"\n" +
" } ],\n" +
" \"remarks\" : [ {\n" +
" \"description\" : [ \"She sells sea shells down by the sea shore.\", \"Originally written by Terry Sullivan.\" ]\n" +
" } ],\n" +
" \"links\" : [ {\n" +
" \"value\" : \"http://example.net/ip/2001:db8::/48\",\n" +
" \"rel\" : \"self\",\n" +
" \"href\" : \"http://example.net/ip/2001:db8::/48\",\n" +
" \"type\" : \"application/rdap+json\"\n" +
" }, {\n" +
" \"value\" : \"http://example.net/ip/2001:db8::/48\",\n" +
" \"rel\" : \"up\",\n" +
" \"href\" : \"http://example.net/ip/2001:C00::/23\",\n" +
" \"type\" : \"application/rdap+json\"\n" +
" } ],\n" +
" \"events\" : [ {\n" +
" \"eventAction\" : \"registration\",\n" +
" \"eventDate\" : \"" + DATE_TIME + "\"\n" +
" }, {\n" +
" \"eventAction\" : \"last changed\",\n" +
" \"eventDate\" : \"" + DATE_TIME + "\",\n" +
" \"eventActor\" : \"[email protected]\"\n" +
" } ],\n" +
" \"port43\" : \"whois.example.com\"\n" +
"}"));
}
@Test
public void notices_serialization_test() throws Exception {
final Notice notices = new Notice();
notices.setTitle("Beverage policy");
notices.getDescription().add("Beverages with caffeine for keeping horses awake.");
notices.getDescription().add("Very effective.");
final Link link = RdapObjectMapper.createLink("self", "http://example.com/ip/" + RdapUrlFactory.urlencode("202.112.0.0 - 202.121.255.255"), "http://example.com/target_uri_href", "application/json");
link.getHreflang().add("en");
link.getHreflang().add("ch");
link.getTitle().add("title1");
link.getTitle().add("title2");
link.setMedia("screen");
notices.getLinks().add(link);
assertThat(RdapHelperUtils.marshal(notices), equalTo("" +
"{\n" +
" \"title\" : \"Beverage policy\",\n" +
" \"description\" : [ \"Beverages with caffeine for keeping horses awake.\", \"Very effective.\" ],\n" +
" \"links\" : [ {\n" +
" \"value\" : \"http://example.com/ip/202.112.0.0%20-%20202.121.255.255\",\n" +
" \"rel\" : \"self\",\n" +
" \"href\" : \"http://example.com/target_uri_href\",\n" +
" \"hreflang\" : [ \"en\", \"ch\" ],\n" +
" \"title\" : [ \"title1\", \"title2\" ],\n" +
" \"media\" : \"screen\",\n" +
" \"type\" : \"application/json\"\n" +
" } ]\n" +
"}"));
}
}
|
|
/**
* *****************************************************************************
* Copyright 2013 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
* ****************************************************************************
*/
package net.shad.s3rend.gfx.pixmap.filter;
import com.badlogic.gdx.graphics.Pixmap;
import net.shad.s3rend.gfx.pixmap.procedural.ProceduralInterface;
/**
* Applies a threshold operation to a pixmap image.
*
* @author Jaroslaw Czub (http://shad.net.pl)
*/
public class Threshold implements ProceduralInterface, FilterPixmapInterface
{
/**
* Generate using default parameters (threshold 128, ratio 128, expand downwards).
*
* @param pixmap target pixmap
*/
@Override
public void generate(final Pixmap pixmap){
generate(pixmap, 128, 128, 0);
}
/**
* Apply the filter using default parameters (threshold 128, ratio 128, expand downwards).
*
* @param pixmap target pixmap
*/
@Override
public void filter(Pixmap pixmap){
generate(pixmap, 128, 128, 0);
}
/**
* Apply the filter with a randomised threshold, ratio and mode.
*
* @param pixmap target pixmap
*/
@Override
public void random(final Pixmap pixmap){
generate(pixmap, (int) (96.0f + Math.random() * 64), (int) (96.0f + Math.random() * 64), (int) (Math.random() * 4));
}
/**
* Main threshold processing routine.
*
* @param pixmap target pixmap
* @param threshold - Threshold value
* @param ratio - Threshold strength
* @param mode - 0 - Expand downwards, 1 - Expand upwards, 2 - Compress below, 3 - Compress above
*/
public static void generate(final Pixmap pixmap, int threshold, int ratio, int mode){
int width=pixmap.getWidth();
int height=pixmap.getHeight();
float fRatio=0;
switch (mode){
//
// Expand downwards
//
case 0:
default:
fRatio=1 + ratio * 0.1f;
for (int y=0; y < height; y++){
for (int x=0; x < width; x++){
int rgb=pixmap.getPixel(x, y);
int r=(rgb & 0xff000000) >>> 24;
int g=(rgb & 0x00ff0000) >>> 16;
int b=(rgb & 0x0000ff00) >>> 8;
int a=(rgb & 0x000000ff);
r=expandIntensity(r, fRatio, threshold);
g=expandIntensity(g, fRatio, threshold);
b=expandIntensity(b, fRatio, threshold);
pixmap.drawPixel(x, y, ((int) r << 24) | ((int) g << 16) | ((int) b << 8) | a);
}
}
break;
//
// Expand upwards
//
case 1:
fRatio=1 + ratio * 0.1f;
for (int y=0; y < height; y++){
for (int x=0; x < width; x++){
int rgb=pixmap.getPixel(x, y);
int r=(rgb & 0xff000000) >>> 24;
int g=(rgb & 0x00ff0000) >>> 16;
int b=(rgb & 0x0000ff00) >>> 8;
int a=(rgb & 0x000000ff);
// Invert, expand downwards on the inverted channel, then invert back so values above the threshold are pushed upwards.
r=255 - expandIntensity(255 - r, fRatio, 255 - threshold);
g=255 - expandIntensity(255 - g, fRatio, 255 - threshold);
b=255 - expandIntensity(255 - b, fRatio, 255 - threshold);
pixmap.drawPixel(x, y, ((int) r << 24) | ((int) g << 16) | ((int) b << 8) | a);
}
}
break;
//
// Compress below
//
case 2:
fRatio=1 + ratio * 0.1f;
for (int y=0; y < height; y++){
for (int x=0; x < width; x++){
int rgb=pixmap.getPixel(x, y);
int r=(rgb & 0xff000000) >>> 24;
int g=(rgb & 0x00ff0000) >>> 16;
int b=(rgb & 0x0000ff00) >>> 8;
int a=(rgb & 0x000000ff);
r=compressIntensity(r, fRatio, threshold);
g=compressIntensity(g, fRatio, threshold);
b=compressIntensity(b, fRatio, threshold);
pixmap.drawPixel(x, y, ((int) r << 24) | ((int) g << 16) | ((int) b << 8) | a);
}
}
break;
//
// Compress above
//
case 3:
fRatio=1 + ratio * 0.1f;
for (int y=0; y < height; y++){
for (int x=0; x < width; x++){
int rgb=pixmap.getPixel(x, y);
int r=(rgb & 0xff000000) >>> 24;
int g=(rgb & 0x00ff0000) >>> 16;
int b=(rgb & 0x0000ff00) >>> 8;
int a=(rgb & 0x000000ff);
// Invert, compress below on the inverted channel, then invert back so values above the threshold are pulled towards it.
r=255 - compressIntensity(255 - r, fRatio, 255 - threshold);
g=255 - compressIntensity(255 - g, fRatio, 255 - threshold);
b=255 - compressIntensity(255 - b, fRatio, 255 - threshold);
pixmap.drawPixel(x, y, ((int) r << 24) | ((int) g << 16) | ((int) b << 8) | a);
}
}
break;
}
}
/**
* Push an intensity below the threshold further away from it.
*
* @param intensity channel intensity (0-255)
* @param ratio expansion factor
* @param threshold threshold value (0-255)
* @return the expanded intensity, clamped to 0
*/
private static int expandIntensity(int intensity, float ratio, int threshold){
if (intensity < threshold){
int newIntensity=(threshold - (int) ((threshold - intensity) * ratio));
return newIntensity < 0 ? 0 : newIntensity;
} else {
return intensity;
}
}
/**
* Pull an intensity below the threshold closer to it.
*
* @param intensity channel intensity (0-255)
* @param ratio compression factor
* @param threshold threshold value (0-255)
* @return the compressed intensity, clamped to 0
*/
private static int compressIntensity(int intensity, float ratio, int threshold){
if (intensity < threshold){
int newIntensity=threshold - (int) ((threshold - intensity) / ratio);
return newIntensity < 0 ? 0 : newIntensity;
} else {
return intensity;
}
}
}
|
|
// @generated
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/com/facebook/buck/remoteexecution/proto/metadata.proto
package com.facebook.buck.remoteexecution.proto;
/**
* Protobuf type {@code facebook.remote_execution.ManifoldBucket}
*/
@javax.annotation.Generated(value="protoc", comments="annotations:ManifoldBucket.java.pb.meta")
public final class ManifoldBucket extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:facebook.remote_execution.ManifoldBucket)
ManifoldBucketOrBuilder {
private static final long serialVersionUID = 0L;
// Use ManifoldBucket.newBuilder() to construct.
private ManifoldBucket(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ManifoldBucket() {
name_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ManifoldBucket(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.facebook.buck.remoteexecution.proto.RemoteExecutionMetadataProto.internal_static_facebook_remote_execution_ManifoldBucket_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.facebook.buck.remoteexecution.proto.RemoteExecutionMetadataProto.internal_static_facebook_remote_execution_ManifoldBucket_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.facebook.buck.remoteexecution.proto.ManifoldBucket.class, com.facebook.buck.remoteexecution.proto.ManifoldBucket.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.facebook.buck.remoteexecution.proto.ManifoldBucket)) {
return super.equals(obj);
}
com.facebook.buck.remoteexecution.proto.ManifoldBucket other = (com.facebook.buck.remoteexecution.proto.ManifoldBucket) obj;
if (!getName()
.equals(other.getName())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.facebook.buck.remoteexecution.proto.ManifoldBucket prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code facebook.remote_execution.ManifoldBucket}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:facebook.remote_execution.ManifoldBucket)
com.facebook.buck.remoteexecution.proto.ManifoldBucketOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.facebook.buck.remoteexecution.proto.RemoteExecutionMetadataProto.internal_static_facebook_remote_execution_ManifoldBucket_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.facebook.buck.remoteexecution.proto.RemoteExecutionMetadataProto.internal_static_facebook_remote_execution_ManifoldBucket_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.facebook.buck.remoteexecution.proto.ManifoldBucket.class, com.facebook.buck.remoteexecution.proto.ManifoldBucket.Builder.class);
}
// Construct using com.facebook.buck.remoteexecution.proto.ManifoldBucket.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.facebook.buck.remoteexecution.proto.RemoteExecutionMetadataProto.internal_static_facebook_remote_execution_ManifoldBucket_descriptor;
}
@java.lang.Override
public com.facebook.buck.remoteexecution.proto.ManifoldBucket getDefaultInstanceForType() {
return com.facebook.buck.remoteexecution.proto.ManifoldBucket.getDefaultInstance();
}
@java.lang.Override
public com.facebook.buck.remoteexecution.proto.ManifoldBucket build() {
com.facebook.buck.remoteexecution.proto.ManifoldBucket result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.facebook.buck.remoteexecution.proto.ManifoldBucket buildPartial() {
com.facebook.buck.remoteexecution.proto.ManifoldBucket result = new com.facebook.buck.remoteexecution.proto.ManifoldBucket(this);
result.name_ = name_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.facebook.buck.remoteexecution.proto.ManifoldBucket) {
return mergeFrom((com.facebook.buck.remoteexecution.proto.ManifoldBucket)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.facebook.buck.remoteexecution.proto.ManifoldBucket other) {
if (other == com.facebook.buck.remoteexecution.proto.ManifoldBucket.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.facebook.buck.remoteexecution.proto.ManifoldBucket parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.facebook.buck.remoteexecution.proto.ManifoldBucket) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>string name = 1;</code>
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* <code>string name = 1;</code>
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <code>string name = 1;</code>
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:facebook.remote_execution.ManifoldBucket)
}
// @@protoc_insertion_point(class_scope:facebook.remote_execution.ManifoldBucket)
private static final com.facebook.buck.remoteexecution.proto.ManifoldBucket DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.facebook.buck.remoteexecution.proto.ManifoldBucket();
}
public static com.facebook.buck.remoteexecution.proto.ManifoldBucket getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ManifoldBucket>
PARSER = new com.google.protobuf.AbstractParser<ManifoldBucket>() {
@java.lang.Override
public ManifoldBucket parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ManifoldBucket(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ManifoldBucket> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ManifoldBucket> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.facebook.buck.remoteexecution.proto.ManifoldBucket getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
|
package com.countrygamer.capo.client.gui;
import io.netty.buffer.Unpooled;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiTextField;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.network.PacketBuffer;
import net.minecraft.network.play.client.C17PacketCustomPayload;
import net.minecraft.util.ResourceLocation;
import org.apache.logging.log4j.LogManager;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL11;
import com.countrygamer.capo.common.Capo;
import com.countrygamer.capo.common.packet.PacketStorePlayerNames;
import com.countrygamer.capo.common.tileentity.TileEntityPlayerChecker;
import com.countrygamer.core.Base.client.gui.GuiButtonCheck;
import com.countrygamer.core.Base.client.gui.GuiButtonRedstoneController;
import com.countrygamer.core.Base.client.gui.GuiScreenBlockBase;
import com.countrygamer.core.common.Core;
import com.countrygamer.core.common.handler.packet.PacketUpdateRedstoneState;
import com.countrygamer.core.common.lib.CoreReference;
import com.countrygamer.core.common.lib.RedstoneState;
public class GuiPlayerChecker extends GuiScreenBlockBase {
private int guiLeft, guiTop;
protected int xSize = 176;
protected int ySize = 166;
public GuiTextField playerField;
private int playerFieldX, playerFieldY, playerFieldWidth;
private int playerFieldHeight, playerFieldBoxY, playerFieldBoxYMax;
private int nameHeight = 10;
// private String currentPlayerName = "";
private String[] validNames = new String[] {};
private HashMap<String, int[]> nameCoors = new HashMap<String, int[]>();
private GuiButtonCheck checkButton, xButton;
private GuiButton hideActive;
private boolean hideActiveNames = false;
private ArrayList<String> activeNames = new ArrayList<String>();
private GuiButtonRedstoneController redstoneButtonController;
int ignoreX, ignoreY, lowX, lowY, highX, highY;
public GuiPlayerChecker(TileEntityPlayerChecker tile) {
super(tile);
}
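// Lay out the buttons, the redstone state controls and the player name text field.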
@SuppressWarnings("unchecked")
public void initGui() {
super.initGui();
this.guiLeft = (this.width - this.xSize) / 2;
this.guiTop = (this.height - this.ySize) / 2;
int buttonID = 0;
this.buttonList.clear();
this.buttonList.add(this.hideActive = new GuiButton(++buttonID, this.guiLeft
+ this.xSize - 64, this.guiTop + 20, 60, 20, "Hide Active"));
this.buttonList.add(this.checkButton = new GuiButtonCheck(++buttonID,
this.guiLeft + this.xSize - 60, this.guiTop + 50, true));
this.buttonList.add(this.xButton = new GuiButtonCheck(++buttonID,
this.guiLeft + this.xSize - 30, this.guiTop + 50, false));
this.redstoneButtonController = new GuiButtonRedstoneController();
this.ignoreX = (this.xSize / 2) + 28;
this.ignoreY = 80;
this.lowX = (this.xSize / 2) + 28;
this.lowY = 100;
this.highX = (this.xSize / 2) + 28;
this.highY = 120;
buttonID = this.redstoneButtonController.registerButtons(this.buttonList,
buttonID, new int[] {
this.guiLeft + this.ignoreX, this.guiTop + this.ignoreY,
this.guiLeft + this.lowX, this.guiTop + this.lowY,
this.guiLeft + this.highX, this.guiTop + this.highY
});
switch (this.tileEnt.getRedstoneState()) {
case IGNORE:
this.redstoneButtonController.ignore.isActive = true;
this.redstoneButtonController.low.isActive = false;
this.redstoneButtonController.high.isActive = false;
break;
case LOW:
this.redstoneButtonController.low.isActive = true;
this.redstoneButtonController.high.isActive = false;
this.redstoneButtonController.ignore.isActive = false;
break;
case HIGH:
this.redstoneButtonController.high.isActive = true;
this.redstoneButtonController.ignore.isActive = false;
this.redstoneButtonController.low.isActive = false;
break;
default:
this.redstoneButtonController.ignore.isActive = false;
this.redstoneButtonController.low.isActive = false;
this.redstoneButtonController.high.isActive = false;
break;
}
Keyboard.enableRepeatEvents(true);
this.playerFieldX = this.guiLeft + 10;
this.playerFieldY = this.guiTop + 20;
this.playerFieldWidth = 100;
this.playerFieldHeight = 10;
this.playerFieldBoxY = this.playerFieldY + this.playerFieldHeight + 1;
this.playerFieldBoxYMax = (this.ySize + 10) - this.playerFieldY;
this.playerField = new GuiTextField(this.fontRendererObj, this.playerFieldX,
this.playerFieldY, this.playerFieldWidth, this.playerFieldHeight);
this.setupTextField(this.playerField, 16);
this.refreshPlayerList();
}
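// Toggle hiding of active names, add or remove a player name via packets, or update the redstone state.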
@Override
protected void buttonPress(int id) {
String playerName = "";
if (id == this.hideActive.id) {
this.hideActiveNames = !this.hideActiveNames;
}
if (id == this.checkButton.id) {
playerName = this.getActualStringFromTextField(this.playerField
.getText());
if (playerName.equals("")) return;
PacketStorePlayerNames packet = new PacketStorePlayerNames(
tileEnt.xCoord, tileEnt.yCoord, tileEnt.zCoord, playerName, true);
Capo.packetChannel.sendToServer(packet);
Capo.packetChannel.sendToAll(packet);
this.playerField.setText("");
}
else if (id == this.xButton.id) {
playerName = this.getActualStringFromTextField(this.playerField
.getText());
if (playerName.equals("")) return;
PacketStorePlayerNames packet = new PacketStorePlayerNames(
tileEnt.xCoord, tileEnt.yCoord, tileEnt.zCoord, playerName,
false);
Capo.packetChannel.sendToServer(packet);
Capo.packetChannel.sendToAll(packet);
this.playerField.setText("");
}
else {
if (this.redstoneButtonController.buttonPressed(id)) {
RedstoneState redstoneState = this.redstoneButtonController
.getActiveState();
if (redstoneState != null) {
Capo.log.info("Sending redstone packet");
PacketUpdateRedstoneState packet = new PacketUpdateRedstoneState(
tileEnt.xCoord, tileEnt.yCoord, tileEnt.zCoord,
redstoneState);
Core.packetChannel.sendToServer(packet);
Core.packetChannel.sendToAll(packet);
}
}
}
}
protected void keyTyped(char letter, int par2) {
if (this.playerField.textboxKeyTyped(letter, par2)) {
this.sendKeyPacket(this.playerField);
this.refreshPlayerList();
}
else
super.keyTyped(letter, par2);
}
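// Rebuild the suggestion list from the online players and the tile entity's active names.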
private void refreshPlayerList() {
String[] playerNames = this.getArrayOfPlayerNames();
this.validNames = this
.getValidNames(this.playerField.getText(), playerNames);
this.activeNames = ((TileEntityPlayerChecker) tileEnt).activePlayerNames;
}
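// Collect the online player names from the tile entity; each name gets a trailing space that is stripped again by getActualStringFromTextField().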
private String[] getArrayOfPlayerNames() {
Map<String, EntityPlayerMP> players = ((TileEntityPlayerChecker) tileEnt).onlinePlayers;
Object[] keys = players.keySet().toArray();
String[] s = new String[keys.length];
for (int i = 0; i < keys.length; i++) {
s[i] = keys[i] + " ";
}
return s;
}
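// Filter playerNames down to the entries that start with the text currently typed into the field.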
private String[] getValidNames(String s, String[] playerNames) {
ArrayList<String> tempNames = new ArrayList<String>();
for (String str : playerNames) {
if (s.equals("") || str.startsWith(s)) {
tempNames.add(str);
}
}
String[] strAr = new String[tempNames.size()];
for (int i = 0; i < tempNames.size(); i++) {
strAr[i] = tempNames.get(i);
}
return strAr;
}
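// Send the text field contents to the server over the "MC|ItemName" custom payload channel.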
private void sendKeyPacket(GuiTextField txtField) {
PacketBuffer packetbuffer = new PacketBuffer(Unpooled.buffer());
try {
packetbuffer.writeStringToBuffer(txtField.getText());
this.mc.getNetHandler().addToSendQueue(
new C17PacketCustomPayload("MC|ItemName", packetbuffer));
} catch (Exception exception) {
LogManager.getLogger().error("Couldn\'t send command block info",
exception);
} finally {
packetbuffer.release();
}
}
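// If a click lands inside a rendered name's bounding box, copy that name into the text field.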
protected void mouseClicked(int x, int y, int button) {
super.mouseClicked(x, y, button);
this.playerField.mouseClicked(x, y, button);
String playerName = this.getActualStringFromTextField(this.playerField
.getText());
for (String str : this.validNames) {
str = this.getActualStringFromTextField(str);
if (this.nameCoors.containsKey(str)) {
int[] minMax = this.nameCoors.get(str);
if (minMax != null) {
int xMin = minMax[0];
int yMin = minMax[1];
int xMax = minMax[2];
int yMax = minMax[3];
if ((x >= xMin && x <= xMax) && (y >= yMin && y <= yMax)) {
playerName = str;
continue;
}
}
}
else
Capo.log.info("ValidNames: nameCoors doesnt contain \"" + str + "\"");
}
for (String str : this.activeNames) {
str = this.getActualStringFromTextField(str);
if (this.nameCoors.containsKey(str)) {
int[] minMax = this.nameCoors.get(str);
if (minMax != null) {
int xMin = minMax[0];
int yMin = minMax[1];
int xMax = minMax[2];
int yMax = minMax[3];
if ((x >= xMin && x <= xMax) && (y >= yMin && y <= yMax)) {
playerName = str;
continue;
}
}
}
else
Capo.log.info("ActiveNames: nameCoors doesnt contain \"" + str
+ "\"");
}
if (!playerName.equals(this.playerField.getText())) {
this.playerField.setText(playerName);
}
}
public void updateScreen() {
}
public boolean doesGuiPauseGame() {
return false;
}
public void onGuiClosed() {
super.onGuiClosed();
Keyboard.enableRepeatEvents(false);
}
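// Draw the title, the active (highlighted) names and the matching suggestions, recording each name's bounding box for click handling.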
protected void drawGuiContainerForegroundLayer(int par1, int par2) {
String s = ((TileEntityPlayerChecker) tileEnt).getInventoryName();
this.fontRendererObj.drawString(s,
(this.width / 2) - (this.fontRendererObj.getStringWidth(s) / 2),
this.guiTop + (int) (0.035 * this.ySize), 4210752);
// int gray = 4210752;
this.nameCoors.clear();
ArrayList<String> drawnNames = new ArrayList<String>();
int x = this.playerFieldX + 2;
int y = this.playerFieldBoxY + 2;
if (!this.hideActiveNames) {
for (int i = 0; i < this.activeNames.size(); i++) {
String str = this.activeNames.get(i);
if (y + this.nameHeight < this.playerFieldBoxY
+ this.playerFieldBoxYMax) {
this.fontRendererObj.drawString(str, x, y, 0x00C8FF);
this.nameCoors.put(str, new int[] {
x, y, x + this.fontRendererObj.getStringWidth(str),
y + this.nameHeight
});
drawnNames.add(str);
}
y += this.nameHeight;
}
}
for (int i = 0; i < this.validNames.length; i++) {
String str = this.getActualStringFromTextField(this.validNames[i]);
// y += (i + ((this.hideActiveNames || this.activeNames.isEmpty()) ? 0 : 1))
// * this.nameHeight;
if (!drawnNames.contains(str)
&& y + this.nameHeight < this.playerFieldBoxY
+ this.playerFieldBoxYMax) {
this.fontRendererObj.drawString(str, x, y, -1);
this.nameCoors.put(str, new int[] {
x, y, x + this.fontRendererObj.getStringWidth(str),
y + this.nameHeight
});
}
y += this.nameHeight;
}
drawnNames.clear();
}
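// Draw the plain background, the black suggestion box and the player name text field.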
protected void drawGuiContainerBackgroundLayer(float f, int i, int j) {
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
this.mc.getTextureManager()
.bindTexture(
new ResourceLocation(CoreReference.MOD_ID,
"textures/gui/blank.png"));
drawTexturedModalRect(this.guiLeft, this.guiTop, 0, 0, this.xSize,
this.ySize);
this.mc.getTextureManager()
.bindTexture(
new ResourceLocation(CoreReference.MOD_ID,
"textures/gui/black.png"));
drawTexturedModalRect(this.playerFieldX, this.playerFieldBoxY, 0, 0,
this.playerFieldWidth, this.playerFieldBoxYMax);
this.playerField.drawTextBox();
}
private String getActualStringFromTextField(String str) {
if (str.length() <= 0
|| !str.substring(str.length() - 1, str.length()).equals(" "))
return str;
else
return str.substring(0, str.length() - 1);
}
@Override
public void drawScreen(int mouseX, int mouseY, float par3) {
super.drawScreen(mouseX, mouseY, par3);
List<String> hoverInfo = new ArrayList<String>();
if (this.func_146978_c(this.ignoreX, this.ignoreY, 18, 18, mouseX, mouseY)) {
hoverInfo.add("Redstone State: Ignore");
for (String str : RedstoneState.IGNORE.desc)
hoverInfo.add(str);
this.renderHoverTip(hoverInfo, mouseX, mouseY);
}
if (this.func_146978_c(this.lowX, this.lowY, 18, 18, mouseX, mouseY)) {
hoverInfo.add("Redstone State: Low");
for (String str : RedstoneState.LOW.desc)
hoverInfo.add(str);
this.renderHoverTip(hoverInfo, mouseX, mouseY);
}
if (this.func_146978_c(this.highX, this.highY, 18, 18, mouseX, mouseY)) {
hoverInfo.add("Redstone State: High");
for (String str : RedstoneState.HIGH.desc)
hoverInfo.add(str);
this.renderHoverTip(hoverInfo, mouseX, mouseY);
}
}
protected boolean func_146978_c(int x, int y, int w, int h, int mx, int my) {
int k1 = this.guiLeft;
int l1 = this.guiTop;
mx -= k1;
my -= l1;
return mx >= x - 1 && mx < x + w + 1 && my >= y - 1 && my < y + h + 1;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.google.storage;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageClass;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
@UriParams
public class GoogleCloudStorageConfiguration implements Cloneable {
@UriPath(label = "common", description = "Bucket name or ARN")
@Metadata(required = true)
private String bucketName;
@UriParam(label = "common",
description = "The Service account key that can be used as credentials for the Storage client. It can be loaded by default from "
+ " classpath, but you can prefix with classpath:, file:, or http: to load the resource from different systems.")
private String serviceAccountKey;
@UriParam(label = "producer",
enums = "copyObject,listObjects,deleteObject,deleteBucket,listBuckets,getObject,createDownloadLink")
private GoogleCloudStorageOperations operation;
@UriParam(label = "producer", description = "The Object name inside the bucket")
private String objectName;
@UriParam(label = "common", defaultValue = "US-EAST1",
description = "The Cloud Storage location to use when creating the new buckets")
private String storageLocation = "US-EAST1";
@UriParam(label = "common", defaultValue = "STANDARD",
description = "The Cloud Storage class to use when creating the new buckets")
private StorageClass storageClass = StorageClass.STANDARD;
@UriParam(label = "common", defaultValue = "true")
private boolean autoCreateBucket = true;
@UriParam(label = "consumer")
private boolean moveAfterRead;
@UriParam(label = "consumer")
private String destinationBucket;
@UriParam(label = "consumer", defaultValue = "true")
private boolean deleteAfterRead = true;
@UriParam(label = "consumer", defaultValue = "true")
private boolean includeBody = true;
@UriParam(label = "consumer", defaultValue = "true")
private boolean includeFolders = true;
@UriParam
@Metadata(autowired = true)
private Storage storageClient;
public String getBucketName() {
return this.bucketName;
}
/**
* Bucket name
*/
public void setBucketName(String bucketName) {
this.bucketName = bucketName;
}
public String getServiceAccountKey() {
return serviceAccountKey;
}
/**
* The Service account key that can be used as credentials for the Storage client. It can be loaded by default from
* classpath, but you can prefix with "classpath:", "file:", or "http:" to load the resource from different systems.
*/
public void setServiceAccountKey(String serviceAccountKey) {
this.serviceAccountKey = serviceAccountKey;
}
public String getObjectName() {
return objectName;
}
/**
* The object name (the file inside the bucket)
*/
public void setObjectName(String objectName) {
this.objectName = objectName;
}
public String getStorageLocation() {
return storageLocation;
}
/**
* The Cloud Storage location to use when creating the new buckets. The complete available locations list at
* https://cloud.google.com/storage/docs/locations#location-mr
*/
public void setStorageLocation(String storageLocation) {
this.storageLocation = storageLocation;
}
public StorageClass getStorageClass() {
return storageClass;
}
/**
* The Cloud Storage class to use when creating the new buckets
*
* @param storageClass
*/
public void setStorageClass(StorageClass storageClass) {
this.storageClass = storageClass;
}
public Storage getStorageClient() {
return storageClient;
}
/**
* The storage client
*
* @param storageClient
*/
public void setStorageClient(Storage storageClient) {
this.storageClient = storageClient;
}
public GoogleCloudStorageOperations getOperation() {
return operation;
}
/**
* Set the operation for the producer
*
* @param operation
*/
public void setOperation(GoogleCloudStorageOperations operation) {
this.operation = operation;
}
public boolean isAutoCreateBucket() {
return autoCreateBucket;
}
/**
* Sets whether the bucket named in bucketName is created automatically if it does not exist.
*/
public void setAutoCreateBucket(boolean autoCreateBucket) {
this.autoCreateBucket = autoCreateBucket;
}
public boolean isMoveAfterRead() {
return moveAfterRead;
}
/**
* Move objects from the origin bucket to a different bucket after they have been retrieved. To accomplish the
* operation the destinationBucket option must be set. The copy bucket operation is only performed if the Exchange
* is committed. If a rollback occurs, the object is not moved.
*/
public void setMoveAfterRead(boolean moveAfterRead) {
this.moveAfterRead = moveAfterRead;
}
public String getDestinationBucket() {
return destinationBucket;
}
/**
* Define the destination bucket where an object must be moved when moveAfterRead is set to true.
*/
public void setDestinationBucket(String destinationBucket) {
this.destinationBucket = destinationBucket;
}
/**
* If true, folders/directories will be consumed. If false, they will be ignored and no
* Exchanges will be created for them.
*/
public void setIncludeFolders(boolean includeFolders) {
this.includeFolders = includeFolders;
}
public boolean isIncludeFolders() {
return includeFolders;
}
public boolean isDeleteAfterRead() {
return deleteAfterRead;
}
/**
* Delete objects from the bucket after they have been retrieved. The delete is only performed if the Exchange is
* committed. If a rollback occurs, the object is not deleted.
* <p/>
* If this option is false, the same objects will be retrieved over and over again in subsequent polls.
*/
public void setDeleteAfterRead(boolean deleteAfterRead) {
this.deleteAfterRead = deleteAfterRead;
}
/**
* If true, the object's content will be consumed and put into the message body. If false, the raw
* object stream will be put into the body and the headers will be set with the object metadata.
*/
public void setIncludeBody(boolean includeBody) {
this.includeBody = includeBody;
}
public boolean isIncludeBody() {
return includeBody;
}
public GoogleCloudStorageConfiguration copy() {
try {
return (GoogleCloudStorageConfiguration) super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
}
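// A minimal consumer-route sketch, not part of the component itself: it assumes the
// component is registered under the "google-storage" URI scheme and uses hypothetical
// bucket names ("myCamelBucket", "backupBucket") and key file ("my-key.json"); the query
// options map one-to-one onto the @UriParam fields declared above.
class GoogleCloudStorageConfigurationUsageSketch extends org.apache.camel.builder.RouteBuilder {
    @Override
    public void configure() {
        // Consume objects, keep the originals (deleteAfterRead=false) and move each one
        // to a backup bucket once it has been read (moveAfterRead + destinationBucket).
        from("google-storage://myCamelBucket"
                + "?serviceAccountKey=classpath:my-key.json"
                + "&deleteAfterRead=false"
                + "&moveAfterRead=true"
                + "&destinationBucket=backupBucket")
                .log("Consumed object with body: ${body}");
    }
}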
|
|
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* The following code is sourced from Google's Camera2Video template found at:
* https://github.com/googlesamples/android-Camera2Video
*
* Sections were modified for use with audio/speech recognition and location tracking.
* The location-tracking and capture logic was implemented using the Location API.
*/
package com.example.android.camera2video;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.location.LocationManager;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.v13.app.FragmentCompat;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.PermissionChecker;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
public class Camera2VideoFragment extends Fragment
implements View.OnClickListener, FragmentCompat.OnRequestPermissionsResultCallback {
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
private static final String FRAGMENT_DIALOG = "dialog";
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
Manifest.permission.ACCESS_FINE_LOCATION
};
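// ACCESS_FINE_LOCATION is an addition to the stock Camera2Video permission set; it is
// required by getGPSLocation() below so recordings can be geotagged in setUpMediaRecorder().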
static {
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
static {
INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
}
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView mTextureView;
/**
* Button to record video
*/
private Button mButtonVideo;
/**
* A reference to the opened {@link android.hardware.camera2.CameraDevice}.
*/
private CameraDevice mCameraDevice;
/**
* A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for
* preview.
*/
private CameraCaptureSession mPreviewSession;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
* {@link TextureView}.
*/
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
/**
* The {@link android.util.Size} of camera preview.
*/
private Size mPreviewSize;
/**
* The {@link android.util.Size} of video recording.
*/
private Size mVideoSize;
/**
* MediaRecorder
*/
private MediaRecorder mMediaRecorder;
/**
* Whether the app is recording video now
*/
private boolean mIsRecordingVideo;
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
*/
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
private Integer mSensorOrientation;
private String mNextVideoAbsolutePath;
private CaptureRequest.Builder mPreviewBuilder;
public static double longitude;
public static double latitude;
public static Camera2VideoFragment newInstance() {
return new Camera2VideoFragment();
}
/**
* In this sample, we choose a video size with a 4:3 aspect ratio. Also, we don't use sizes
* wider than 1080 px, since MediaRecorder cannot handle such a high-resolution video.
*
* @param choices The list of available sizes
* @return The video size
*/
private static Size chooseVideoSize(Size[] choices) {
for (Size size : choices) {
if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
return size;
}
}
Log.e(TAG, "Couldn't find any suitable video size");
return choices[choices.length - 1];
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the respective requested values, and whose aspect
* ratio matches with the specified value.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @param aspectRatio The aspect ratio
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getHeight() == option.getWidth() * h / w &&
option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_camera2_video, container, false);
}
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
mButtonVideo = (Button) view.findViewById(R.id.video);
mButtonVideo.setOnClickListener(this);
view.findViewById(R.id.info).setOnClickListener(this);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (mTextureView.isAvailable()) {
openCamera(mTextureView.getWidth(), mTextureView.getHeight());
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.video: {
if (mIsRecordingVideo) {
stopRecordingVideo();
} else {
startRecordingVideo();
}
break;
}
case R.id.info: {
Activity activity = getActivity();
if (null != activity) {
new AlertDialog.Builder(activity)
.setMessage(R.string.intro_message)
.setPositiveButton(android.R.string.ok, null)
.show();
}
break;
}
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Gets whether you should show UI with rationale for requesting permissions.
*
* @param permissions The permissions your app wants to request.
* @return Whether you can show permission rationale UI.
*/
private boolean shouldShowRequestPermissionRationale(String[] permissions) {
for (String permission : permissions) {
if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
return true;
}
}
return false;
}
/**
* Requests permissions needed for recording video.
*/
private void requestVideoPermissions() {
if (shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) {
new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
} else {
FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
Log.d(TAG, "onRequestPermissionsResult");
if (requestCode == REQUEST_VIDEO_PERMISSIONS) {
if (grantResults.length == VIDEO_PERMISSIONS.length) {
for (int result : grantResults) {
if (result != PackageManager.PERMISSION_GRANTED) {
ErrorDialog.newInstance(getString(R.string.permission_request))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
break;
}
}
} else {
ErrorDialog.newInstance(getString(R.string.permission_request))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private boolean hasPermissionsGranted(String[] permissions) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(getActivity(), permission)
!= PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
/**
* Tries to open a {@link CameraDevice}. The result is delivered to `mStateCallback`.
*/
@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
requestVideoPermissions();
return;
}
final Activity activity = getActivity();
if (null == activity || activity.isFinishing()) {
return;
}
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
Log.d(TAG, "tryAcquire");
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String cameraId = manager.getCameraIdList()[1];
// Choose the sizes for camera preview and video recording
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
if (map == null) {
throw new RuntimeException("Cannot get available preview/video sizes");
}
mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
width, height, mVideoSize);
int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
} else {
mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
configureTransform(width, height);
mMediaRecorder = new MediaRecorder();
manager.openCamera(cameraId, mStateCallback, null);
} catch (CameraAccessException e) {
Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
activity.finish();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.");
}
}
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
closePreviewSession();
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mMediaRecorder) {
mMediaRecorder.release();
mMediaRecorder = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.");
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Start the camera preview.
*/
private void startPreview() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Surface previewSurface = new Surface(texture);
mPreviewBuilder.addTarget(previewSurface);
mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
mPreviewSession = session;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Update the camera preview. {@link #startPreview()} needs to be called in advance.
*/
private void updatePreview() {
if (null == mCameraDevice) {
return;
}
try {
setUpCaptureRequestBuilder(mPreviewBuilder);
HandlerThread thread = new HandlerThread("CameraPreview");
thread.start();
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
/**
* Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
* This method should not be called until the camera preview size is determined in
* openCamera and the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
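// In landscape the camera buffer is rotated relative to the view: center the buffer
// rect on the view, map the view rect onto it, scale so the (swapped) preview
// dimensions fill the view, then rotate by 90 * (rotation - 2) degrees to compensate.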
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
private void setUpMediaRecorder() throws IOException {
final Activity activity = getActivity();
if (null == activity) {
return;
}
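// MediaRecorder configuration is order-sensitive: set the audio/video sources first,
// then the output format, then the output file, encoding parameters and encoders,
// and call prepare() last.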
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
mNextVideoAbsolutePath = getVideoFilePath(getActivity());
}
mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
mMediaRecorder.setVideoEncodingBitRate(10000000);
mMediaRecorder.setVideoFrameRate(30);
mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
getGPSLocation(getActivity());
if (latitude != 0 && longitude != 0){
mMediaRecorder.setLocation((float)latitude, (float)longitude);
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (mSensorOrientation) {
case SENSOR_ORIENTATION_DEFAULT_DEGREES:
mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
break;
case SENSOR_ORIENTATION_INVERSE_DEGREES:
mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
break;
}
mMediaRecorder.prepare();
}
private String getVideoFilePath(Context context) {
final File dir = context.getExternalFilesDir(null);
return (dir == null ? "" : (dir.getAbsolutePath() + "/"))
+ System.currentTimeMillis() + ".mp4";
}
private void startRecordingVideo() {
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
return;
}
try {
closePreviewSession();
setUpMediaRecorder();
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
// Set up Surface for the camera preview
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Set up Surface for the MediaRecorder
Surface recorderSurface = mMediaRecorder.getSurface();
surfaces.add(recorderSurface);
mPreviewBuilder.addTarget(recorderSurface);
// Start a capture session
// Once the session starts, we can update the UI and start recording
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
// UI
mButtonVideo.setText(R.string.stop);
mIsRecordingVideo = true;
// Start recording
mMediaRecorder.start();
}
});
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException | IOException e) {
e.printStackTrace();
}
}
private void closePreviewSession() {
if (mPreviewSession != null) {
mPreviewSession.close();
mPreviewSession = null;
}
}
private void stopRecordingVideo() {
// UI
mIsRecordingVideo = false;
mButtonVideo.setText(R.string.record);
// Stop recording
mMediaRecorder.stop();
mMediaRecorder.reset();
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath,
Toast.LENGTH_SHORT).show();
Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
}
mNextVideoAbsolutePath = null;
startPreview();
}
/**
* Compares two {@code Size}s based on their areas.
*/
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(String message) {
ErrorDialog dialog = new ErrorDialog();
Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
activity.finish();
}
})
.create();
}
}
public static class ConfirmationDialog extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity())
.setMessage(R.string.permission_request)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS,
REQUEST_VIDEO_PERMISSIONS);
}
})
.setNegativeButton(android.R.string.cancel,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
parent.getActivity().finish();
}
})
.create();
}
}
public void getGPSLocation(Context c) {
LocationManager locMgr = (LocationManager) c.getSystemService(Context.LOCATION_SERVICE);
//Check for permissions before setting locations
if (ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED &&
ActivityCompat.checkSelfPermission(getActivity().getApplicationContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
return;
}
Location location = locMgr.getLastKnownLocation(LocationManager.GPS_PROVIDER);
// getLastKnownLocation may return null if no GPS fix has been obtained yet.
if (location != null) {
    longitude = location.getLongitude();
    latitude = location.getLatitude();
}
}
}
|
|
/*
* Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.xml.internal.xsom.impl.parser.state;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
/**
* Dispatches incoming events into sub handlers appropriately
* so that the interleaving semantics will be correctly realized.
*
* @author Kohsuke Kawaguchi ([email protected])
*/
public abstract class NGCCInterleaveFilter implements NGCCEventSource, NGCCEventReceiver {
protected NGCCInterleaveFilter( NGCCHandler parent, int cookie ) {
this._parent = parent;
this._cookie = cookie;
}
protected void setHandlers( NGCCEventReceiver[] receivers ) {
this._receivers = receivers;
}
/** Event receivers. */
protected NGCCEventReceiver[] _receivers;
public int replace(NGCCEventReceiver oldHandler, NGCCEventReceiver newHandler) {
for( int i=0; i<_receivers.length; i++ )
if( _receivers[i]==oldHandler ) {
_receivers[i]=newHandler;
return i;
}
throw new InternalError(); // a bug in RelaxNGCC.
}
/** Parent handler. */
private final NGCCHandler _parent;
/** Cookie given by the parent. */
private final int _cookie;
//
//
// event handler
//
//
/**
* Receiver that is being locked and therefore receives all the events.
* <pre><xmp>
* <interleave>
* <element name="foo"/>
* <element name="bar">
* <element name="foo"/>
* </element>
* </interleave>
* </xmp></pre>
* When processing inside the bar element, this receiver is
* "locked" so that it can correctly receive its child foo element.
*/
private int lockedReceiver;
/**
* Nest level. The lock is released when lockCount becomes 0.
*/
private int lockCount=0;
public void enterElement(
String uri, String localName, String qname,Attributes atts) throws SAXException {
if(isJoining) return; // ignore any token if we are joining. See joinByXXXX.
if(lockCount++==0) {
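// First token at the outermost level: pick the branch that can accept this element
// and keep it locked until its whole subtree has been consumed.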
lockedReceiver = findReceiverOfElement(uri,localName);
if(lockedReceiver==-1) {
// we can't process this token. join.
joinByEnterElement(null,uri,localName,qname,atts);
return;
}
}
_receivers[lockedReceiver].enterElement(uri,localName,qname,atts);
}
public void leaveElement(String uri, String localName, String qname) throws SAXException {
if(isJoining) return; // ignore any token if we are joining. See joinByXXXX.
if( lockCount-- == 0 )
joinByLeaveElement(null,uri,localName,qname);
else
_receivers[lockedReceiver].leaveElement(uri,localName,qname);
}
public void enterAttribute(String uri, String localName, String qname) throws SAXException {
if(isJoining) return; // ignore any token if we are joining. See joinByXXXX.
if(lockCount++==0) {
lockedReceiver = findReceiverOfAttribute(uri,localName);
if(lockedReceiver==-1) {
// we can't process this token. join.
joinByEnterAttribute(null,uri,localName,qname);
return;
}
}
_receivers[lockedReceiver].enterAttribute(uri,localName,qname);
}
public void leaveAttribute(String uri, String localName, String qname) throws SAXException {
if(isJoining) return; // ignore any token if we are joining. See joinByXXXX.
if( lockCount-- == 0 )
joinByLeaveAttribute(null,uri,localName,qname);
else
_receivers[lockedReceiver].leaveAttribute(uri,localName,qname);
}
public void text(String value) throws SAXException {
if(isJoining) return; // ignore any token if we are joining. See joinByXXXX.
if(lockCount!=0)
_receivers[lockedReceiver].text(value);
else {
int receiver = findReceiverOfText();
if(receiver!=-1) _receivers[receiver].text(value);
else joinByText(null,value);
}
}
/**
* Implemented by the generated code to determine the handler
* that can receive the given element.
*
* @return
* Thread ID of the receiver that can handle this event,
* or -1 if none.
*/
protected abstract int findReceiverOfElement( String uri, String local );
/**
* Returns the thread ID of the receiver that can handle the given attribute, or -1 if none.
*/
protected abstract int findReceiverOfAttribute( String uri, String local );
/**
* Returns the thread ID of the receiver that can handle text events, or -1 if none.
*/
protected abstract int findReceiverOfText();
//
//
// join method
//
//
/**
* Set to true when this handler is in the process of
* joining all branches.
*/
private boolean isJoining = false;
/**
* Joins all the child receivers.
*
* <p>
* This method is called by a child receiver when it sees
* something that it cannot handle, or by this object itself
* when it sees an event that it can't process.
*
* <p>
* This method forces children to move to its final state,
* then revert to the parent.
*
* @param source
* If this method is called by one of the child receivers,
* the receiver object. If this method is called by itself,
* null.
*/
public void joinByEnterElement( NGCCEventReceiver source,
String uri, String local, String qname, Attributes atts ) throws SAXException {
if(isJoining) return; // we are already in the process of joining. ignore.
isJoining = true;
// send special token to the rest of the branches.
// these branches don't understand this token, so they will
// try to move to a final state and send the token back to us,
// which this object will ignore (because isJoining==true)
// Otherwise branches will find an error.
for( int i=0; i<_receivers.length; i++ )
if( _receivers[i]!=source )
_receivers[i].enterElement(uri,local,qname,atts);
// revert to the parent
_parent._source.replace(this,_parent);
_parent.onChildCompleted(null,_cookie,true);
// send this event to the parent
_parent.enterElement(uri,local,qname,atts);
}
public void joinByLeaveElement( NGCCEventReceiver source,
String uri, String local, String qname ) throws SAXException {
if(isJoining) return; // we are already in the process of joining. ignore.
isJoining = true;
// send special token to the rest of the branches.
// these branches don't understand this token, so they will
// try to move to a final state and send the token back to us,
// which this object will ignore (because isJoining==true)
// Otherwise branches will find an error.
for( int i=0; i<_receivers.length; i++ )
if( _receivers[i]!=source )
_receivers[i].leaveElement(uri,local,qname);
// revert to the parent
_parent._source.replace(this,_parent);
_parent.onChildCompleted(null,_cookie,true);
// send this event to the parent
_parent.leaveElement(uri,local,qname);
}
public void joinByEnterAttribute( NGCCEventReceiver source,
String uri, String local, String qname ) throws SAXException {
if(isJoining) return; // we are already in the process of joining. ignore.
isJoining = true;
// send special token to the rest of the branches.
// these branches don't understand this token, so they will
// try to move to a final state and send the token back to us,
// which this object will ignore (because isJoining==true)
// Otherwise branches will find an error.
for( int i=0; i<_receivers.length; i++ )
if( _receivers[i]!=source )
_receivers[i].enterAttribute(uri,local,qname);
// revert to the parent
_parent._source.replace(this,_parent);
_parent.onChildCompleted(null,_cookie,true);
// send this event to the parent
_parent.enterAttribute(uri,local,qname);
}
public void joinByLeaveAttribute( NGCCEventReceiver source,
String uri, String local, String qname ) throws SAXException {
if(isJoining) return; // we are already in the process of joining. ignore.
isJoining = true;
// send special token to the rest of the branches.
// these branches don't understand this token, so they will
// try to move to a final state and send the token back to us,
// which this object will ignore (because isJoining==true)
// Otherwise branches will find an error.
for( int i=0; i<_receivers.length; i++ )
if( _receivers[i]!=source )
_receivers[i].leaveAttribute(uri,local,qname);
// revert to the parent
_parent._source.replace(this,_parent);
_parent.onChildCompleted(null,_cookie,true);
// send this event to the parent
_parent.leaveAttribute(uri,local,qname);
}
public void joinByText( NGCCEventReceiver source,
String value ) throws SAXException {
if(isJoining) return; // we are already in the process of joining. ignore.
isJoining = true;
// send special token to the rest of the branches.
// these branches don't understand this token, so they will
// try to move to a final state and send the token back to us,
// which this object will ignore (because isJoining==true)
// Otherwise branches will find an error.
for( int i=0; i<_receivers.length; i++ )
if( _receivers[i]!=source )
_receivers[i].text(value);
// revert to the parent
_parent._source.replace(this,_parent);
_parent.onChildCompleted(null,_cookie,true);
// send this event to the parent
_parent.text(value);
}
//
//
// event dispatching methods
//
//
public void sendEnterAttribute( int threadId,
String uri, String local, String qname) throws SAXException {
_receivers[threadId].enterAttribute(uri,local,qname);
}
public void sendEnterElement( int threadId,
String uri, String local, String qname, Attributes atts) throws SAXException {
_receivers[threadId].enterElement(uri,local,qname,atts);
}
public void sendLeaveAttribute( int threadId,
String uri, String local, String qname) throws SAXException {
_receivers[threadId].leaveAttribute(uri,local,qname);
}
public void sendLeaveElement( int threadId,
String uri, String local, String qname) throws SAXException {
_receivers[threadId].leaveElement(uri,local,qname);
}
public void sendText(int threadId, String value) throws SAXException {
_receivers[threadId].text(value);
}
}
|
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.sync;
import android.app.Dialog;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.SmallTest;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.FragmentTransaction;
import androidx.preference.CheckBoxPreference;
import androidx.preference.Preference;
import androidx.recyclerview.widget.RecyclerView;
import org.junit.After;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.FlakyTest;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.autofill.PersonalDataManager;
import org.chromium.chrome.browser.flags.ChromeFeatureList;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.settings.SettingsActivity;
import org.chromium.chrome.browser.settings.SettingsActivityTestRule;
import org.chromium.chrome.browser.sync.settings.ManageSyncSettings;
import org.chromium.chrome.browser.sync.ui.PassphraseCreationDialogFragment;
import org.chromium.chrome.browser.sync.ui.PassphraseDialogFragment;
import org.chromium.chrome.browser.sync.ui.PassphraseTypeDialogFragment;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.chrome.test.util.ActivityUtils;
import org.chromium.chrome.test.util.ChromeRenderTestRule;
import org.chromium.chrome.test.util.browser.Features;
import org.chromium.chrome.test.util.browser.sync.SyncTestUtil;
import org.chromium.components.browser_ui.settings.ChromeSwitchPreference;
import org.chromium.components.sync.ModelType;
import org.chromium.components.sync.PassphraseType;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Tests for ManageSyncSettings.
*/
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
public class ManageSyncSettingsTest {
private static final String TAG = "ManageSyncSettingsTest";
/**
* Maps ModelTypes to their UI element IDs.
*/
private static final Map<Integer, String> UI_DATATYPES = new HashMap<>();
static {
UI_DATATYPES.put(ModelType.AUTOFILL, ManageSyncSettings.PREF_SYNC_AUTOFILL);
UI_DATATYPES.put(ModelType.BOOKMARKS, ManageSyncSettings.PREF_SYNC_BOOKMARKS);
UI_DATATYPES.put(ModelType.TYPED_URLS, ManageSyncSettings.PREF_SYNC_HISTORY);
UI_DATATYPES.put(ModelType.PASSWORDS, ManageSyncSettings.PREF_SYNC_PASSWORDS);
UI_DATATYPES.put(ModelType.PROXY_TABS, ManageSyncSettings.PREF_SYNC_RECENT_TABS);
UI_DATATYPES.put(ModelType.PREFERENCES, ManageSyncSettings.PREF_SYNC_SETTINGS);
}
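// These entries drive the per-datatype checks below: the tests toggle the individual
// checkbox preferences and use UI_DATATYPES.keySet() as the full set of chosen types.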
private SettingsActivity mSettingsActivity;
@Rule
public SyncTestRule mSyncTestRule = new SyncTestRule();
@Rule
public SettingsActivityTestRule<ManageSyncSettings> mSettingsActivityTestRule =
new SettingsActivityTestRule<>(ManageSyncSettings.class, true);
@Rule
public final ChromeRenderTestRule mRenderTestRule = new ChromeRenderTestRule();
@After
public void tearDown() {
TestThreadUtils.runOnUiThreadBlocking(() -> ProfileSyncService.resetForTests());
}
@Test
@SmallTest
@Feature({"Sync"})
public void testSyncEverythingAndDataTypes() {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
ManageSyncSettings fragment = startManageSyncPreferences();
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
Collection<CheckBoxPreference> dataTypes = getDataTypes(fragment).values();
assertSyncOnState(fragment);
mSyncTestRule.togglePreference(syncEverything);
// When syncEverything is toggled off, all data types are checked and enabled by default.
// The user needs to manually uncheck a data type to turn its sync off.
for (CheckBoxPreference dataType : dataTypes) {
Assert.assertTrue(dataType.isChecked());
Assert.assertTrue(dataType.isEnabled());
}
}
@Test
@SmallTest
@Feature({"Sync"})
public void testSettingDataTypes() {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
ManageSyncSettings fragment = startManageSyncPreferences();
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
Map<Integer, CheckBoxPreference> dataTypes = getDataTypes(fragment);
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
assertSyncOnState(fragment);
mSyncTestRule.togglePreference(syncEverything);
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
Assert.assertFalse(syncEverything.isChecked());
for (CheckBoxPreference dataType : dataTypes.values()) {
Assert.assertTrue(dataType.isChecked());
Assert.assertTrue(dataType.isEnabled());
}
Set<Integer> expectedTypes = new HashSet<>(dataTypes.keySet());
assertChosenDataTypesAre(expectedTypes);
mSyncTestRule.togglePreference(dataTypes.get(ModelType.AUTOFILL));
mSyncTestRule.togglePreference(dataTypes.get(ModelType.PASSWORDS));
expectedTypes.remove(ModelType.AUTOFILL);
expectedTypes.remove(ModelType.PASSWORDS);
closeFragment(fragment);
assertChosenDataTypesAre(expectedTypes);
}
@Test
@SmallTest
@Feature({"Sync"})
public void testPaymentsIntegrationChecked() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(true);
ManageSyncSettings fragment = startManageSyncPreferences();
assertSyncOnState(fragment);
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
Assert.assertFalse(paymentsIntegration.isEnabled());
Assert.assertTrue(paymentsIntegration.isChecked());
}
@Test
@SmallTest
@Feature({"Sync"})
public void testPaymentsIntegrationUnchecked() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(false);
mSyncTestRule.setChosenDataTypes(false, UI_DATATYPES.keySet());
ManageSyncSettings fragment = startManageSyncPreferences();
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
// All data types are enabled by default as syncEverything is toggled off.
Assert.assertTrue(paymentsIntegration.isEnabled());
Assert.assertFalse(paymentsIntegration.isChecked());
}
@Test
@SmallTest
@Feature({"Sync"})
public void testPaymentsIntegrationCheckboxDisablesPaymentsIntegration() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(true);
ManageSyncSettings fragment = startManageSyncPreferences();
assertSyncOnState(fragment);
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
mSyncTestRule.togglePreference(syncEverything);
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
mSyncTestRule.togglePreference(paymentsIntegration);
closeFragment(fragment);
assertPaymentsIntegrationEnabled(false);
}
@Test
@SmallTest
@FlakyTest(message = "crbug.com/988622")
@Feature({"Sync"})
public void testPaymentsIntegrationCheckboxEnablesPaymentsIntegration() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(false);
mSyncTestRule.setChosenDataTypes(false, UI_DATATYPES.keySet());
ManageSyncSettings fragment = startManageSyncPreferences();
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
mSyncTestRule.togglePreference(paymentsIntegration);
closeFragment(fragment);
assertPaymentsIntegrationEnabled(true);
}
@DisabledTest(message = "crbug.com/994726")
@Test
@SmallTest
@Feature({"Sync"})
public void testPaymentsIntegrationCheckboxClearsServerAutofillCreditCards() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(true);
Assert.assertFalse(
"There should be no server cards", mSyncTestRule.hasServerAutofillCreditCards());
mSyncTestRule.addServerAutofillCreditCard();
Assert.assertTrue(
"There should be server cards", mSyncTestRule.hasServerAutofillCreditCards());
ManageSyncSettings fragment = startManageSyncPreferences();
assertSyncOnState(fragment);
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
mSyncTestRule.togglePreference(syncEverything);
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
mSyncTestRule.togglePreference(paymentsIntegration);
closeFragment(fragment);
assertPaymentsIntegrationEnabled(false);
Assert.assertFalse("There should be no server cards remaining",
mSyncTestRule.hasServerAutofillCreditCards());
}
@Test
@SmallTest
@Feature({"Sync"})
public void testPaymentsIntegrationDisabledByAutofillSyncCheckbox() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(true);
ManageSyncSettings fragment = startManageSyncPreferences();
assertSyncOnState(fragment);
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
mSyncTestRule.togglePreference(syncEverything);
CheckBoxPreference syncAutofill =
(CheckBoxPreference) fragment.findPreference(ManageSyncSettings.PREF_SYNC_AUTOFILL);
mSyncTestRule.togglePreference(syncAutofill);
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
Assert.assertFalse(paymentsIntegration.isEnabled());
Assert.assertFalse(paymentsIntegration.isChecked());
closeFragment(fragment);
assertPaymentsIntegrationEnabled(false);
}
@Test
@SmallTest
@Feature({"Sync"})
public void testPaymentsIntegrationEnabledBySyncEverything() {
mSyncTestRule.setUpAccountAndSignInForTesting();
mSyncTestRule.setPaymentsIntegrationEnabled(false);
mSyncTestRule.disableDataType(ModelType.AUTOFILL);
// Get the UI elements.
ManageSyncSettings fragment = startManageSyncPreferences();
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
CheckBoxPreference syncAutofill =
(CheckBoxPreference) fragment.findPreference(ManageSyncSettings.PREF_SYNC_AUTOFILL);
CheckBoxPreference paymentsIntegration = (CheckBoxPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_PAYMENTS_INTEGRATION);
// All three are unchecked and payments is disabled.
Assert.assertFalse(syncEverything.isChecked());
Assert.assertFalse(syncAutofill.isChecked());
Assert.assertTrue(syncAutofill.isEnabled());
Assert.assertFalse(paymentsIntegration.isChecked());
Assert.assertFalse(paymentsIntegration.isEnabled());
// All three are checked after toggling sync everything.
mSyncTestRule.togglePreference(syncEverything);
Assert.assertTrue(syncEverything.isChecked());
Assert.assertTrue(syncAutofill.isChecked());
Assert.assertFalse(syncAutofill.isEnabled());
Assert.assertTrue(paymentsIntegration.isChecked());
Assert.assertFalse(paymentsIntegration.isEnabled());
// Closing the fragment enables payments integration.
closeFragment(fragment);
assertPaymentsIntegrationEnabled(true);
}
/**
* Test that choosing a passphrase type while sync is off doesn't crash.
*
* This is a regression test for http://crbug.com/507557.
*/
@Test
@SmallTest
@Feature({"Sync"})
public void testChoosePassphraseTypeWhenSyncIsOff() {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
ManageSyncSettings fragment = startManageSyncPreferences();
Preference encryption = getEncryption(fragment);
clickPreference(encryption);
final PassphraseTypeDialogFragment typeFragment = getPassphraseTypeDialogFragment();
mSyncTestRule.stopSync();
TestThreadUtils.runOnUiThreadBlocking(() -> {
typeFragment.onItemClick(null, null, 0, PassphraseType.CUSTOM_PASSPHRASE);
});
// No crash means we passed.
}
/**
* Test that entering a passphrase while sync is off doesn't crash.
*/
@Test
@SmallTest
@Feature({"Sync"})
public void testEnterPassphraseWhenSyncIsOff() {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
final ManageSyncSettings fragment = startManageSyncPreferences();
mSyncTestRule.stopSync();
TestThreadUtils.runOnUiThreadBlockingNoException(
() -> fragment.onPassphraseEntered("passphrase"));
// No crash means we passed.
}
@Test
@SmallTest
@Feature({"Sync"})
public void testPassphraseCreation() {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
final ManageSyncSettings fragment = startManageSyncPreferences();
TestThreadUtils.runOnUiThreadBlocking(
() -> fragment.onPassphraseTypeSelected(PassphraseType.CUSTOM_PASSPHRASE));
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
PassphraseCreationDialogFragment pcdf = getPassphraseCreationDialogFragment();
AlertDialog dialog = (AlertDialog) pcdf.getDialog();
Button okButton = dialog.getButton(Dialog.BUTTON_POSITIVE);
EditText enterPassphrase = (EditText) dialog.findViewById(R.id.passphrase);
EditText confirmPassphrase = (EditText) dialog.findViewById(R.id.confirm_passphrase);
// Error if you try to submit empty passphrase.
Assert.assertNull(confirmPassphrase.getError());
clickButton(okButton);
Assert.assertTrue(pcdf.isResumed());
Assert.assertNotNull(enterPassphrase.getError());
Assert.assertNull(confirmPassphrase.getError());
// Error if you try to submit with only the first box filled.
clearError(confirmPassphrase);
setText(enterPassphrase, "foo");
clickButton(okButton);
Assert.assertTrue(pcdf.isResumed());
Assert.assertNull(enterPassphrase.getError());
Assert.assertNotNull(confirmPassphrase.getError());
// Removing the first box's text should only show the empty-passphrase error message.
setText(enterPassphrase, "");
clickButton(okButton);
Assert.assertNotNull(enterPassphrase.getError());
Assert.assertNull(confirmPassphrase.getError());
// Error if you try to submit with only the second box filled.
clearError(confirmPassphrase);
setText(confirmPassphrase, "foo");
clickButton(okButton);
Assert.assertTrue(pcdf.isResumed());
Assert.assertNull(enterPassphrase.getError());
Assert.assertNotNull(confirmPassphrase.getError());
// No error is shown for mismatched text until the button is pressed.
setText(enterPassphrase, "foo");
clearError(confirmPassphrase);
setText(confirmPassphrase, "bar");
Assert.assertNull(enterPassphrase.getError());
Assert.assertNull(confirmPassphrase.getError());
// Error if you try to submit mismatched text.
clearError(confirmPassphrase);
clickButton(okButton);
Assert.assertTrue(pcdf.isResumed());
Assert.assertNull(enterPassphrase.getError());
Assert.assertNotNull(confirmPassphrase.getError());
// Success if text matches.
setText(confirmPassphrase, "foo");
clickButton(okButton);
Assert.assertFalse(pcdf.isResumed());
}
/**
* Test the trusted vault key retrieval flow, which involves launching an intent and finally
* calling TrustedVaultClient.notifyKeysChanged().
*/
@Test
@LargeTest
@Feature({"Sync"})
public void testTrustedVaultKeyRetrieval() {
final byte[] trustedVaultKey = new byte[] {1, 2, 3, 4};
// Keys won't be populated by FakeTrustedVaultClientBackend until the corresponding key
// retrieval activity is about to complete.
SyncTestRule.FakeTrustedVaultClientBackend.get().setKeys(
Collections.singletonList(trustedVaultKey));
mSyncTestRule.getFakeServerHelper().setTrustedVaultNigori(trustedVaultKey);
mSyncTestRule.setUpAccountAndSignInForTesting();
// Initially FakeTrustedVaultClientBackend doesn't provide any keys, so PSS should remain
// in TrustedVaultKeyRequired state.
SyncTestUtil.waitForTrustedVaultKeyRequired(true);
final ManageSyncSettings fragment = startManageSyncPreferences();
// Mimic the user tapping on Encryption. This should start DummyKeyRetrievalActivity and
// notify the native client that keys were changed. Right before DummyKeyRetrievalActivity
// completes, FakeTrustedVaultClientBackend will start populating keys.
Preference encryption = fragment.findPreference(ManageSyncSettings.PREF_ENCRYPTION);
clickPreference(encryption);
// Native client should fetch new keys and get out of TrustedVaultKeyRequired state.
SyncTestUtil.waitForTrustedVaultKeyRequired(false);
}
@Test
@SmallTest
@Feature({"Sync"})
@Features.EnableFeatures(ChromeFeatureList.MOBILE_IDENTITY_CONSISTENCY)
public void testAdvancedSyncFlowPreferencesAndBottomBarShown() {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
final ManageSyncSettings fragment = startManageSyncPreferencesFromSyncConsentFlow();
Assert.assertTrue(
fragment.findPreference(ManageSyncSettings.PREF_SYNCING_CATEGORY).isVisible());
Assert.assertTrue(
fragment.findPreference(ManageSyncSettings.PREF_SEARCH_AND_BROWSE_CATEGORY)
.isVisible());
Assert.assertNotNull(fragment.getView().findViewById(R.id.bottom_bar_shadow));
Assert.assertNotNull(fragment.getView().findViewById(R.id.bottom_bar_button_container));
}
@Test
@LargeTest
@Feature({"Sync", "RenderTest"})
@Features.EnableFeatures(ChromeFeatureList.MOBILE_IDENTITY_CONSISTENCY)
public void testAdvancedSyncFlowTopView() throws Exception {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
final ManageSyncSettings fragment = startManageSyncPreferencesFromSyncConsentFlow();
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
mRenderTestRule.render(fragment.getView(), "advanced_sync_flow_top_view");
}
@Test
@LargeTest
@Feature({"Sync", "RenderTest"})
@Features.EnableFeatures(ChromeFeatureList.MOBILE_IDENTITY_CONSISTENCY)
public void testAdvancedSyncFlowBottomView() throws Exception {
mSyncTestRule.setUpAccountAndSignInForTesting();
SyncTestUtil.waitForSyncActive();
final ManageSyncSettings fragment = startManageSyncPreferencesFromSyncConsentFlow();
TestThreadUtils.runOnUiThreadBlocking(() -> {
RecyclerView recyclerView = fragment.getView().findViewById(R.id.recycler_view);
recyclerView.scrollToPosition(recyclerView.getAdapter().getItemCount() - 1);
});
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
mRenderTestRule.render(fragment.getView(), "advanced_sync_flow_bottom_view");
}
private ManageSyncSettings startManageSyncPreferences() {
mSettingsActivity = mSettingsActivityTestRule.startSettingsActivity();
return mSettingsActivityTestRule.getFragment();
}
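// Starts ManageSyncSettings with createArguments(true), i.e. as the advanced settings page of the
// sync consent flow, which also shows the bottom bar (requires MOBILE_IDENTITY_CONSISTENCY).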
private ManageSyncSettings startManageSyncPreferencesFromSyncConsentFlow() {
Assert.assertTrue(
ChromeFeatureList.isEnabled(ChromeFeatureList.MOBILE_IDENTITY_CONSISTENCY));
mSettingsActivity = mSettingsActivityTestRule.startSettingsActivity(
ManageSyncSettings.createArguments(true));
return mSettingsActivityTestRule.getFragment();
}
private void closeFragment(ManageSyncSettings fragment) {
FragmentTransaction transaction =
mSettingsActivity.getSupportFragmentManager().beginTransaction();
transaction.remove(fragment);
transaction.commit();
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
private ChromeSwitchPreference getSyncEverything(ManageSyncSettings fragment) {
return (ChromeSwitchPreference) fragment.findPreference(
ManageSyncSettings.PREF_SYNC_EVERYTHING);
}
private Map<Integer, CheckBoxPreference> getDataTypes(ManageSyncSettings fragment) {
Map<Integer, CheckBoxPreference> dataTypes = new HashMap<>();
for (Map.Entry<Integer, String> uiDataType : UI_DATATYPES.entrySet()) {
Integer modelType = uiDataType.getKey();
String prefId = uiDataType.getValue();
dataTypes.put(modelType, (CheckBoxPreference) fragment.findPreference(prefId));
}
return dataTypes;
}
private Preference getGoogleActivityControls(ManageSyncSettings fragment) {
return fragment.findPreference(ManageSyncSettings.PREF_GOOGLE_ACTIVITY_CONTROLS);
}
private Preference getEncryption(ManageSyncSettings fragment) {
return fragment.findPreference(ManageSyncSettings.PREF_ENCRYPTION);
}
private Preference getManageData(ManageSyncSettings fragment) {
return fragment.findPreference(ManageSyncSettings.PREF_SYNC_MANAGE_DATA);
}
private PassphraseDialogFragment getPassphraseDialogFragment() {
return ActivityUtils.waitForFragment(
mSettingsActivity, ManageSyncSettings.FRAGMENT_ENTER_PASSPHRASE);
}
private PassphraseTypeDialogFragment getPassphraseTypeDialogFragment() {
return ActivityUtils.waitForFragment(
mSettingsActivity, ManageSyncSettings.FRAGMENT_PASSPHRASE_TYPE);
}
private PassphraseCreationDialogFragment getPassphraseCreationDialogFragment() {
return ActivityUtils.waitForFragment(
mSettingsActivity, ManageSyncSettings.FRAGMENT_CUSTOM_PASSPHRASE);
}
private void assertSyncOnState(ManageSyncSettings fragment) {
ChromeSwitchPreference syncEverything = getSyncEverything(fragment);
Assert.assertTrue("The sync everything switch should be on.", syncEverything.isChecked());
Assert.assertTrue(
"The sync everything switch should be enabled.", syncEverything.isEnabled());
for (CheckBoxPreference dataType : getDataTypes(fragment).values()) {
String key = dataType.getKey();
Assert.assertTrue("Data type " + key + " should be checked.", dataType.isChecked());
Assert.assertFalse("Data type " + key + " should be disabled.", dataType.isEnabled());
}
Assert.assertTrue("The google activity controls button should always be enabled.",
getGoogleActivityControls(fragment).isEnabled());
Assert.assertTrue("The encryption button should always be enabled.",
getEncryption(fragment).isEnabled());
Assert.assertTrue("The manage sync data button should be always enabled.",
getManageData(fragment).isEnabled());
}
private void assertChosenDataTypesAre(final Set<Integer> enabledDataTypes) {
final Set<Integer> disabledDataTypes = new HashSet<>(UI_DATATYPES.keySet());
disabledDataTypes.removeAll(enabledDataTypes);
TestThreadUtils.runOnUiThreadBlocking(() -> {
Set<Integer> actualDataTypes =
mSyncTestRule.getProfileSyncService().getChosenDataTypes();
Assert.assertTrue(actualDataTypes.containsAll(enabledDataTypes));
Assert.assertTrue(Collections.disjoint(disabledDataTypes, actualDataTypes));
});
}
private void assertPaymentsIntegrationEnabled(final boolean enabled) {
TestThreadUtils.runOnUiThreadBlocking(() -> {
Assert.assertEquals(enabled, PersonalDataManager.isPaymentsIntegrationEnabled());
});
}
private void clickPreference(final Preference pref) {
TestThreadUtils.runOnUiThreadBlockingNoException(
() -> pref.getOnPreferenceClickListener().onPreferenceClick(pref));
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
private void clickButton(final Button button) {
TestThreadUtils.runOnUiThreadBlocking((Runnable) button::performClick);
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
private void setText(final TextView textView, final String text) {
TestThreadUtils.runOnUiThreadBlocking(() -> textView.setText(text));
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
private void clearError(final TextView textView) {
TestThreadUtils.runOnUiThreadBlocking(() -> textView.setError(null));
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
}
|
|
package com.cloudmine.api.db;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.os.Handler;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.cloudmine.api.CMApiCredentials;
import com.cloudmine.api.CMObject;
import com.cloudmine.api.CMSessionToken;
import com.cloudmine.api.JavaCMUser;
import com.cloudmine.api.LocallySavable;
import com.cloudmine.api.rest.BaseObjectDeleteRequest;
import com.cloudmine.api.rest.BaseObjectLoadRequest;
import com.cloudmine.api.rest.BaseObjectModificationRequest;
import com.cloudmine.api.rest.CloudMineRequest;
import com.cloudmine.api.rest.ObjectLoadRequestBuilder;
import com.cloudmine.api.rest.SharedRequestQueueHolders;
import com.cloudmine.api.rest.options.CMServerFunction;
import com.cloudmine.api.rest.response.CMObjectResponse;
import com.cloudmine.api.rest.response.ObjectModificationResponse;
import com.fasterxml.jackson.annotation.JsonIgnore;
import me.cloudmine.annotations.Expand;
import me.cloudmine.annotations.Optional;
import me.cloudmine.annotations.Single;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import static com.cloudmine.api.rest.SharedRequestQueueHolders.getRequestQueue;
/**
* A {@link CMObject} that can be stored locally. Note that local storage happens on the calling thread.
* <br>
* Copyright CloudMine, Inc. All rights reserved<br>
* See LICENSE file included with SDK for details.
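* <p>
* A minimal usage sketch; {@code Note} is a hypothetical subclass used only for illustration:
* <pre>{@code
* Note note = new Note();                      // Note extends BaseLocallySavableCMObject
* boolean stored = note.saveLocally(context);  // synchronous, runs on the calling thread
* note.saveEventually(context);                // stores locally now, pushes to the server later
* Note reloaded = BaseLocallySavableCMObject.loadLocalObject(context, note.getObjectId());
* }</pre>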
*/
public class BaseLocallySavableCMObject extends CMObject implements LocallySavable {
private static final Logger LOG = LoggerFactory.getLogger(BaseLocallySavableCMObject.class);
@Expand(isStatic = true)
public static CloudMineRequest saveObjects(Context context, Collection <? extends CMObject> objects, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener<ObjectModificationResponse> listener, @Optional Response.ErrorListener errorListener) {
CloudMineRequest request = new BaseObjectModificationRequest(CMObject.massTransportable(objects), token, apiCredentials, serverFunction, listener, errorListener);
getRequestQueue(context).add(request);
return request;
}
@Expand(isStatic = true)
public static CloudMineRequest saveObjects(Context context, Collection <? extends CMObject> objects, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, Handler handler) {
CloudMineRequest request = new BaseObjectModificationRequest(CMObject.massTransportable(objects), token, apiCredentials, serverFunction, null, null);
request.setHandler(handler);
getRequestQueue(context).add(request);
return request;
}
@Expand(isStatic = true)
public static CloudMineRequest loadAllObjects(Context context, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, Handler handler) {
return loadObjects(context, (Collection<String>) null, token, apiCredentials, serverFunction, handler);
}
@Expand(isStatic = true)
public static CloudMineRequest loadAllObjects(Context context, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener<CMObjectResponse> listener, @Optional Response.ErrorListener errorListener) {
return loadObjects(context, (Collection<String>) null, token, apiCredentials, serverFunction, listener, errorListener);
}
@Expand(isStatic = true)
public static CloudMineRequest loadObject(Context context, String objectId, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener<CMObjectResponse> listener, @Optional Response.ErrorListener errorListener) {
return loadObjects(context, Collections.singleton(objectId), token, apiCredentials, serverFunction, listener, errorListener);
}
@Expand(isStatic = true)
public static CloudMineRequest loadObjects(Context context, Collection <String> objectIds, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, Response.Listener<CMObjectResponse> listener, @Optional Response.ErrorListener errorListener) {
RequestQueue queue = getRequestQueue(context);
BaseObjectLoadRequest request = new BaseObjectLoadRequest(objectIds, token, apiCredentials, serverFunction, listener, errorListener);
queue.add(request);
return request;
}
@Expand(isStatic = true)
public static CloudMineRequest loadObjects(Context context, Collection <String> objectIds, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, Handler handler) {
RequestQueue queue = getRequestQueue(context);
BaseObjectLoadRequest request = new BaseObjectLoadRequest(objectIds, token, apiCredentials, serverFunction, null, null);
request.setHandler(handler);
queue.add(request);
return request;
}
@Expand(isStatic = true)
public static CloudMineRequest searchObjects(Context context, String searchString, @Optional CMSessionToken token, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener<CMObjectResponse> listener, @Optional Response.ErrorListener errorListener) {
RequestQueue queue = getRequestQueue(context);
BaseObjectLoadRequest request = new ObjectLoadRequestBuilder(token, listener, errorListener).search(searchString).runFunction(serverFunction).useCredentials(apiCredentials).build();
queue.add(request);
return request;
}
@Expand(isStatic = true)
public static CloudMineRequest delete(Context context, @Single Collection<String> objectIds, @Optional CMSessionToken sessionToken, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener<ObjectModificationResponse> successListener, @Optional Response.ErrorListener errorListener) {
BaseObjectDeleteRequest deleteRequest = new BaseObjectDeleteRequest(objectIds, sessionToken, apiCredentials, serverFunction, successListener, errorListener);
getRequestQueue(context).add(deleteRequest);
return deleteRequest;
}
/**
* Load the locally stored copy of the object with the given id. Will throw a {@link ClassCastException} if an object
* exists with the given ID but is of a different type.
* @param context activity context
* @param objectId object id of the object to load
* @param <OBJECT_TYPE> the type of the object to load. May be a superclass of the actual type
* @return the object if it was found, or null
*/
public static <OBJECT_TYPE extends BaseLocallySavableCMObject> OBJECT_TYPE loadLocalObject(Context context, String objectId) {
return CMObjectDBOpenHelper.getCMObjectDBHelper(context).loadObjectById(objectId);
}
/**
* Delete the specified object, if it exists
* @param context activity context
* @param objectId object id of object to delete
* @return negative number if operation failed, 0 if succeeded but nothing was deleted, 1 if the object was deleted
*/
public static int deleteLocalObject(Context context, String objectId) {
return CMObjectDBOpenHelper.getCMObjectDBHelper(context).deleteObjectById(objectId);
}
/**
* Load all of the objects of the specified class that are stored locally.
* @param context activity context
* @param klass the class of the objects to load
* @param <OBJECT_TYPE> the type of the objects to load
* @return the locally stored objects of the given class
*/
public static <OBJECT_TYPE extends BaseLocallySavableCMObject> List<OBJECT_TYPE> loadLocalObjectsByClass(Context context, Class<OBJECT_TYPE> klass) {
return CMObjectDBOpenHelper.getCMObjectDBHelper(context).loadObjectsByClass(klass);
}
public static List<BaseLocallySavableCMObject> loadLocalObjects(Context context) {
return CMObjectDBOpenHelper.getCMObjectDBHelper(context).loadAllObjects();
}
@JsonIgnore
private Date lastLocalSaveDate;
/**
* Save this object to local storage. Runs on the calling thread.
* @param context activity context
* @return true if the object was saved, false otherwise
*/
public boolean saveLocally(Context context) {
CMObjectDBOpenHelper cmObjectDBHelper = CMObjectDBOpenHelper.getCMObjectDBHelper(context);
boolean wasSaved = cmObjectDBHelper.insertCMObjectIfNewer(this);
if(wasSaved) lastLocalSaveDate = new Date();
return wasSaved;
}
/**
* Save this object to local storage, then eventually save it to the server. When the object is sent to the server,
* the most recent version from the database is used, so changes made by calls to saveEventually or saveLocally that
* occur before the request is sent will be included in what is sent to the server.
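* <p>
* A minimal sketch ({@code note} is a hypothetical, already populated instance):
* <pre>{@code
* boolean queued = note.saveEventually(context); // stored locally immediately, sent to the server later
* }</pre>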
* @param context activity context
* @return true if the request was inserted correctly and will eventually be saved
*/
public boolean saveEventually(Context context) {
return saveEventually(context, (CMSessionToken)null);
}
public boolean saveEventually(Context context, CMSessionToken sessionToken) {
boolean wasCreated = saveLocally(context);
LOG.debug("Was saved locally? " + wasCreated);
if(wasCreated) {
RequestDBObject request;
if(sessionToken != null) {
request = RequestDBObject.createUserObjectRequest(getObjectId(), sessionToken);
} else {
request = RequestDBObject.createApplicationObjectRequest(getObjectId());
}
wasCreated = saveEventually(context, request);
}
return wasCreated;
}
boolean saveEventually(Context context, RequestDBObject request) {
boolean wasCreated = false;
try {
RequestDBOpenHelper.getRequestDBOpenHelper(context).insertRequest(request);
wasCreated = true;
LOG.debug("Request was inserted");
} catch (Exception e) {
wasCreated = false;
LOG.error("Failed", e);
}
if(wasCreated) {
context.startService(new Intent(context, RequestPerformerService.class));
}
return wasCreated;
}
public int deleteLocally(Context context) {
return BaseLocallySavableCMObject.deleteLocalObject(context, getObjectId());
}
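/**
* Save this object to the server asynchronously. For user level objects the associated user must be
* logged in; otherwise the error listener is invoked and a fake request is returned.
*/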
@Expand
public CloudMineRequest save(Context context, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener< ObjectModificationResponse > successListener, @Optional Response.ErrorListener errorListener) {
RequestQueue queue = getRequestQueue(context);
BaseObjectModificationRequest request;
if(isUserLevel()) {
JavaCMUser user = getUser();
if(user != null && user.getSessionToken() != null) {
request = new BaseObjectModificationRequest(this, user.getSessionToken(), apiCredentials, serverFunction, successListener, errorListener);
} else {
if(errorListener != null) errorListener.onErrorResponse(new VolleyError("Can't save user level object when the associated user is not logged in"));
return CloudMineRequest.FAKE_REQUEST;
}
} else {
request = new BaseObjectModificationRequest(this, null, apiCredentials, serverFunction, successListener, errorListener);
}
queue.add(request);
return request;
}
@Expand
public CloudMineRequest save(Context context, CMSessionToken sessionToken, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener< ObjectModificationResponse > successListener, @Optional Response.ErrorListener errorListener) {
RequestQueue queue = getRequestQueue(context);
BaseObjectModificationRequest request = new BaseObjectModificationRequest(this, sessionToken, apiCredentials, serverFunction, successListener, errorListener);
queue.add(request);
return request;
}
public void grantAccess(BaseLocallySavableCMAccessList list) {
if(list == null)
return;
addAccessListId(list.getObjectId());
}
/**
* Get the last time this object was stored locally.
* @return the last local save time in seconds since the epoch, or 0 if the object has never been stored locally
*/
@JsonIgnore
public int getLastLocalSavedDateAsSeconds() {
if(lastLocalSaveDate == null) return 0;
return (int) (lastLocalSaveDate.getTime() / 1000);
}
@JsonIgnore
public Date getLastLocalSaveDate() {
return lastLocalSaveDate;
}
@JsonIgnore
protected void setLastLocalSaveDate(Date lastSaveDate) {
this.lastLocalSaveDate = lastSaveDate;
}
@Expand
public CloudMineRequest delete(Context context, @Optional CMSessionToken sessionToken, @Optional CMApiCredentials apiCredentials, @Optional CMServerFunction serverFunction, @Optional Response.Listener<ObjectModificationResponse> successListener, @Optional Response.ErrorListener errorListener) {
CloudMineRequest request = new BaseObjectDeleteRequest(Collections.singleton(getObjectId()), sessionToken, apiCredentials, serverFunction, successListener, errorListener);
SharedRequestQueueHolders.getRequestQueue(context).add(request);
return request;
}
/**
* Used by the CMObjectDBOpenHelper to insert this object into the database.
* @return the ContentValues representation of this object for the local database
*/
ContentValues toContentValues() {
ContentValues values = new ContentValues();
values.put(CMObjectDBOpenHelper.OBJECT_ID_COLUMN, getObjectId());
values.put(CMObjectDBOpenHelper.JSON_COLUMN, transportableRepresentation());
values.put(CMObjectDBOpenHelper.SAVED_DATE_COLUMN, getLastLocalSavedDateAsSeconds());
values.put(CMObjectDBOpenHelper.CLASS_NAME_COLUMN, getClassName());
return values;
}
}
|
|
package com.example.lcom67.productdemoapp;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ContentResolver;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.example.lcom67.productdemoapp.AsyncTaskClass.GetPostMethodClass;
import com.example.lcom67.productdemoapp.BottomSheet.BottomSheet;
import com.example.lcom67.productdemoapp.BottomSheet.BottomSheetListener;
import com.squareup.picasso.Picasso;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
public class ProfileDetailActivity extends AppCompatActivity implements View.OnClickListener, BottomSheetListener {
String URL = "http://192.168.200.64:4000/";
TextView firstname, lastname, username, address;
ImageView profileImage;
String str_firstname, str_lastname, str_username, str_address, str_profileImage;
private Toolbar toolbar;
JSONObject jsonObject;
int sign_id;
private GetPostMethodClass getPostMethodClass;
String selectURL = URL + "profile/select";
String updateImageURL = URL + "profile/updateimage";
private static final int ACTION_REQUEST_GALLERY = 1;
private static final int ACTION_REQUEST_CAMERA = 2;
private Bitmap bitmap;
int mState;
private String picturePath;
MenuItem save;
private Uri imageUri;
int id;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_profile_detail);
jsonObject = new JSONObject();
SharedPreferences prefs = getSharedPreferences("MyPref", MODE_PRIVATE);
sign_id = prefs.getInt("signup_id", 0);
toolbar = (Toolbar) findViewById(R.id.tool_bar); // Attaching the layout to the toolbar object
toolbar.setTitle("Profile Detail");
setSupportActionBar(toolbar);
firstname = (TextView) findViewById(R.id.detail_first_name);
lastname = (TextView) findViewById(R.id.detail_last_name);
username = (TextView) findViewById(R.id.detail_user_name);
address = (TextView) findViewById(R.id.detail_address);
profileImage = (ImageView) findViewById(R.id.img_profile);
mState = 1;
invalidateOptionsMenu();
profileImage.setOnClickListener(this);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
onBackPressed();
}
});
try {
jsonObject.put("signup_id", sign_id);
} catch (JSONException e) {
e.printStackTrace();
}
new LongOperation().execute(selectURL);
}
@Override
public void onClick(View view) {
id = view.getId();
if (id == R.id.img_profile) {
/* AlertDialog.Builder builder = new AlertDialog.Builder(ProfileDetailActivity.this);
builder.setTitle("Choose Image Source");
builder.setItems(new CharSequence[]{"Gallery", "Camera"},
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case 0:
Intent i = new Intent(Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(i, ACTION_REQUEST_GALLERY);
break;
case 1:
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File photo = new File(Environment.getExternalStorageDirectory(), timeStamp + ".jpg");
intent.putExtra(MediaStore.EXTRA_OUTPUT,
Uri.fromFile(photo));
imageUri = Uri.fromFile(photo);
startActivityForResult(intent, ACTION_REQUEST_CAMERA);
break;
default:
break;
}
}
});
builder.show();*/
new BottomSheet.Builder(this)
.setSheet(R.menu.profile_sheet)
.grid()
.setTitle("Options")
.setListener(this)
.show();
mState = 0;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
mState = 0;
invalidateOptionsMenu();
if (requestCode == ACTION_REQUEST_CAMERA && resultCode == Activity.RESULT_OK) {
super.onActivityResult(requestCode, resultCode, data);
Uri selectedImage = imageUri;
getContentResolver().notifyChange(selectedImage, null);
ContentResolver cr = getContentResolver();
try
{
bitmap = android.provider.MediaStore.Images.Media.getBitmap(cr, selectedImage);
picturePath = selectedImage.getPath();
ExifInterface ei = new ExifInterface(picturePath);
int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_UNDEFINED);
switch (orientation)
{
case ExifInterface.ORIENTATION_ROTATE_90:
bitmap = rotateImage(bitmap, 90);
break;
case ExifInterface.ORIENTATION_ROTATE_180:
bitmap = rotateImage(bitmap, 180);
break;
case ExifInterface.ORIENTATION_ROTATE_270:
bitmap = rotateImage(bitmap, 270);
break;
case ExifInterface.ORIENTATION_NORMAL:
default:
break;
}
profileImage.setImageBitmap(bitmap);
Toast.makeText(this, picturePath, Toast.LENGTH_LONG).show();
} catch (Exception e) {
Toast.makeText(this, "Failed to load", Toast.LENGTH_SHORT).show();
Log.d("Message", "Camera" + e.toString());
}
} else if (requestCode == ACTION_REQUEST_GALLERY && resultCode == Activity.RESULT_OK) {
if (data.getData() != null) {
Uri selectedImage = data.getData();
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver().query(selectedImage,
filePathColumn, null, null, null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
picturePath = cursor.getString(columnIndex);
cursor.close();
profileImage.setImageBitmap(BitmapFactory.decodeFile(picturePath));
} else {
bitmap = (Bitmap) data.getExtras().get("data");
profileImage.setImageBitmap(bitmap);
}
super.onActivityResult(requestCode, resultCode, data);
}
}
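// Returns a copy of the bitmap rotated by the given angle in degrees; used above to honor the EXIF orientation.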
public static Bitmap rotateImage(Bitmap source, float angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu, menu);
save = menu.findItem(R.id.btn_save);
if (mState == 1) {
save.setVisible(false);
} else {
save.setVisible(true);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.btn_save) {
new UpdateImageOperation().execute(updateImageURL);
}
return super.onOptionsItemSelected(item);
}
@Override
public void onSheetShown(@NonNull BottomSheet bottomSheet) {
}
@Override
public void onSheetItemSelected(@NonNull BottomSheet bottomSheet, MenuItem item) {
if (item.getItemId() == R.id.profile_gallery) {
Intent i = new Intent(Intent.ACTION_PICK,
android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(i, ACTION_REQUEST_GALLERY);
} else if (item.getItemId() == R.id.profile_camera) {
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File photo = new File(Environment.getExternalStorageDirectory(), timeStamp + ".jpg");
intent.putExtra(MediaStore.EXTRA_OUTPUT,
Uri.fromFile(photo));
imageUri = Uri.fromFile(photo);
startActivityForResult(intent, ACTION_REQUEST_CAMERA);
} else if (item.getItemId() == R.id.profile_delete) {
profileImage.setImageResource(R.drawable.profile_image);
}
}
@Override
public void onSheetDismissed(@NonNull BottomSheet bottomSheet, @DismissEvent int dismissEvent) {
}
private class LongOperation extends AsyncTask<String, Void, String> {
String data;
ProgressDialog dialog;
@Override
protected void onPreExecute() {
dialog = new ProgressDialog(ProfileDetailActivity.this);
dialog.setMessage("Please wait..");
dialog.show();
data = jsonObject.toString();
Log.d("Message", " DATA : " + data);
super.onPreExecute();
}
@Override
protected String doInBackground(String... strings) {
getPostMethodClass = new GetPostMethodClass();
String result = getPostMethodClass.sendPostRequest(selectURL, data);
Log.d("Message", "RESULT : " + result);
return result;
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
dialog.dismiss();
try {
JSONObject json = new JSONObject(s);
str_firstname = json.getString("firstname");
str_lastname = json.getString("lastname");
str_username = json.getString("username");
str_address = json.getString("address");
str_profileImage = json.getString("profile_image");
Log.d("Message", " firstNM : " + str_firstname);
Log.d("Message", " lastNM : " + str_lastname);
Log.d("Message", " userNM : " + str_username);
Log.d("Message", " addrs : " + str_address);
Log.d("Message", " Profile : " + str_profileImage);
if (json.getString("status").equals("1")) {
firstname.setText(str_firstname);
lastname.setText(str_lastname);
username.setText(str_username);
address.setText(str_address);
Picasso.with(ProfileDetailActivity.this)
.load(str_profileImage)
.placeholder(R.drawable.profile_image)
.into(profileImage);
} else {
Toast.makeText(ProfileDetailActivity.this, " No User Exist..", Toast.LENGTH_SHORT).show();
}
} catch (JSONException e) {
e.printStackTrace();
}
}
}
private class UpdateImageOperation extends AsyncTask<String, Void, String> {
ProgressDialog dialog;
@Override
protected void onPreExecute() {
dialog = new ProgressDialog(ProfileDetailActivity.this);
dialog.setMessage("Please wait..");
dialog.show();
super.onPreExecute();
}
@Override
protected String doInBackground(String... strings) {
String result = uploadFileAndParam(updateImageURL, sign_id, picturePath);
Log.d("Message", " Result iS : " + result);
return result;
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
dialog.dismiss();
try {
JSONObject json = new JSONObject(s);
if (json.getString("status").equals("1")) {
str_profileImage = json.getString("profile_image");
/*
Picasso.with(ProfileDetailActivity.this)
.load(str_profileImage)
.placeholder(R.drawable.profile_image)
.into(profileImage);*/
Toast.makeText(ProfileDetailActivity.this, " Profile Pic Update..", Toast.LENGTH_SHORT).show();
save.setVisible(false);
Intent ii = new Intent();
ii.putExtra("profile_image", str_profileImage);
setResult(RESULT_OK, ii);
finish();
} else
{
Picasso.with(ProfileDetailActivity.this)
.load(str_profileImage)
.placeholder(R.drawable.profile_image)
.into(profileImage);
Toast.makeText(ProfileDetailActivity.this, " Something Went Wrong", Toast.LENGTH_SHORT).show();
save.setVisible(false);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
}
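/**
* Uploads the image file and the signup id as a hand-built multipart/form-data POST: a "signup_id"
* text part and a "profile_image" file part separated by the "*****" boundary. Returns the server's
* response body as a raw string (expected to be JSON), or null if the source file does not exist.
*/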
public String uploadFileAndParam(String urlUploadPic, int signup_id, String sourceFileUri) {
String fileName = sourceFileUri;
String jsonString = "";
HttpURLConnection conn = null;
DataOutputStream dos = null;
String lineEnd = "\r\n";
String twoHyphens = "--";
String boundary = "*****";
int bytesRead, bytesAvailable, bufferSize;
byte[] buffer;
int maxBufferSize = 1 * 1024 * 1024;
File sourceFile = new File(sourceFileUri);
if (!sourceFile.isFile()) {
Log.e("uploadFile", "Source File not exist :" + fileName);
return null;
} else {
try {
FileInputStream fileInputStream = new FileInputStream(sourceFile);
URL url = new URL(urlUploadPic);
conn = (HttpURLConnection) url.openConnection();
conn.setDoInput(true); // Allow Inputs
conn.setDoOutput(true); // Allow Outputs
conn.setUseCaches(false); // Don't use a Cached Copy
conn.setRequestMethod("POST");
conn.setRequestProperty("Connection", "Keep-Alive");
conn.setRequestProperty("Accept-Encoding", "");
conn.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + boundary);
dos = new DataOutputStream(conn.getOutputStream());
dos.writeBytes(twoHyphens + boundary + lineEnd);
dos.writeBytes("Content-Disposition: form-data; name=\"signup_id\"" + lineEnd);
dos.writeBytes(lineEnd);
dos.writeBytes(String.valueOf(signup_id));
dos.writeBytes(lineEnd);
dos.writeBytes(twoHyphens + boundary + lineEnd);
dos.writeBytes("Content-Disposition: form-data; name=\"profile_image\";filename=\"" + fileName + "\"" + lineEnd);
dos.writeBytes(lineEnd);
Log.e("uploadImg", "Headers are written");
// create a buffer of maximum size
bytesAvailable = fileInputStream.available();
bufferSize = Math.min(bytesAvailable, maxBufferSize);
buffer = new byte[bufferSize];
// read file and write it into form...
bytesRead = fileInputStream.read(buffer, 0, bufferSize);
while (bytesRead > 0) {
dos.write(buffer, 0, bytesRead); // write only the bytes actually read
bytesAvailable = fileInputStream.available();
bufferSize = Math.min(bytesAvailable, maxBufferSize);
bytesRead = fileInputStream.read(buffer, 0, bufferSize);
}
dos.writeBytes(lineEnd);
dos.writeBytes(twoHyphens + boundary + twoHyphens + lineEnd);
// finish the request and read the response
int responseCode = conn.getResponseCode();
Log.d("uploadImg", "File Sent, Response: " + responseCode);
InputStream in = conn.getInputStream();
byte data[] = new byte[1024];
int counter = -1;
while ((counter = in.read(data)) != -1) {
jsonString += new String(data, 0, counter);
}
Log.d("Debug", " JSON String: " + jsonString);
// close streams
fileInputStream.close();
dos.flush();
dos.close();
} catch (MalformedURLException ex) {
ex.printStackTrace();
Log.e("Upload file to server", "error: " + ex.getMessage(), ex);
} catch (Exception e) {
e.printStackTrace();
Log.e("Exception", "" + e.getMessage(), e);
}
return jsonString;
}
}
}
|
|
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.openapi.vcs.changes.shelf;
import com.intellij.diff.FrameDiffTool;
import com.intellij.diff.chains.DiffRequestProducer;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.util.DiffPlaces;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.actions.EditSourceAction;
import com.intellij.ide.dnd.*;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationAction;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.ListSelection;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupActivity;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.registry.RegistryValueListener;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.ChangeViewDiffRequestProcessor.Wrapper;
import com.intellij.openapi.vcs.changes.actions.ShowDiffPreviewAction;
import com.intellij.openapi.vcs.changes.patch.PatchFileType;
import com.intellij.openapi.vcs.changes.shelf.DiffShelvedChangesActionProvider.PatchesPreloader;
import com.intellij.openapi.vcs.changes.ui.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.pom.Navigatable;
import com.intellij.pom.NavigatableAdapter;
import com.intellij.ui.PopupHandler;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.SideBorder;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.impl.ContentImpl;
import com.intellij.util.*;
import com.intellij.util.IconUtil.IconSizeWrapper;
import com.intellij.util.concurrency.annotations.RequiresEdt;
import com.intellij.util.containers.UtilKt;
import com.intellij.util.text.DateFormatUtil;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.CellEditorListener;
import javax.swing.event.ChangeEvent;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellEditor;
import javax.swing.tree.TreeCellEditor;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.List;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.intellij.icons.AllIcons.Vcs.Patch_applied;
import static com.intellij.openapi.vcs.VcsNotificationIdsHolder.SHELVE_DELETION_UNDO;
import static com.intellij.openapi.vcs.changes.ChangesViewManager.isEditorPreview;
import static com.intellij.openapi.vcs.changes.ui.ChangesGroupingSupport.REPOSITORY_GROUPING;
import static com.intellij.openapi.vcs.changes.ui.ChangesViewContentManager.*;
import static com.intellij.openapi.vcs.changes.ui.ChangesViewContentManagerKt.isCommitToolWindowShown;
import static com.intellij.util.FontUtil.spaceAndThinSpace;
import static com.intellij.util.containers.ContainerUtil.*;
import static java.util.Comparator.comparing;
import static java.util.Objects.requireNonNull;
// open for Rider
public class ShelvedChangesViewManager implements Disposable {
private static final Logger LOG = Logger.getInstance(ShelvedChangesViewManager.class);
@NonNls static final String SHELF_CONTEXT_MENU = "Vcs.Shelf.ContextMenu";
private static final String SHELVE_PREVIEW_SPLITTER_PROPORTION = "ShelvedChangesViewManager.DETAILS_SPLITTER_PROPORTION"; //NON-NLS
private final ShelveChangesManager myShelveChangesManager;
private final Project myProject;
private ShelfToolWindowPanel myPanel = null;
private ContentImpl myContent = null;
private final MergingUpdateQueue myUpdateQueue;
private final List<Runnable> myPostUpdateEdtActivity = new ArrayList<>();
public static final DataKey<ChangesTree> SHELVED_CHANGES_TREE =
DataKey.create("ShelveChangesManager.ShelvedChangesTree");
public static final DataKey<List<ShelvedChangeList>> SHELVED_CHANGELIST_KEY =
DataKey.create("ShelveChangesManager.ShelvedChangeListData");
public static final DataKey<List<ShelvedChangeList>> SHELVED_RECYCLED_CHANGELIST_KEY =
DataKey.create("ShelveChangesManager.ShelvedRecycledChangeListData");
public static final DataKey<List<ShelvedChangeList>> SHELVED_DELETED_CHANGELIST_KEY =
DataKey.create("ShelveChangesManager.ShelvedDeletedChangeListData");
public static final DataKey<List<ShelvedChange>> SHELVED_CHANGE_KEY = DataKey.create("ShelveChangesManager.ShelvedChange");
public static final DataKey<List<ShelvedBinaryFile>> SHELVED_BINARY_FILE_KEY = DataKey.create("ShelveChangesManager.ShelvedBinaryFile");
public static ShelvedChangesViewManager getInstance(Project project) {
return project.getService(ShelvedChangesViewManager.class);
}
public ShelvedChangesViewManager(Project project) {
myProject = project;
myShelveChangesManager = ShelveChangesManager.getInstance(project);
myUpdateQueue = new MergingUpdateQueue("Update Shelf Content", 200, true, null, myProject, null, true);
project.getMessageBus().connect().subscribe(ShelveChangesManager.SHELF_TOPIC, e -> scheduleContentUpdate());
}
private void scheduleContentUpdate() {
myUpdateQueue.queue(new MyContentUpdater());
}
private void updateTreeIfShown(@NotNull Consumer<? super ShelfTree> treeConsumer) {
if (myContent == null) return;
treeConsumer.consume(myPanel.myTree);
}
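// Removes the Shelf tab when no shelved lists remain; otherwise lazily creates the tab content
// (with drag-and-drop support) and rebuilds the tree.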
@RequiresEdt
void updateViewContent() {
if (myShelveChangesManager.getAllLists().isEmpty()) {
if (myContent != null) {
removeContent(myContent);
VcsNotifier.getInstance(myProject).hideAllNotificationsByType(ShelfNotification.class);
}
myContent = null;
}
else {
if (myContent == null) {
myPanel = new ShelfToolWindowPanel(myProject);
myContent = new ContentImpl(myPanel.myRootPanel, VcsBundle.message("shelf.tab"), false);
myContent.setTabName(SHELF); //NON-NLS overridden by displayName above
MyDnDTarget dnDTarget = new MyDnDTarget(myPanel.myProject, myContent);
myContent.putUserData(Content.TAB_DND_TARGET_KEY, dnDTarget);
myContent.putUserData(IS_IN_COMMIT_TOOLWINDOW_KEY, true);
myContent.setCloseable(false);
myContent.setDisposer(myPanel);
DnDSupport.createBuilder(myPanel.myTree)
.setImageProvider(myPanel::createDraggedImage)
.setBeanProvider(myPanel::createDragStartBean)
.setTargetChecker(dnDTarget)
.setDropHandler(dnDTarget)
.setDisposableParent(myContent)
.install();
addContent(myContent);
}
updateTreeIfShown(tree -> {
tree.rebuildTree();
});
}
}
protected void removeContent(Content content) {
ChangesViewContentI contentManager = ChangesViewContentManager.getInstance(myProject);
contentManager.removeContent(content);
contentManager.selectContent(ChangesViewContentManager.LOCAL_CHANGES);
}
protected void addContent(Content content) {
ChangesViewContentI contentManager = ChangesViewContentManager.getInstance(myProject);
contentManager.addContent(content);
}
protected void activateContent() {
ChangesViewContentI contentManager = ChangesViewContentManager.getInstance(myProject);
contentManager.setSelectedContent(myContent);
ToolWindow window = getToolWindowFor(myProject, SHELF);
if (window != null && !window.isVisible()) {
window.activate(null);
}
}
private static final class MyShelvedTreeModelBuilder extends TreeModelBuilder {
private MyShelvedTreeModelBuilder(Project project, @NotNull ChangesGroupingPolicyFactory grouping) {
super(project, grouping);
}
public void setShelvedLists(@NotNull List<ShelvedChangeList> shelvedLists) {
createShelvedListsWithChangesNode(shelvedLists, myRoot);
}
public void setDeletedShelvedLists(@NotNull List<ShelvedChangeList> shelvedLists) {
createShelvedListsWithChangesNode(shelvedLists, createTagNode(VcsBundle.message("shelve.recently.deleted.node")));
}
private void createShelvedListsWithChangesNode(@NotNull List<ShelvedChangeList> shelvedLists, @NotNull ChangesBrowserNode<?> parentNode) {
shelvedLists.forEach(changeList -> {
List<ShelvedWrapper> shelvedChanges = new ArrayList<>();
requireNonNull(changeList.getChanges()).stream().map(change -> new ShelvedWrapper(change, changeList)).forEach(shelvedChanges::add);
changeList.getBinaryFiles().stream().map(binaryChange -> new ShelvedWrapper(binaryChange, changeList)).forEach(shelvedChanges::add);
shelvedChanges.sort(comparing(s -> s.getChange(myProject), CHANGE_COMPARATOR));
ShelvedListNode shelvedListNode = new ShelvedListNode(changeList);
insertSubtreeRoot(shelvedListNode, parentNode);
for (ShelvedWrapper shelved : shelvedChanges) {
Change change = shelved.getChange(myProject);
FilePath filePath = ChangesUtil.getFilePath(change);
insertChangeNode(change, shelvedListNode, new ShelvedChangeNode(shelved, filePath, change.getOriginText(myProject)));
}
});
}
}
@RequiresEdt
private void updateTreeModel() {
updateTreeIfShown(tree -> tree.setPaintBusy(true));
BackgroundTaskUtil.executeOnPooledThread(myProject, () -> {
List<ShelvedChangeList> lists = myShelveChangesManager.getAllLists();
lists.forEach(l -> l.loadChangesIfNeeded(myProject));
List<ShelvedChangeList> sortedLists = sorted(lists, ChangelistComparator.getInstance());
ApplicationManager.getApplication().invokeLater(() -> {
updateViewContent();
updateTreeIfShown(tree -> {
tree.setLoadedLists(sortedLists);
tree.setPaintBusy(false);
tree.rebuildTree();
});
myPostUpdateEdtActivity.forEach(Runnable::run);
myPostUpdateEdtActivity.clear();
}, ModalityState.NON_MODAL, myProject.getDisposed());
});
}
@RequiresEdt
public void startEditing(@NotNull ShelvedChangeList shelvedChangeList) {
runAfterUpdate(() -> {
selectShelvedList(shelvedChangeList);
updateTreeIfShown(tree -> tree.startEditingAtPath(tree.getLeadSelectionPath()));
});
}
static class ChangelistComparator implements Comparator<ShelvedChangeList> {
private final static ChangelistComparator ourInstance = new ChangelistComparator();
public static ChangelistComparator getInstance() {
return ourInstance;
}
@Override
public int compare(ShelvedChangeList o1, ShelvedChangeList o2) {
return o2.DATE.compareTo(o1.DATE);
}
}
public void activateView(@Nullable final ShelvedChangeList list) {
runAfterUpdate(() -> {
if (myContent == null) return;
if (list != null) {
selectShelvedList(list);
}
activateContent();
});
}
private void runAfterUpdate(@NotNull Runnable postUpdateRunnable) {
ModalityUiUtil.invokeLaterIfNeeded(ModalityState.NON_MODAL, myProject.getDisposed(), () -> {
myUpdateQueue.cancelAllUpdates();
myPostUpdateEdtActivity.add(postUpdateRunnable);
updateTreeModel();
});
}
@Override
public void dispose() {
myUpdateQueue.cancelAllUpdates();
}
public void closeEditorPreview() {
ApplicationManager.getApplication().assertIsDispatchThread();
if (myContent == null) {
return;
}
EditorTabPreview diffPreview = myPanel.myEditorDiffPreview;
if (diffPreview != null) {
diffPreview.closePreview();
}
}
public void openEditorPreview() {
ApplicationManager.getApplication().assertIsDispatchThread();
if (myContent == null) return;
myPanel.openEditorPreview(false);
}
public void updateOnVcsMappingsChanged() {
ApplicationManager.getApplication().invokeLater(() -> {
updateTreeIfShown(tree -> {
ChangesGroupingSupport treeGroupingSupport = tree.getGroupingSupport();
if (treeGroupingSupport.isAvailable(REPOSITORY_GROUPING) && treeGroupingSupport.get(REPOSITORY_GROUPING)) {
tree.rebuildTree();
}
});
}, myProject.getDisposed());
}
public void selectShelvedList(@NotNull ShelvedChangeList list) {
updateTreeIfShown(tree -> {
DefaultMutableTreeNode treeNode = TreeUtil.findNodeWithObject((DefaultMutableTreeNode)tree.getModel().getRoot(), list);
if (treeNode == null) {
LOG.warn(VcsBundle.message("shelve.changelist.not.found", list.DESCRIPTION));
return;
}
TreeUtil.selectNode(tree, treeNode);
});
}
private static final class ShelfTree extends ChangesTree {
private List<ShelvedChangeList> myLoadedLists = emptyList();
private final DeleteProvider myDeleteProvider = new MyShelveDeleteProvider(myProject, this);
private ShelfTree(@NotNull Project project) {
super(project, false, false, true);
setKeepTreeState(true);
setDoubleClickHandler(e -> showShelvedChangesDiff());
setEnterKeyHandler(e -> showShelvedChangesDiff());
}
public void setLoadedLists(@NotNull List<ShelvedChangeList> lists) {
myLoadedLists = new ArrayList<>(lists);
}
@Override
public boolean isPathEditable(TreePath path) {
return isEditable() && getSelectionCount() == 1 && path.getLastPathComponent() instanceof ShelvedListNode;
}
@NotNull
@Override
protected ChangesGroupingSupport installGroupingSupport() {
return new ChangesGroupingSupport(myProject, this, false);
}
@Override
public int getToggleClickCount() {
return 2;
}
private boolean showShelvedChangesDiff() {
if (!hasExactlySelectedChanges()) return false;
DiffShelvedChangesActionProvider.showShelvedChangesDiff(DataManager.getInstance().getDataContext(this));
return true;
}
private boolean hasExactlySelectedChanges() {
return !UtilKt.isEmpty(VcsTreeModelData.exactlySelected(this).userObjectsStream(ShelvedWrapper.class));
}
@Override
public void rebuildTree() {
boolean showRecycled = ShelveChangesManager.getInstance(myProject).isShowRecycled();
MyShelvedTreeModelBuilder modelBuilder = new MyShelvedTreeModelBuilder(myProject, getGrouping());
modelBuilder.setShelvedLists(filter(myLoadedLists, l -> !l.isDeleted() && (showRecycled || !l.isRecycled())));
modelBuilder.setDeletedShelvedLists(filter(myLoadedLists, ShelvedChangeList::isDeleted));
updateTreeModel(modelBuilder.build());
}
@Nullable
@Override
public Object getData(@NotNull @NonNls String dataId) {
if (SHELVED_CHANGES_TREE.is(dataId)) {
return this;
}
else if (SHELVED_CHANGELIST_KEY.is(dataId)) {
return new ArrayList<>(getSelectedLists(this, l -> !l.isRecycled() && !l.isDeleted()));
}
else if (SHELVED_RECYCLED_CHANGELIST_KEY.is(dataId)) {
return new ArrayList<>(getSelectedLists(this, l -> l.isRecycled() && !l.isDeleted()));
}
else if (SHELVED_DELETED_CHANGELIST_KEY.is(dataId)) {
return new ArrayList<>(getSelectedLists(this, l -> l.isDeleted()));
}
else if (SHELVED_CHANGE_KEY.is(dataId)) {
return StreamEx.of(VcsTreeModelData.selected(this).userObjectsStream(ShelvedWrapper.class)).map(s -> s.getShelvedChange())
.nonNull().toList();
}
else if (SHELVED_BINARY_FILE_KEY.is(dataId)) {
return StreamEx.of(VcsTreeModelData.selected(this).userObjectsStream(ShelvedWrapper.class)).map(s -> s.getBinaryFile())
.nonNull().toList();
}
else if (VcsDataKeys.HAVE_SELECTED_CHANGES.is(dataId)) {
return getSelectionCount() > 0;
}
else if (VcsDataKeys.CHANGES.is(dataId)) {
List<ShelvedWrapper> shelvedChanges = VcsTreeModelData.selected(this).userObjects(ShelvedWrapper.class);
if (!shelvedChanges.isEmpty()) {
return map2Array(shelvedChanges, Change.class, s -> s.getChange(myProject));
}
}
else if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
return myDeleteProvider;
}
else if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) {
List<ShelvedWrapper> shelvedChanges = VcsTreeModelData.selected(this).userObjects(ShelvedWrapper.class);
final ArrayDeque<Navigatable> navigatables = new ArrayDeque<>();
for (final ShelvedWrapper shelvedChange : shelvedChanges) {
if (shelvedChange.getBeforePath() != null && !FileStatus.ADDED.equals(shelvedChange.getFileStatus())) {
final NavigatableAdapter navigatable = new NavigatableAdapter() {
@Override
public void navigate(boolean requestFocus) {
final VirtualFile vf = shelvedChange.getBeforeVFUnderProject(myProject);
if (vf != null) {
navigate(myProject, vf, true);
}
}
};
navigatables.add(navigatable);
}
}
return navigatables.toArray(Navigatable.EMPTY_NAVIGATABLE_ARRAY);
}
return super.getData(dataId);
}
}
@NotNull
private static Set<ShelvedChangeList> getSelectedLists(@NotNull ChangesTree tree,
@NotNull Predicate<? super ShelvedChangeList> condition) {
TreePath[] selectionPaths = tree.getSelectionPaths();
if (selectionPaths == null) return Collections.emptySet();
return StreamEx.of(selectionPaths)
.map(path -> TreeUtil.findObjectInPath(path, ShelvedChangeList.class))
.filter(Objects::nonNull)
.filter(condition)
.collect(Collectors.toSet());
}
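/**
* Returns the selected shelved changes. If exactly one change is selected, the selection is expanded to all
* changes of its changelist, keeping the originally selected change as the current item.
*/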
@NotNull
static ListSelection<ShelvedWrapper> getSelectedChangesOrAll(@NotNull DataContext dataContext) {
ChangesTree tree = dataContext.getData(SHELVED_CHANGES_TREE);
if (tree == null) return ListSelection.empty();
ListSelection<ShelvedWrapper> wrappers = ListSelection.createAt(VcsTreeModelData.selected(tree).userObjects(ShelvedWrapper.class), 0);
if (wrappers.getList().size() == 1) {
// return all changes for selected changelist
ShelvedChangeList changeList = getFirstItem(getSelectedLists(tree, it -> true));
if (changeList != null) {
ChangesBrowserNode<?> changeListNode = (ChangesBrowserNode<?>)TreeUtil.findNodeWithObject(tree.getRoot(), changeList);
if (changeListNode != null) {
List<ShelvedWrapper> allWrappers = changeListNode.getAllObjectsUnder(ShelvedWrapper.class);
if (allWrappers.size() > 1) {
ShelvedWrapper toSelect = getFirstItem(wrappers.getList());
return ListSelection.create(allWrappers, toSelect);
}
}
}
}
return wrappers;
}
@NotNull
public static List<ShelvedChangeList> getShelvedLists(@NotNull final DataContext dataContext) {
List<ShelvedChangeList> shelvedChangeLists = new ArrayList<>();
shelvedChangeLists.addAll(notNullize(SHELVED_CHANGELIST_KEY.getData(dataContext)));
shelvedChangeLists.addAll(notNullize(SHELVED_RECYCLED_CHANGELIST_KEY.getData(dataContext)));
shelvedChangeLists.addAll(notNullize(SHELVED_DELETED_CHANGELIST_KEY.getData(dataContext)));
return shelvedChangeLists;
}
@NotNull
public static List<ShelvedChangeList> getExactlySelectedLists(@NotNull final DataContext dataContext) {
ChangesTree shelvedChangeTree = dataContext.getData(SHELVED_CHANGES_TREE);
if (shelvedChangeTree == null) return emptyList();
return StreamEx.of(VcsTreeModelData.exactlySelected(shelvedChangeTree).userObjectsStream(ShelvedChangeList.class)).toList();
}
@NotNull
public static List<ShelvedChange> getShelveChanges(@NotNull final DataContext dataContext) {
return notNullize(dataContext.getData(SHELVED_CHANGE_KEY));
}
@NotNull
public static List<ShelvedBinaryFile> getBinaryShelveChanges(@NotNull final DataContext dataContext) {
return notNullize(dataContext.getData(SHELVED_BINARY_FILE_KEY));
}
@NotNull
public static List<String> getSelectedShelvedChangeNames(@NotNull final DataContext dataContext) {
ChangesTree shelvedChangeTree = dataContext.getData(SHELVED_CHANGES_TREE);
if (shelvedChangeTree == null) return emptyList();
return StreamEx.of(VcsTreeModelData.selected(shelvedChangeTree).userObjectsStream(ShelvedWrapper.class))
.map(ShelvedWrapper::getPath).toList();
}
private static final class MyShelveDeleteProvider implements DeleteProvider {
@NotNull private final Project myProject;
@NotNull private final ShelfTree myTree;
private MyShelveDeleteProvider(@NotNull Project project, @NotNull ShelfTree tree) {
myProject = project;
myTree = tree;
}
@Override
public void deleteElement(@NotNull DataContext dataContext) {
List<ShelvedChangeList> shelvedListsToDelete = TreeUtil.collectSelectedObjectsOfType(myTree, ShelvedChangeList.class);
List<ShelvedChange> changesToDelete = getChangesNotInLists(shelvedListsToDelete, getShelveChanges(dataContext));
List<ShelvedBinaryFile> binariesToDelete = getBinariesNotInLists(shelvedListsToDelete, getBinaryShelveChanges(dataContext));
ShelveChangesManager manager = ShelveChangesManager.getInstance(myProject);
int fileListSize = binariesToDelete.size() + changesToDelete.size();
Map<ShelvedChangeList, Date> createdDeletedListsWithOriginalDates =
manager.deleteShelves(shelvedListsToDelete, getShelvedLists(dataContext), changesToDelete, binariesToDelete);
if (!createdDeletedListsWithOriginalDates.isEmpty()) {
showUndoDeleteNotification(shelvedListsToDelete, fileListSize, createdDeletedListsWithOriginalDates);
}
}
private void showUndoDeleteNotification(@NotNull List<ShelvedChangeList> shelvedListsToDelete,
int fileListSize,
@NotNull Map<ShelvedChangeList, Date> createdDeletedListsWithOriginalDate) {
String message = constructDeleteSuccessfullyMessage(fileListSize, shelvedListsToDelete.size(), getFirstItem(shelvedListsToDelete));
Notification shelfDeletionNotification = new ShelfDeleteNotification(message);
shelfDeletionNotification.setDisplayId(VcsNotificationIdsHolder.SHELF_UNDO_DELETE);
shelfDeletionNotification.addAction(new UndoShelfDeletionAction(myProject, createdDeletedListsWithOriginalDate));
shelfDeletionNotification.addAction(ActionManager.getInstance().getAction("ShelvedChanges.ShowRecentlyDeleted"));
VcsNotifier.getInstance(myProject).showNotificationAndHideExisting(shelfDeletionNotification, ShelfDeleteNotification.class);
}
private static final class UndoShelfDeletionAction extends NotificationAction {
@NotNull private final Project myProject;
@NotNull private final Map<ShelvedChangeList, Date> myListDateMap;
private UndoShelfDeletionAction(@NotNull Project project, @NotNull Map<ShelvedChangeList, Date> listDateMap) {
super(IdeBundle.messagePointer("undo.dialog.title"));
myProject = project;
myListDateMap = listDateMap;
}
@Override
public void actionPerformed(@NotNull AnActionEvent e, @NotNull Notification notification) {
ShelveChangesManager manager = ShelveChangesManager.getInstance(myProject);
List<ShelvedChangeList> cantRestoreList = findAll(myListDateMap.keySet(), l -> !manager.getDeletedLists().contains(l));
myListDateMap.forEach((l, d) -> manager.restoreList(l, d));
notification.expire();
if (!cantRestoreList.isEmpty()) {
VcsNotifier.getInstance(myProject).notifyMinorWarning(SHELVE_DELETION_UNDO,
VcsBundle.message("shelve.undo.deletion"),
VcsBundle.message("shelve.changes.restore.error", cantRestoreList.size()));
}
}
}
private static List<ShelvedBinaryFile> getBinariesNotInLists(@NotNull List<ShelvedChangeList> listsToDelete,
@NotNull List<ShelvedBinaryFile> binaryFiles) {
List<ShelvedBinaryFile> result = new ArrayList<>(binaryFiles);
for (ShelvedChangeList list : listsToDelete) {
result.removeAll(list.getBinaryFiles());
}
return result;
}
@NotNull
private static List<ShelvedChange> getChangesNotInLists(@NotNull List<ShelvedChangeList> listsToDelete,
@NotNull List<ShelvedChange> shelvedChanges) {
List<ShelvedChange> result = new ArrayList<>(shelvedChanges);
// all changes should be loaded because the action is performed from the loaded shelf tab
listsToDelete.stream().map(list -> requireNonNull(list.getChanges())).forEach(result::removeAll);
return result;
}
@NotNull
@Nls
private static String constructDeleteSuccessfullyMessage(int fileNum, int listNum, @Nullable ShelvedChangeList first) {
String filesMessage = fileNum != 0 ? VcsBundle.message("shelve.delete.files.successful.message", fileNum) : "";
String changelistsMessage = listNum != 0 ? VcsBundle
.message("shelve.delete.changelists.message", listNum, listNum == 1 && first != null ? first.DESCRIPTION : "") : "";
return StringUtil.capitalize(
VcsBundle.message("shelve.delete.successful.message", filesMessage, fileNum > 0 && listNum > 0 ? 1 : 0, changelistsMessage));
}
@Override
public boolean canDeleteElement(@NotNull DataContext dataContext) {
return !getShelvedLists(dataContext).isEmpty();
}
}
private static final class MyDnDTarget extends VcsToolwindowDnDTarget {
private MyDnDTarget(@NotNull Project project, @NotNull Content content) {
super(project, content);
}
@Override
public void drop(DnDEvent event) {
super.drop(event);
Object attachedObject = event.getAttachedObject();
if (attachedObject instanceof ChangeListDragBean) {
FileDocumentManager.getInstance().saveAllDocuments();
List<Change> changes = Arrays.asList(((ChangeListDragBean)attachedObject).getChanges());
ShelveChangesManager.getInstance(myProject).shelveSilentlyUnderProgress(changes, true);
}
}
@Override
public boolean isDropPossible(@NotNull DnDEvent event) {
Object attachedObject = event.getAttachedObject();
return attachedObject instanceof ChangeListDragBean && ((ChangeListDragBean)attachedObject).getChanges().length > 0;
}
}
private static final class ShelfToolWindowPanel implements ChangesViewContentManagerListener, DataProvider, Disposable {
@NotNull private static final RegistryValue isOpenEditorDiffPreviewWithSingleClick =
Registry.get("show.diff.preview.as.editor.tab.with.single.click");
private final Project myProject;
private final ShelveChangesManager myShelveChangesManager;
private final VcsConfiguration myVcsConfiguration;
@NotNull private final JScrollPane myTreeScrollPane;
private final ShelfTree myTree;
private final ActionToolbar myToolbar;
@NotNull private final JPanel myRootPanel = new JPanel(new BorderLayout());
private MyShelvedPreviewProcessor myEditorChangeProcessor;
private MyShelvedPreviewProcessor mySplitterChangeProcessor;
private EditorTabPreview myEditorDiffPreview;
private PreviewDiffSplitterComponent mySplitterDiffPreview;
private ShelfToolWindowPanel(@NotNull Project project) {
myProject = project;
myShelveChangesManager = ShelveChangesManager.getInstance(myProject);
myVcsConfiguration = VcsConfiguration.getInstance(myProject);
myTree = new ShelfTree(myProject);
myTree.setEditable(true);
myTree.setDragEnabled(!ApplicationManager.getApplication().isHeadlessEnvironment());
myTree.getGroupingSupport().setGroupingKeysOrSkip(myShelveChangesManager.getGrouping());
myTree.addGroupingChangeListener(e -> {
myShelveChangesManager.setGrouping(myTree.getGroupingSupport().getGroupingKeys());
myTree.rebuildTree();
});
DefaultTreeCellEditor treeCellEditor = new DefaultTreeCellEditor(myTree, null) {
@Override
public boolean isCellEditable(EventObject event) {
return !(event instanceof MouseEvent) && super.isCellEditable(event);
}
};
myTree.setCellEditor(treeCellEditor);
treeCellEditor.addCellEditorListener(new CellEditorListener() {
@Override
public void editingStopped(ChangeEvent e) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)myTree.getLastSelectedPathComponent();
if (node instanceof ShelvedListNode && e.getSource() instanceof TreeCellEditor) {
String editorValue = ((TreeCellEditor)e.getSource()).getCellEditorValue().toString();
ShelvedChangeList shelvedChangeList = ((ShelvedListNode)node).getList();
myShelveChangesManager.renameChangeList(shelvedChangeList, editorValue);
}
}
@Override
public void editingCanceled(ChangeEvent e) {
}
});
final AnAction showDiffAction = ActionManager.getInstance().getAction(IdeActions.ACTION_SHOW_DIFF_COMMON);
showDiffAction.registerCustomShortcutSet(showDiffAction.getShortcutSet(), myTree);
final EditSourceAction editSourceAction = new EditSourceAction();
editSourceAction.registerCustomShortcutSet(editSourceAction.getShortcutSet(), myTree);
DefaultActionGroup actionGroup = new DefaultActionGroup();
actionGroup.addAll((ActionGroup)ActionManager.getInstance().getAction("ShelvedChangesToolbar"));
actionGroup.add(Separator.getInstance());
actionGroup.add(new MyToggleDetailsAction());
myToolbar = ActionManager.getInstance().createActionToolbar("ShelvedChanges", actionGroup, false);
myToolbar.setTargetComponent(myTree);
myTreeScrollPane = ScrollPaneFactory.createScrollPane(myTree, SideBorder.LEFT);
myRootPanel.add(myTreeScrollPane, BorderLayout.CENTER);
addToolbar(isCommitToolWindowShown(myProject));
setDiffPreview();
EditorTabDiffPreviewManager.getInstance(project).subscribeToPreviewVisibilityChange(this, this::setDiffPreview);
isOpenEditorDiffPreviewWithSingleClick.addListener(new RegistryValueListener() {
@Override
public void afterValueChanged(@NotNull RegistryValue value) {
if (myEditorDiffPreview != null) setDiffPreview();
}
}, this);
myProject.getMessageBus().connect(this).subscribe(ChangesViewContentManagerListener.TOPIC, this);
DataManager.registerDataProvider(myRootPanel, this);
PopupHandler.installPopupMenu(myTree, "ShelvedChangesPopupMenu", SHELF_CONTEXT_MENU);
}
@Override
public void dispose() {
}
@Override
public void toolWindowMappingChanged() {
addToolbar(isCommitToolWindowShown(myProject));
}
private void addToolbar(boolean isHorizontal) {
if (isHorizontal) {
myToolbar.setOrientation(SwingConstants.HORIZONTAL);
myRootPanel.add(myToolbar.getComponent(), BorderLayout.NORTH);
}
else {
myToolbar.setOrientation(SwingConstants.VERTICAL);
myRootPanel.add(myToolbar.getComponent(), BorderLayout.WEST);
}
}
private void setDiffPreview() {
boolean isEditorPreview = isEditorPreview(myProject);
boolean hasSplitterPreview = !isCommitToolWindowShown(myProject);
if (myEditorChangeProcessor != null) Disposer.dispose(myEditorChangeProcessor);
if (mySplitterChangeProcessor != null) Disposer.dispose(mySplitterChangeProcessor);
if (isEditorPreview) {
myEditorChangeProcessor = new MyShelvedPreviewProcessor(myProject, myTree, true);
Disposer.register(this, myEditorChangeProcessor);
myEditorDiffPreview = installEditorPreview(myEditorChangeProcessor, hasSplitterPreview);
}
else {
myEditorDiffPreview = null;
}
if (hasSplitterPreview) {
mySplitterChangeProcessor = new MyShelvedPreviewProcessor(myProject, myTree, false);
Disposer.register(this, mySplitterChangeProcessor);
mySplitterDiffPreview = installSplitterPreview(mySplitterChangeProcessor);
}
else {
mySplitterDiffPreview = null;
}
}
@NotNull
private EditorTabPreview installEditorPreview(@NotNull MyShelvedPreviewProcessor changeProcessor, boolean hasSplitterPreview) {
return new SimpleTreeEditorDiffPreview(changeProcessor, myTree, myTreeScrollPane,
isOpenEditorDiffPreviewWithSingleClick.asBoolean() && !hasSplitterPreview) {
@Override
public void returnFocusToTree() {
ToolWindow toolWindow = getToolWindowFor(myProject, SHELF);
if (toolWindow != null) toolWindow.activate(null);
}
@Override
public void updateAvailability(@NotNull AnActionEvent event) {
DiffShelvedChangesActionProvider.updateAvailability(event);
}
@Override
protected String getCurrentName() {
Wrapper myCurrentShelvedElement = changeProcessor.getCurrentChange();
return myCurrentShelvedElement != null
? VcsBundle.message("shelve.editor.diff.preview.title", myCurrentShelvedElement.getPresentableName())
: VcsBundle.message("shelved.version.name");
}
@Override
protected boolean skipPreviewUpdate() {
if (super.skipPreviewUpdate()) return true;
if (!myTree.equals(IdeFocusManager.getInstance(myProject).getFocusOwner())) return true;
if (!isPreviewOpen() && !isEditorPreviewAllowed()) return true;
return false;
}
};
}
@NotNull
private PreviewDiffSplitterComponent installSplitterPreview(@NotNull MyShelvedPreviewProcessor changeProcessor) {
PreviewDiffSplitterComponent previewSplitter =
new PreviewDiffSplitterComponent(changeProcessor, SHELVE_PREVIEW_SPLITTER_PROPORTION);
previewSplitter.setFirstComponent(myTreeScrollPane);
previewSplitter.setPreviewVisible(myVcsConfiguration.SHELVE_DETAILS_PREVIEW_SHOWN, false);
myTree.addSelectionListener(() -> previewSplitter.updatePreview(false), changeProcessor);
myRootPanel.add(previewSplitter, BorderLayout.CENTER);
Disposer.register(changeProcessor, () -> {
myRootPanel.remove(previewSplitter);
myRootPanel.add(myTreeScrollPane, BorderLayout.CENTER);
myRootPanel.revalidate();
myRootPanel.repaint();
});
return previewSplitter;
}
private boolean isEditorPreviewAllowed() {
return !isOpenEditorDiffPreviewWithSingleClick.asBoolean() || myVcsConfiguration.SHELVE_DETAILS_PREVIEW_SHOWN;
}
private void openEditorPreview(boolean focusEditor) {
if (myEditorDiffPreview == null) return;
if (!isEditorPreviewAllowed()) return;
myEditorDiffPreview.openPreview(focusEditor);
}
@Nullable
private DnDDragStartBean createDragStartBean(@NotNull DnDActionInfo info) {
if (info.isMove()) {
DataContext dc = DataManager.getInstance().getDataContext(myTree);
return new DnDDragStartBean(new ShelvedChangeListDragBean(getShelveChanges(dc), getBinaryShelveChanges(dc), getShelvedLists(dc)));
}
return null;
}
@NotNull
private DnDImage createDraggedImage(@NotNull DnDActionInfo info) {
String imageText = VcsBundle.message("unshelve.changes.action");
Image image = DnDAwareTree.getDragImage(myTree, imageText, null).getFirst();
return new DnDImage(image, new Point(-image.getWidth(null), -image.getHeight(null)));
}
@Override
public @Nullable Object getData(@NotNull String dataId) {
if (EditorTabDiffPreviewManager.EDITOR_TAB_DIFF_PREVIEW.is(dataId)) {
return myEditorDiffPreview;
}
return myTree.getData(dataId);
}
private class MyToggleDetailsAction extends ShowDiffPreviewAction {
@Override
public void update(@NotNull AnActionEvent e) {
super.update(e);
e.getPresentation().setEnabledAndVisible(mySplitterDiffPreview != null || isOpenEditorDiffPreviewWithSingleClick.asBoolean());
}
@Override
public void setSelected(@NotNull AnActionEvent e, boolean state) {
ObjectUtils.chooseNotNull(mySplitterDiffPreview, myEditorDiffPreview).setPreviewVisible(state, false);
myVcsConfiguration.SHELVE_DETAILS_PREVIEW_SHOWN = state;
}
@Override
public boolean isSelected(@NotNull AnActionEvent e) {
return myVcsConfiguration.SHELVE_DETAILS_PREVIEW_SHOWN;
}
}
}
private static class MyShelvedPreviewProcessor extends ChangeViewDiffRequestProcessor implements DiffPreviewUpdateProcessor {
@NotNull private final ShelfTree myTree;
private final boolean myIsInEditor;
@NotNull private final PatchesPreloader myPreloader;
MyShelvedPreviewProcessor(@NotNull Project project, @NotNull ShelfTree tree, boolean isInEditor) {
super(project, DiffPlaces.SHELVE_VIEW);
myTree = tree;
myIsInEditor = isInEditor;
myPreloader = new PatchesPreloader(project);
putContextUserData(PatchesPreloader.SHELF_PRELOADER, myPreloader);
}
@RequiresEdt
@Override
public void clear() {
setCurrentChange(null);
dropCaches();
}
@Override
protected boolean shouldAddToolbarBottomBorder(@NotNull FrameDiffTool.ToolbarComponents toolbarComponents) {
return !myIsInEditor || super.shouldAddToolbarBottomBorder(toolbarComponents);
}
@Override
public @NotNull Stream<? extends Wrapper> getSelectedChanges() {
return VcsTreeModelData.selected(myTree).userObjectsStream(ShelvedWrapper.class);
}
@Override
public @NotNull Stream<Wrapper> getAllChanges() {
Set<ShelvedChangeList> changeLists =
VcsTreeModelData.selected(myTree).userObjectsStream(ShelvedWrapper.class)
.map(wrapper -> wrapper.getChangeList()).collect(Collectors.toSet());
return VcsTreeModelData.all(myTree).rawNodesStream()
.filter(node -> node instanceof ShelvedListNode && changeLists.contains(((ShelvedListNode)node).getList()))
.flatMap(node -> VcsTreeModelData.allUnder(node).userObjectsStream(ShelvedWrapper.class));
}
@Override
protected void selectChange(@NotNull Wrapper change) {
if (change instanceof ShelvedWrapper) {
DefaultMutableTreeNode root = myTree.getRoot();
DefaultMutableTreeNode changelistNode = TreeUtil.findNodeWithObject(root, ((ShelvedWrapper)change).getChangeList());
if (changelistNode == null) return;
DefaultMutableTreeNode node = TreeUtil.findNodeWithObject(changelistNode, change);
if (node == null) return;
TreeUtil.selectPath(myTree, TreeUtil.getPathFromRoot(node), false);
}
}
@Override
protected @Nullable DiffRequest loadRequestFast(@NotNull DiffRequestProducer provider) {
if (provider instanceof ShelvedWrapperDiffRequestProducer) {
ShelvedChange shelvedChange = ((ShelvedWrapperDiffRequestProducer)provider).getWrapper().getShelvedChange();
if (shelvedChange != null && myPreloader.isPatchFileChanged(shelvedChange.getPatchPath())) return null;
}
return super.loadRequestFast(provider);
}
}
private static class ShelvedListNode extends ChangesBrowserNode<ShelvedChangeList> {
private static final Icon PatchIcon = PatchFileType.INSTANCE.getIcon();
private static final Icon AppliedPatchIcon =
new IconSizeWrapper(Patch_applied, Patch_applied.getIconWidth(), Patch_applied.getIconHeight()) {
@Override
public void paintIcon(Component c, Graphics g, int x, int y) {
GraphicsUtil.paintWithAlpha(g, 0.6f);
super.paintIcon(c, g, x, y);
}
};
private static final Icon DisabledToDeleteIcon = IconUtil.desaturate(AllIcons.Actions.GC);
@NotNull private final ShelvedChangeList myList;
ShelvedListNode(@NotNull ShelvedChangeList list) {
super(list);
myList = list;
}
@NotNull
public ShelvedChangeList getList() {
return myList;
}
@Override
public void render(@NotNull ChangesBrowserNodeRenderer renderer, boolean selected, boolean expanded, boolean hasFocus) {
String listName = myList.DESCRIPTION;
if (StringUtil.isEmptyOrSpaces(listName)) listName = VcsBundle.message("changes.nodetitle.empty.changelist.name");
if (myList.isRecycled() || myList.isDeleted()) {
renderer.appendTextWithIssueLinks(listName, SimpleTextAttributes.GRAYED_BOLD_ATTRIBUTES);
renderer.setIcon(myList.isMarkedToDelete() || myList.isDeleted() ? DisabledToDeleteIcon : AppliedPatchIcon);
}
else {
renderer.appendTextWithIssueLinks(listName, SimpleTextAttributes.REGULAR_ATTRIBUTES);
renderer.setIcon(PatchIcon);
}
appendCount(renderer);
String date = DateFormatUtil.formatPrettyDateTime(myList.DATE);
renderer.append(", " + date, SimpleTextAttributes.GRAYED_ATTRIBUTES);
}
@Override
public @Nls String getTextPresentation() {
return getUserObject().toString();
}
}
private static class ShelvedChangeNode extends ChangesBrowserNode<ShelvedWrapper> implements Comparable<ShelvedChangeNode> {
@NotNull private final ShelvedWrapper myShelvedChange;
@NotNull private final FilePath myFilePath;
@Nullable @Nls private final String myAdditionalText;
protected ShelvedChangeNode(@NotNull ShelvedWrapper shelvedChange,
@NotNull FilePath filePath,
@Nullable @Nls String additionalText) {
super(shelvedChange);
myShelvedChange = shelvedChange;
myFilePath = filePath;
myAdditionalText = additionalText;
}
@Override
public void render(@NotNull ChangesBrowserNodeRenderer renderer, boolean selected, boolean expanded, boolean hasFocus) {
String path = myShelvedChange.getRequestName();
String directory = StringUtil.defaultIfEmpty(PathUtil.getParentPath(path), VcsBundle.message("shelve.default.path.rendering"));
String fileName = StringUtil.defaultIfEmpty(PathUtil.getFileName(path), path);
renderer.append(fileName, new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, myShelvedChange.getFileStatus().getColor()));
if (myAdditionalText != null) {
renderer.append(spaceAndThinSpace() + myAdditionalText, SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
if (renderer.isShowFlatten()) {
renderer.append(spaceAndThinSpace() + FileUtil.toSystemDependentName(directory), SimpleTextAttributes.GRAYED_ATTRIBUTES);
}
renderer.setIcon(FileTypeManager.getInstance().getFileTypeByFileName(fileName).getIcon());
}
@Override
public String getTextPresentation() {
return PathUtil.getFileName(myShelvedChange.getRequestName());
}
@Override
protected boolean isFile() {
return true;
}
@Override
public int compareTo(@NotNull ShelvedChangeNode o) {
return compareFilePaths(myFilePath, o.myFilePath);
}
@Nullable
@Override
public Color getBackgroundColor(@NotNull Project project) {
return getBackgroundColorFor(project, myFilePath);
}
}
private class MyContentUpdater extends Update {
MyContentUpdater() {
super("ShelfContentUpdate");
}
@Override
public void run() {
updateTreeModel();
}
@Override
public boolean canEat(Update update) {
return true;
}
}
public static class PostStartupActivity implements StartupActivity.Background {
@Override
public void runActivity(@NotNull Project project) {
if (ApplicationManager.getApplication().isHeadlessEnvironment()) return;
getInstance(project).scheduleContentUpdate();
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.core.buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* An {@link IoBufferAllocator} that caches the buffers which are likely to
* be reused during auto-expansion of the buffers.
* <p>
* In {@link SimpleBufferAllocator}, the underlying {@link ByteBuffer} of
* the {@link IoBuffer} is reallocated on its capacity change, which means
* the newly allocated bigger {@link ByteBuffer} replaces the old small
* {@link ByteBuffer}. Consequently, the old {@link ByteBuffer} is marked
* for garbage collection.
* <p>
* It is not a problem in most cases as long as the capacity change doesn't
* happen frequently. However, once it happens too often, it burdens the
* VM, and the cost of filling the newly allocated {@link ByteBuffer} with
* {@code NUL} surpasses the cost of accessing the cache. On a machine with two
* dual-core Opteron Italy 270 processors, {@link CachedBufferAllocator}
* outperformed {@link SimpleBufferAllocator} in the following situations:
* <ul>
* <li>when a 32 bytes buffer is expanded 4 or more times,</li>
* <li>when a 64 bytes buffer is expanded 4 or more times,</li>
* <li>when a 128 bytes buffer is expanded 2 or more times,</li>
* <li>and when a 256 bytes or bigger buffer is expanded 1 or more times.</li>
* </ul>
* Please note the observation above is subject to change in a different
* environment.
* <p>
* {@link CachedBufferAllocator} uses a {@link ThreadLocal} to store the cached
* buffers, allocates only buffers whose capacity is a power of 2, and provides
* a performance advantage only if {@link IoBuffer#free()} is called properly.
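* <p>
* A minimal usage sketch (illustrative only; it relies on the standard
* {@link IoBuffer} API, and the explicit {@link IoBuffer#free()} call is what
* lets the allocator recycle the backing {@link ByteBuffer}):
* <pre>{@code
* // Install the caching allocator once, typically at application start-up.
* IoBuffer.setAllocator(new CachedBufferAllocator());
*
* IoBuffer buf = IoBuffer.allocate(1024); // capacity is normalized to a power of 2
* buf.putInt(42);
* buf.flip();
* int value = buf.getInt();
* buf.free(); // lets the backing ByteBuffer be reused by this thread
* }</pre>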
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public class CachedBufferAllocator implements IoBufferAllocator {
private static final int DEFAULT_MAX_POOL_SIZE = 8;
private static final int DEFAULT_MAX_CACHED_BUFFER_SIZE = 1 << 18; // 256KB
private final int maxPoolSize;
private final int maxCachedBufferSize;
private final ThreadLocal<Map<Integer, Queue<CachedBuffer>>> heapBuffers;
private final ThreadLocal<Map<Integer, Queue<CachedBuffer>>> directBuffers;
/**
* Creates a new instance with the default parameters
* ({@literal #DEFAULT_MAX_POOL_SIZE} and {@literal #DEFAULT_MAX_CACHED_BUFFER_SIZE}).
*/
public CachedBufferAllocator() {
this(DEFAULT_MAX_POOL_SIZE, DEFAULT_MAX_CACHED_BUFFER_SIZE);
}
/**
* Creates a new instance.
*
* @param maxPoolSize the maximum number of buffers with the same capacity per thread.
* <tt>0</tt> disables this limitation.
* @param maxCachedBufferSize the maximum capacity of a cached buffer.
* A buffer whose capacity is bigger than this value is
* not pooled. <tt>0</tt> disables this limitation.
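* <p>
* For example (an illustrative sketch; the limits shown are arbitrary), to keep
* at most 16 cached buffers per capacity and per thread, and to skip pooling
* for buffers larger than 64 KiB:
* <pre>{@code
* IoBufferAllocator allocator = new CachedBufferAllocator(16, 64 * 1024);
* IoBuffer.setAllocator(allocator);
* }</pre>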
*/
public CachedBufferAllocator(int maxPoolSize, int maxCachedBufferSize) {
if (maxPoolSize < 0) {
throw new IllegalArgumentException("maxPoolSize: " + maxPoolSize);
}
if (maxCachedBufferSize < 0) {
throw new IllegalArgumentException("maxCachedBufferSize: " + maxCachedBufferSize);
}
this.maxPoolSize = maxPoolSize;
this.maxCachedBufferSize = maxCachedBufferSize;
this.heapBuffers = new ThreadLocal<Map<Integer, Queue<CachedBuffer>>>() {
@Override
protected Map<Integer, Queue<CachedBuffer>> initialValue() {
return newPoolMap();
}
};
this.directBuffers = new ThreadLocal<Map<Integer, Queue<CachedBuffer>>>() {
@Override
protected Map<Integer, Queue<CachedBuffer>> initialValue() {
return newPoolMap();
}
};
}
/**
* Returns the maximum number of buffers with the same capacity per thread.
* <tt>0</tt> means 'no limitation'.
*/
public int getMaxPoolSize() {
return maxPoolSize;
}
/**
* Returns the maximum capacity of a cached buffer. A buffer whose
* capacity is bigger than this value is not pooled. <tt>0</tt> means
* 'no limitation'.
*/
public int getMaxCachedBufferSize() {
return maxCachedBufferSize;
}
Map<Integer, Queue<CachedBuffer>> newPoolMap() {
Map<Integer, Queue<CachedBuffer>> poolMap = new HashMap<Integer, Queue<CachedBuffer>>();
for (int i = 0; i < 31; i++) {
poolMap.put(1 << i, new ConcurrentLinkedQueue<CachedBuffer>());
}
poolMap.put(0, new ConcurrentLinkedQueue<CachedBuffer>());
poolMap.put(Integer.MAX_VALUE, new ConcurrentLinkedQueue<CachedBuffer>());
return poolMap;
}
public IoBuffer allocate(int requestedCapacity, boolean direct) {
int actualCapacity = IoBuffer.normalizeCapacity(requestedCapacity);
IoBuffer buf;
if ((maxCachedBufferSize != 0) && (actualCapacity > maxCachedBufferSize)) {
if (direct) {
buf = wrap(ByteBuffer.allocateDirect(actualCapacity));
} else {
buf = wrap(ByteBuffer.allocate(actualCapacity));
}
} else {
Queue<CachedBuffer> pool;
if (direct) {
pool = directBuffers.get().get(actualCapacity);
} else {
pool = heapBuffers.get().get(actualCapacity);
}
// Recycle if possible.
buf = pool.poll();
if (buf != null) {
buf.clear();
buf.setAutoExpand(false);
buf.order(ByteOrder.BIG_ENDIAN);
} else {
if (direct) {
buf = wrap(ByteBuffer.allocateDirect(actualCapacity));
} else {
buf = wrap(ByteBuffer.allocate(actualCapacity));
}
}
}
buf.limit(requestedCapacity);
return buf;
}
public ByteBuffer allocateNioBuffer(int capacity, boolean direct) {
return allocate(capacity, direct).buf();
}
public IoBuffer wrap(ByteBuffer nioBuffer) {
return new CachedBuffer(nioBuffer);
}
public void dispose() {
// Do nothing
}
private class CachedBuffer extends AbstractIoBuffer {
private final Thread ownerThread;
private ByteBuffer buf;
protected CachedBuffer(ByteBuffer buf) {
super(CachedBufferAllocator.this, buf.capacity());
this.ownerThread = Thread.currentThread();
this.buf = buf;
buf.order(ByteOrder.BIG_ENDIAN);
}
protected CachedBuffer(CachedBuffer parent, ByteBuffer buf) {
super(parent);
this.ownerThread = Thread.currentThread();
this.buf = buf;
}
@Override
public ByteBuffer buf() {
if (buf == null) {
throw new IllegalStateException("Buffer has been freed already.");
}
return buf;
}
@Override
protected void buf(ByteBuffer buf) {
ByteBuffer oldBuf = this.buf;
this.buf = buf;
free(oldBuf);
}
@Override
protected IoBuffer duplicate0() {
return new CachedBuffer(this, buf().duplicate());
}
@Override
protected IoBuffer slice0() {
return new CachedBuffer(this, buf().slice());
}
@Override
protected IoBuffer asReadOnlyBuffer0() {
return new CachedBuffer(this, buf().asReadOnlyBuffer());
}
@Override
public byte[] array() {
return buf().array();
}
@Override
public int arrayOffset() {
return buf().arrayOffset();
}
@Override
public boolean hasArray() {
return buf().hasArray();
}
@Override
public void free() {
free(buf);
buf = null;
}
private void free(ByteBuffer oldBuf) {
if ((oldBuf == null) || ((maxCachedBufferSize != 0) && (oldBuf.capacity() > maxCachedBufferSize))
|| oldBuf.isReadOnly() || isDerived() || (Thread.currentThread() != ownerThread)) {
return;
}
// Add to the cache.
Queue<CachedBuffer> pool;
if (oldBuf.isDirect()) {
pool = directBuffers.get().get(oldBuf.capacity());
} else {
pool = heapBuffers.get().get(oldBuf.capacity());
}
if (pool == null) {
return;
}
// Restrict the size of the pool to prevent OOM.
if ((maxPoolSize == 0) || (pool.size() < maxPoolSize)) {
pool.offer(new CachedBuffer(oldBuf));
}
}
}
}
|
|
/**
* Copyright (c) 2014 Samsung Electronics, Inc.,
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.samsung.sec.dexter.cppcheck.plugin;
import com.google.common.base.Strings;
import com.samsung.sec.dexter.core.analyzer.AnalysisConfig;
import com.samsung.sec.dexter.core.analyzer.AnalysisEntityFactory;
import com.samsung.sec.dexter.core.analyzer.AnalysisResult;
import com.samsung.sec.dexter.core.analyzer.IAnalysisEntityFactory;
import com.samsung.sec.dexter.core.checker.CheckerConfig;
import com.samsung.sec.dexter.core.config.DexterConfig;
import com.samsung.sec.dexter.core.config.DexterConfig.LANGUAGE;
import com.samsung.sec.dexter.core.exception.DexterRuntimeException;
import com.samsung.sec.dexter.core.plugin.IDexterPlugin;
import com.samsung.sec.dexter.core.plugin.PluginDescription;
import com.samsung.sec.dexter.core.plugin.PluginVersion;
import com.samsung.sec.dexter.core.util.DexterUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import net.xeoh.plugins.base.annotations.PluginImplementation;
@PluginImplementation
public class CppcheckDexterPlugin implements IDexterPlugin {
public final static String PLUGIN_NAME = "cppcheck";
public final static PluginVersion PLUGIN_VERSION = new PluginVersion("0.10.4");
private CppcheckWrapper cppcheck = new CppcheckWrapper();
private final static Logger logger = Logger.getLogger(CppcheckDexterPlugin.class);
private static PluginDescription PLUGIN_DESCRIPTION = new PluginDescription(CppcheckDexterPlugin.PLUGIN_NAME,
PLUGIN_NAME,
PLUGIN_VERSION,
DexterConfig.LANGUAGE.CPP, "Dexter plug-in for Cppcheck");
/*
* (non-Javadoc)
*
* @see com.samsung.sec.dexter.core.plugin.IDexterPlugin#init()
*/
@Override
public void init() {
cppcheck.initCheckerConfig();
copyCppcheckRunModule();
if (DexterUtil.getOS() == DexterUtil.OS.LINUX || DexterUtil.getOS() == DexterUtil.OS.MAC) {
checkCppcheckPermission();
}
}
/*
* (non-Javadoc)
*
* @see com.samsung.sec.dexter.core.plugin.IDexterPlugin#destroy()
*/
@Override
public void destroy() {
// do nothing
}
public boolean checkCppcheckPermission() {
String dexterHome = DexterConfig.getInstance().getDexterHome();
// Validate dexter_home itself; the concatenated bin/cppcheck paths below can never be null or empty.
if (Strings.isNullOrEmpty(dexterHome)) {
logger.error("Can't initialize Cppcheck plugin, because the dexter_home is not initialized");
return false;
}
Process changePermissionProcess = null;
StringBuilder changePermissionCmd = new StringBuilder(500);
String dexterBin = dexterHome + DexterUtil.FILE_SEPARATOR + "bin";
String cppcheckHome = dexterBin + DexterUtil.FILE_SEPARATOR + "cppcheck";
String baseCommand = DexterConfig.EXECUTION_PERMISSION + " ";
changePermissionCmd.append(baseCommand).append(cppcheckHome).append(DexterUtil.FILE_SEPARATOR)
.append("cppcheck");
try {
changePermissionProcess = Runtime.getRuntime().exec(changePermissionCmd.toString());
} catch (IOException e) {
throw new DexterRuntimeException(e.getMessage() + " changePermissionCmd: " + changePermissionCmd.toString(),
e);
} finally {
if (changePermissionProcess != null) {
changePermissionProcess.destroy();
}
}
return true;
}
private void copyCppcheckRunModule() {
String dexterHome = DexterConfig.getInstance().getDexterHome();
if (Strings.isNullOrEmpty(dexterHome)) {
throw new DexterRuntimeException(
"Can't initialize Cppcheck plugin, because the dexter_home is not initialized");
}
// copy %DEXTER_HOME%/bin/cppcheck
String zipFilePath = dexterHome;
String cppcheckPath = "";
if (DexterUtil.getOS() == DexterUtil.OS.WINDOWS) {
//zipFilePath += "/temp/cppcheck-windows_0.10.2.zip";
zipFilePath += "/temp/cppcheck-windows_" + PLUGIN_VERSION + ".zip";
cppcheckPath = "/cppcheck-windows.zip";
} else { // LINUX or MAC
if (DexterUtil.getBit() == DexterUtil.BIT._32) {
//zipFilePath += "/temp/cppcheck-linux_0.10.2.zip";
zipFilePath += "/temp/cppcheck-linux_" + PLUGIN_VERSION + "_32.zip";
cppcheckPath = "/cppcheck-linux-32.zip";
} else {
//zipFilePath += "/temp/cppcheck-linux_0.10.2.zip";
zipFilePath += "/temp/cppcheck-linux_" + PLUGIN_VERSION + "_64.zip";
cppcheckPath = "/cppcheck-linux-64.zip";
}
}
final File file = new File(zipFilePath);
if (!file.exists()) {
final InputStream is = getClass().getResourceAsStream(cppcheckPath);
if (is == null) {
throw new DexterRuntimeException("can't find cppcheck.zip file: " + cppcheckPath);
}
try {
FileUtils.copyInputStreamToFile(is, file);
DexterUtil.unzip(zipFilePath, dexterHome + CppcheckWrapper.CPPCHECK_HOME_DIR);
} catch (Exception e) {
throw new DexterRuntimeException(e.getMessage(), e);
} finally {
try {
is.close();
} catch (IOException e) {
// do nothing
}
}
}
}
/*
* (non-Javadoc)
*
* @see
* com.samsung.sec.dexter.core.plugin.IDexterPlugin#getSaPluginDescription()
*/
@Override
public PluginDescription getDexterPluginDescription() {
/*
* it does not work on Eclipse Plugin version
* if (this.pluginDescription == null) {
* this.pluginDescription = new PluginDescription(CppcheckDexterPlugin.PLUGIN_NAME, PLUGIN_NAME,
* PluginVersion.fromImplementationVersion(CppcheckDexterPlugin.class),
* DexterConfig.LANGUAGE.CPP, "Dexter plug-in for Cppcheck");
* }
* return this.pluginDescription;
*/
return PLUGIN_DESCRIPTION;
}
/*
* (non-Javadoc)
*
* @see
* com.samsung.sec.dexter.core.plugin.IDexterPlugin#analyze(com.samsung.
* sec.dexter.core.analyzer.AnalysisConfig,
* com.samsung.sec.dexter.core.analyzer.AnalysisResult)
*/
@Override
public AnalysisResult analyze(final AnalysisConfig config) {
if (config != null) {
cppcheck.setAnalysisConfig(config);
} else {
throw new DexterRuntimeException("analysis config is null");
}
/*
* File bin = new File(DexterConfig.getInstance().getDexterHome() + "/bin");
* if (bin.exists() == false) {
* copyCppcheckRunModule();
* if (DexterUtil.getOS() == DexterUtil.OS.LINUX || DexterUtil.getOS() == DexterUtil.OS.MAC) {
* checkCppcheckPermission();
* }
* }
*
* File cppcheckFolder = new File(DexterConfig.getInstance().getDexterHome() +
* CppcheckWrapper.CPPCHECK_HOME_DIR);
* if (cppcheckFolder.exists() == false)
* DexterConfig.getInstance().createInitialFolderAndFiles();
*/
IAnalysisEntityFactory factory = new AnalysisEntityFactory();
AnalysisResult result = factory.createAnalysisResult(config);
cppcheck.analyze(result);
return result;
}
/*
* (non-Javadoc)
*
* @see
* com.samsung.sec.dexter.core.plugin.IDexterPlugin#setCheckerConfig(com
* .samsung.sec.dexter.core.checker.CheckerConfig)
*/
@Override
public void setCheckerConfig(final CheckerConfig cc) {
cppcheck.setCheckerConfig(cc);
}
/*
* (non-Javadoc)
*
* @see com.samsung.sec.dexter.core.plugin.IDexterPlugin#getCheckerConfig()
*/
@Override
public CheckerConfig getCheckerConfig() {
return cppcheck.getCheckerConfig();
}
/*
* (non-Javadoc)
*
* @see
* com.samsung.sec.dexter.core.plugin.IDexterPlugin#supportLanguage(com.
* samsung.sec.dexter.core.util.DexterConfig.LANGUAGE)
*/
@Override
public boolean supportLanguage(final LANGUAGE language) {
if (language == DexterConfig.LANGUAGE.C || language == DexterConfig.LANGUAGE.CPP) {
return true;
} else {
return false;
}
}
@Override
public void handleDexterHomeChanged(String oldPath, String newPath) {
copyCppcheckRunModule();
if (DexterUtil.getOS() == DexterUtil.OS.LINUX || DexterUtil.getOS() == DexterUtil.OS.MAC) {
checkCppcheckPermission();
}
}
@Override
public String[] getSupportingFileExtensions() {
return new String[] { "c", "cpp", "h", "hpp" };
}
}
|
|
/**
*
*/
package io.github.pbremer.icecreammanager.batch;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Date;
import java.util.EnumSet;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.WebIntegrationTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import io.github.pbremer.icecreammanager.Application;
import io.github.pbremer.icecreammanager.entity.InputFileMetaData;
import io.github.pbremer.icecreammanager.entity.InputFileMetaData.FileType;
import io.github.pbremer.icecreammanager.entity.InputFileMetaData.Status;
import io.github.pbremer.icecreammanager.repository.InputFileMetaDataRepository;
import io.github.pbremer.icecreammanager.service.BeginDayInventoryService;
import io.github.pbremer.icecreammanager.service.CityService;
import io.github.pbremer.icecreammanager.service.DriverInstanceService;
import io.github.pbremer.icecreammanager.service.DriverService;
import io.github.pbremer.icecreammanager.service.EndDayInventoryService;
import io.github.pbremer.icecreammanager.service.InventoryLossService;
import io.github.pbremer.icecreammanager.service.RouteService;
import io.github.pbremer.icecreammanager.service.TruckInstanceService;
import io.github.pbremer.icecreammanager.service.TruckService;
import io.github.pbremer.icecreammanager.service.WarehouseInventoryService;
import io.github.pbremer.icecreammanager.service.ZoneService;
/**
* @author Patrick Bremer
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(Application.class)
@WebIntegrationTest(randomPort = true)
@ActiveProfiles("local")
@TestPropertySource(properties = {
"spring.datasource.url = jdbc:h2:~/local;DB_CLOSE_ON_EXIT=FALSE" })
public class BatchJobTest {
private static final Logger log =
LoggerFactory.getLogger(BatchJobTest.class);
@Autowired
private JobLauncher launcher;
@Autowired
private Job job;
@Autowired
private InputFileMetaDataRepository inputFileMetaDataRepository;
@Autowired
private CityService cityService;
@Autowired
private ZoneService zoneService;
@Autowired
private RouteService routeService;
@Autowired
private TruckService truckService;
@Autowired
private WarehouseInventoryService warehouseInventoryService;
@Autowired
private DriverService driverService;
@Autowired
private TruckInstanceService truckInstanceService;
@Autowired
private DriverInstanceService driverInstanceService;
@Autowired
private BeginDayInventoryService beginDayInventoryService;
@Autowired
private EndDayInventoryService endDayInventoryService;
@Autowired
private InventoryLossService inventoryLossService;
@Before
public void setup() {
for (FileType type : EnumSet.allOf(FileType.class)) {
log.debug("Adding {} entry to database", type.getFileName());
InputFileMetaData data = new InputFileMetaData();
data.setDay(new Date(0L));
data.setSequenceNumber(0);
data.setStatus(Status.WAITING);
data.setFileType(type);
inputFileMetaDataRepository.save(data);
}
}
@Test
public void testJobFlow() throws JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException, IOException {
log.info("Starting city job");
JobExecution jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/city/cityUpload.txt")
.addString("input.file.countablerow.regex",
"^(?!T\\s)(?!HD\\s).*")
.toJobParameters());
log.info("Starting city job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("City data is not stored",
cityService.existsAndIsActive("Dearborn"), equalTo(true));
assertThat("Zone data is not stored",
zoneService.existsAndIsActive("Dearborn 1"), equalTo(true));
assertThat("Zone data is not stored",
zoneService.existsAndIsActive("Dearborn 2"), equalTo(true));
assertThat("Zone data is not stored",
zoneService.existsAndIsActive("Dearborn 3"), equalTo(true));
log.info("Starting route job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/route/routeUpload.txt")
.addString("input.file.countablerow.regex", "^[ACD].*")
.toJobParameters());
jobExecution.getExitStatus().getExitDescription();
log.info("Starting route job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Route data is not stored",
routeService.existsAndIsActive("0001"), equalTo(true));
log.info("Starting truck job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/truck/truckUpload.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting truck job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Truck data is not stored",
truckService.existsAndIsActive("0001"), equalTo(true));
assertThat("Truck data is not stored",
truckService.existsAndIsActive("0010"), equalTo(true));
assertThat("Truck data is not stored",
truckService.existsAndIsActive("0110"), equalTo(true));
log.info("Starting driver job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/driver/driverUpload.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting driver job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Driver data not stored",
driverService.existsAndIsActive("0023"), equalTo(true));
assertThat("Driver data not stored",
driverService.existsAndIsActive("0345"), equalTo(true));
log.info("Starting warehouse inventory job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/inventory/dailyInventory.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting warehouse inventory job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Warehouse inventory data is not stored",
warehouseInventoryService.findWhereIsActiveEquals(true).size(),
equalTo(1));
log.info("Starting truck-route mapping job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/truck-route/truckRouteUpload.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting driver-truck mapping job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Truck-route data is not stored",
truckInstanceService.findAll().size(), equalTo(1));
log.info("Starting driver-truck mapping job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/driver-truck/driverTruck.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting driver-truck mapping job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Driver-truck data is not stored",
truckInstanceService.findAll().size(), equalTo(1));
assertThat("Driver-truck data is not stored",
driverInstanceService.findAll().size(), equalTo(1));
log.info("Starting load truck job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/load-truck/loadTruck.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting load truck mapping job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Begin day inventory data is not stored",
beginDayInventoryService.findAll().size(), equalTo(1));
log.info("Starting route price job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/route-price/routePrice.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting route price mapping job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Price adjustment data is not stored",
beginDayInventoryService.findAll().get(0).getPrice(),
equalTo(new BigDecimal("4.00")));
log.info("Starting daily sales job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/truck-sales/dailySales.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting daily sales mapping job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("End day inventory data is not stored",
endDayInventoryService.findAll().size(), equalTo(1));
log.info("Starting costs job");
jobExecution = launcher.run(job,
new JobParametersBuilder()
.addLong("time", System.currentTimeMillis())
.addString("input.file.name",
"classpath:input-files/cost/cost.txt")
.addString("input.file.countablerow.regex", "^[0-9].*")
.toJobParameters());
log.info("Starting daily sales mapping job validation");
assertThat("Exit status is not COMEPLETE",
jobExecution.getExitStatus().getExitCode(),
equalTo(ExitStatus.COMPLETED.getExitCode()));
assertThat("Inventory loss data is not stored",
inventoryLossService.findAll().size(), equalTo(1));
assertThat("Truck gas data is not stored",
truckInstanceService.findAll().get(0).getGasSpent(),
equalTo(new BigDecimal("72.00")));
assertThat("Truck hours out data is not stored",
truckInstanceService.findAll().get(0).getHoursOut(),
equalTo(new BigDecimal("8.58")));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.internal.IgniteClientReconnectAbstractTest;
import org.apache.ignite.internal.util.typedef.X;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
/**
* Concurrent and advanced tests for WAL state change.
*/
@SuppressWarnings("unchecked")
public class WalModeChangeAdvancedSelfTest extends WalModeChangeCommonAbstractSelfTest {
/**
* Constructor.
*/
public WalModeChangeAdvancedSelfTest() {
super(false);
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
deleteWorkFiles();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
super.afterTest();
stopAllGrids();
deleteWorkFiles();
}
/**
* Test cache cleanup on restart.
*
* @throws Exception If failed.
*/
public void testCacheCleanup() throws Exception {
Ignite srv = startGrid(config(SRV_1, false, false));
srv.cluster().active(true);
IgniteCache cache1 = srv.getOrCreateCache(cacheConfig(CACHE_NAME, PARTITIONED, TRANSACTIONAL));
IgniteCache cache2 = srv.getOrCreateCache(cacheConfig(CACHE_NAME_2, PARTITIONED, TRANSACTIONAL));
assertForAllNodes(CACHE_NAME, true);
assertForAllNodes(CACHE_NAME_2, true);
for (int i = 0; i < 10; i++) {
cache1.put(i, i);
cache2.put(i, i);
}
srv.cluster().disableWal(CACHE_NAME);
assertForAllNodes(CACHE_NAME, false);
assertForAllNodes(CACHE_NAME_2, true);
for (int i = 10; i < 20; i++) {
cache1.put(i, i);
cache2.put(i, i);
}
srv.cluster().disableWal(CACHE_NAME_2);
assertForAllNodes(CACHE_NAME, false);
assertForAllNodes(CACHE_NAME_2, false);
for (int i = 20; i < 30; i++) {
cache1.put(i, i);
cache2.put(i, i);
}
assertEquals(30, cache1.size());
assertEquals(30, cache2.size());
srv.cluster().enableWal(CACHE_NAME);
assertForAllNodes(CACHE_NAME, true);
assertForAllNodes(CACHE_NAME_2, false);
assertEquals(30, cache1.size());
assertEquals(30, cache2.size());
stopAllGrids(true);
srv = startGrid(config(SRV_1, false, false));
srv.cluster().active(true);
cache1 = srv.cache(CACHE_NAME);
cache2 = srv.cache(CACHE_NAME_2);
assertForAllNodes(CACHE_NAME, true);
assertForAllNodes(CACHE_NAME_2, false);
assertEquals(30, cache1.size());
assertEquals(0, cache2.size());
}
/**
* Test simple node join.
*
* @throws Exception If failed.
*/
public void testJoin() throws Exception {
checkJoin(false);
}
/**
* Test simple node join when the operation is performed from the coordinator.
*
* @throws Exception If failed.
*/
public void testJoinCoordinator() throws Exception {
checkJoin(true);
}
/**
* Check node join behavior.
*
* @param crdFiltered {@code True} if the first (coordinator) node is excluded from the cache node filter.
* @throws Exception If failed.
*/
private void checkJoin(boolean crdFiltered) throws Exception {
// Start node and disable WAL.
Ignite srv = startGrid(config(SRV_1, false, crdFiltered));
srv.cluster().active(true);
srv.getOrCreateCache(cacheConfig(PARTITIONED));
assertForAllNodes(CACHE_NAME, true);
if (!crdFiltered) {
srv.cluster().disableWal(CACHE_NAME);
assertForAllNodes(CACHE_NAME, false);
}
// Start other nodes.
startGrid(config(SRV_2, false, false));
assertForAllNodes(CACHE_NAME, false);
if (crdFiltered) {
srv.cluster().disableWal(CACHE_NAME);
assertForAllNodes(CACHE_NAME, false);
}
startGrid(config(SRV_3, false, !crdFiltered));
assertForAllNodes(CACHE_NAME, false);
startGrid(config(CLI, true, false));
assertForAllNodes(CACHE_NAME, false);
// Stop nodes and restore WAL state on the first node.
stopGrid(SRV_2, true);
stopGrid(SRV_3, true);
stopGrid(CLI, true);
if (!crdFiltered) {
srv.cluster().enableWal(CACHE_NAME);
assertForAllNodes(CACHE_NAME, true);
}
// Start other nodes again.
startGrid(config(SRV_2, false, false));
assertForAllNodes(CACHE_NAME, true);
if (crdFiltered) {
srv.cluster().enableWal(CACHE_NAME);
assertForAllNodes(CACHE_NAME, true);
}
startGrid(config(SRV_3, false, !crdFiltered));
assertForAllNodes(CACHE_NAME, true);
startGrid(config(CLI, true, false));
assertForAllNodes(CACHE_NAME, true);
}
/**
* Test server restart (non-coordinator).
*
* @throws Exception If failed.
*/
public void testServerRestartNonCoordinator() throws Exception {
checkNodeRestart(false);
}
/**
* Test server restart (coordinator).
*
* @throws Exception If failed.
*/
public void testServerRestartCoordinator() throws Exception {
fail("https://issues.apache.org/jira/browse/IGNITE-7472");
checkNodeRestart(true);
}
/**
* Check node restart behavior (including coordinator migration) while WAL mode is being toggled.
*
* @param failCrd Whether to restart coordinator nodes.
* @throws Exception If failed.
*/
public void checkNodeRestart(boolean failCrd) throws Exception {
startGrid(config(SRV_1, false, false));
startGrid(config(SRV_2, false, false));
Ignite cli = startGrid(config(CLI, true, false));
cli.cluster().active(true);
cli.getOrCreateCache(cacheConfig(PARTITIONED));
final AtomicInteger restartCnt = new AtomicInteger();
final int restarts = 10;
Thread t = new Thread(new Runnable() {
@Override public void run() {
boolean firstOrSecond = true;
while (restartCnt.get() < restarts) {
String victimName;
if (failCrd) {
victimName = firstOrSecond ? SRV_1 : SRV_2;
firstOrSecond = !firstOrSecond;
}
else
victimName = SRV_2;
try {
stopGrid(victimName);
startGrid(config(victimName, false, false));
Thread.sleep(500);
}
catch (Exception e) {
throw new RuntimeException(e);
}
restartCnt.incrementAndGet();
X.println(">>> Finished restart: " + restartCnt.get());
}
}
});
t.start();
boolean state = true;
while (restartCnt.get() < restarts && !Thread.currentThread().isInterrupted()) {
try {
if (state)
cli.cluster().disableWal(CACHE_NAME);
else
cli.cluster().enableWal(CACHE_NAME);
state = !state;
}
catch (IgniteException e) {
// Possible disconnect, re-try.
}
}
}
/**
* Test client re-connect.
*
* @throws Exception If failed.
*/
public void testClientReconnect() throws Exception {
final Ignite srv = startGrid(config(SRV_1, false, false));
Ignite cli = startGrid(config(CLI, true, false));
cli.cluster().active(true);
cli.getOrCreateCache(cacheConfig(PARTITIONED));
final AtomicBoolean done = new AtomicBoolean();
final CountDownLatch latch = new CountDownLatch(1);
// Start load.
Thread t = new Thread(new Runnable() {
@Override public void run() {
boolean state = false;
while (!done.get()) {
try {
if (state)
cli.cluster().enableWal(CACHE_NAME);
else
cli.cluster().disableWal(CACHE_NAME);
}
catch (IgniteException e) {
String msg = e.getMessage();
assert msg.startsWith("Client node disconnected") ||
msg.startsWith("Client node was disconnected") : e.getMessage();
}
finally {
state = !state;
}
}
latch.countDown();
}
});
t.setName("wal-load-" + cli.name());
t.start();
// Now perform multiple client reconnects.
for (int i = 1; i <= 10; i++) {
Thread.sleep(ThreadLocalRandom.current().nextLong(200, 1000));
IgniteClientReconnectAbstractTest.reconnectClientNode(log, cli, srv, new Runnable() {
@Override public void run() {
// No-op.
}
});
X.println(">>> Finished iteration: " + i);
}
done.set(true);
latch.await();
}
/**
* Test cache destroy while WAL mode is being changed concurrently.
*
* @throws Exception If failed.
*/
public void testCacheDestroy() throws Exception {
final Ignite srv = startGrid(config(SRV_1, false, false));
Ignite cli = startGrid(config(CLI, true, false));
cli.cluster().active(true);
srv.createCache(cacheConfig(PARTITIONED));
final AtomicBoolean done = new AtomicBoolean();
final CountDownLatch latch = new CountDownLatch(1);
// Start load.
Thread t = new Thread(new Runnable() {
@Override public void run() {
boolean state = false;
while (!done.get()) {
try {
if (state)
cli.cluster().enableWal(CACHE_NAME);
else
cli.cluster().disableWal(CACHE_NAME);
}
catch (IgniteException e) {
String msg = e.getMessage();
assert msg.startsWith("Cache doesn't exist") ||
msg.startsWith("Failed to change WAL mode because some caches no longer exist") :
e.getMessage();
}
finally {
state = !state;
}
}
latch.countDown();
}
});
t.setName("wal-load-" + cli.name());
t.start();
// Now destroy and re-create the cache several times.
for (int i = 1; i <= 20; i++) {
Thread.sleep(ThreadLocalRandom.current().nextLong(200, 1000));
srv.destroyCache(CACHE_NAME);
Thread.sleep(100);
srv.createCache(cacheConfig(PARTITIONED));
X.println(">>> Finished iteration: " + i);
}
done.set(true);
latch.await();
}
/**
* Test that concurrent enable/disable operations don't lead to hangs.
*
* @throws Exception If failed.
*/
public void testConcurrentOperations() throws Exception {
final Ignite srv1 = startGrid(config(SRV_1, false, false));
final Ignite srv2 = startGrid(config(SRV_2, false, false));
final Ignite srv3 = startGrid(config(SRV_3, false, true));
final Ignite cli = startGrid(config(CLI, true, false));
final Ignite cacheCli = startGrid(config(CLI_2, true, false));
cacheCli.cluster().active(true);
final IgniteCache cache = cacheCli.getOrCreateCache(cacheConfig(PARTITIONED));
for (int i = 1; i <= 3; i++) {
// Start pushing requests.
Collection<Ignite> walNodes = new ArrayList<>();
walNodes.add(srv1);
walNodes.add(srv2);
walNodes.add(srv3);
walNodes.add(cli);
final AtomicBoolean done = new AtomicBoolean();
final CountDownLatch latch = new CountDownLatch(5);
for (Ignite node : walNodes) {
final Ignite node0 = node;
Thread t = new Thread(new Runnable() {
@Override public void run() {
checkConcurrentOperations(done, node0);
latch.countDown();
}
});
t.setName("wal-load-" + node0.name());
t.start();
}
// Do some cache loading in the mean time.
Thread t = new Thread(new Runnable() {
@Override public void run() {
int i = 0;
while (!done.get())
cache.put(i, i++);
latch.countDown();
}
});
t.setName("cache-load");
t.start();
Thread.sleep(20_000);
done.set(true);
X.println(">>> Stopping iteration: " + i);
latch.await();
X.println(">>> Iteration finished: " + i);
}
}
/**
* Check concurrent operations.
*
* @param done Done flag.
* @param node Node.
*/
private static void checkConcurrentOperations(AtomicBoolean done, Ignite node) {
ThreadLocalRandom rnd = ThreadLocalRandom.current();
boolean state = rnd.nextBoolean();
while (!done.get()) {
if (state)
node.cluster().enableWal(CACHE_NAME);
else
node.cluster().disableWal(CACHE_NAME);
state = !state;
}
try {
Thread.sleep(rnd.nextLong(200, 1000));
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
|
|
package hudson.plugins.newgenserversstatus;
import hudson.Extension;
import hudson.model.*;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import javax.servlet.jsp.jstl.core.LoopTagStatus;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Status Monitor, shows the configured Jobs in a single screen overview
*
* @author astasauskas
*/
@ExportedBean (defaultVisibility = 999)
@Extension
public class MonitorAction implements RootAction {
private static final long serialVersionUID = 1L;
private static final int COLUMNS = 1;
public String getDisplayName() {
// The Name on the Dashboard
return "Servers Status";
}
public String getIconFileName() {
return MonitorDescriptor.ACTION_LOGO_MEDIUM;
}
public String getUrlName() {
// The name of the URL path segment
return "/newgenservers";
}
    /**
     * @return list of projects to display (those that have the monitor publisher configured)
     */
private List<AbstractProject> getProjects() {
List<AbstractProject> result = new ArrayList<AbstractProject>();
List<TopLevelItem> topLevelItems = Hudson.getInstance().getItems();
for (TopLevelItem topLevelItem : topLevelItems) {
if (topLevelItem instanceof AbstractProject) {
AbstractProject abstractProject = (AbstractProject) topLevelItem;
if (abstractProject.getPublishersList().get(MonitorPublisher.DESCRIPTOR) != null) {
result.add(abstractProject);
}
}
}
return result;
}
public String getResult(AbstractProject project) {
String result;
if ((project.getLastCompletedBuild() != null) && (project.getLastCompletedBuild().getResult() != null)) {
if (project.isDisabled()) {
result = "DISABLED";
}
else {
result = project.getLastCompletedBuild().getResult().toString();
}
}
else {
result = "NOT_BUILD";
}
return result;
}
// private int getRows() {
// int size = getProjects().size();
// if (size <= 3) {
// return size;
// }
// return ((size % COLUMNS) == 0) ? (size / COLUMNS) : ((size + 1) / COLUMNS);
// }
    /**
     * Find out which server a project belongs to.
     * If the server host and port are configured explicitly on the project, they are used directly.
     * Otherwise, if a regexp pattern is configured, the project name is parsed against it
     * (group 1 = host, group 2 = port); see the illustrative note after this method.
     *
     * @param project the project to inspect
     * @return the resolved server identifier, or an "unknown" placeholder if nothing matches
     */
@Exported
public ServerIdentifier findServer(AbstractProject project) {
if (project.getName() == null) {
return new ServerIdentifier("unknown", "????", "");
}
if (!StringUtils.isEmpty(getHostFromProject(project)) && !StringUtils.isEmpty(getPortFromProject(project))) {
return new ServerIdentifier(getHostFromProject(project), getPortFromProject(project), getAppNameFromProject(project));
} else if (!StringUtils.isEmpty(getRegexpFromProject(project))) {
Pattern pattern = Pattern.compile(getRegexpFromProject(project));
Matcher matcher = pattern.matcher(project.getName());
if (matcher.matches()) {
return new ServerIdentifier(matcher.group(1), matcher.group(2), getAppNameFromProject(project));
} else {
return new ServerIdentifier("unknown", "????", "");
}
} else {
return new ServerIdentifier("unknown", "????", "");
}
}
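    // Illustrative note (hypothetical values, not plugin defaults): with a configured
    // pattern such as "([^_]+)_(\\d+).*", a project named "appserver01_8080_deploy"
    // would resolve to host "appserver01" and port "8080" via the regexp branch above.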
private String getHostFromProject(AbstractProject project) {
return ((MonitorPublisher)project.getPublishersList().get(MonitorPublisher.DESCRIPTOR)).getServerHost();
}
private String getPortFromProject(AbstractProject project) {
return ((MonitorPublisher)project.getPublishersList().get(MonitorPublisher.DESCRIPTOR)).getServerPort();
}
private String getRegexpFromProject(AbstractProject project) {
return ((MonitorPublisher)project.getPublishersList().get(MonitorPublisher.DESCRIPTOR)).getRegexpPattern();
}
private String getAppNameFromProject(AbstractProject project) {
String appName = ((MonitorPublisher)project.getPublishersList().get(MonitorPublisher.DESCRIPTOR)).getServerAppName();
if (appName == null) {
appName = "";
}
return appName;
}
    /**
     * Get row count: one row per distinct server.
     *
     * @return number of rows to display
     */
    private int getRows() {
        List<AbstractProject> projects = getProjects();
        List<ServerIdentifier> servers = extractServersList(projects);
        return servers.size();
    }
    /**
     * Get the number of projects assigned to the same server as the given project.
     *
     * @return the matching server's project count, or -1 if no server matches
     */
private int getProjectsInServerCount(AbstractProject project) {
List<AbstractProject> projects = getProjects();
List<ServerIdentifier> servers = extractServersList(projects);
for (ServerIdentifier server: servers) {
if (server.compareTo(findServer(project)) == 0) {
return server.getProjectCount();
}
}
return -1;
}
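    /**
     * Build the list of distinct servers referenced by the given projects.
     * Each distinct server appears once; its project count is incremented for
     * every additional project that maps to it.
     */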
private List<ServerIdentifier> extractServersList(List<AbstractProject> projects) {
List<ServerIdentifier> servers = new ArrayList<ServerIdentifier>();
for (AbstractProject project: projects) {
ServerIdentifier server = findServer(project);
ServerIdentifier found = null;
for (ServerIdentifier existingServer: servers) {
if (server.compareTo(existingServer) == 0) {
found = existingServer;
break;
}
}
if (found == null) {
server.setNumber(servers.size());
servers.add(server);
} else {
found.setProjectCount(found.getProjectCount() + 1);
}
}
return servers;
}
    @Exported
    public double getRowsHeight() {
        // Percentage of the view height allotted to each row, e.g. 4 servers -> 25.0.
        return 100.0 / getRows();
    }
// @Exported
// public AbstractProject[][] getProjectsArray() {
// int rows = getRows();
// AbstractProject[][] result = new AbstractProject[rows][];
// List<AbstractProject> projects = getProjects();
// for (int i = 0; i < rows; i++) {
// AbstractProject[] row = result[i];
// if (row == null) {
// if (projects.size() <= 3) {
// row = new AbstractProject[1];
// row[0] = projects.get(i);
// //row[0].setDescription(description)
// }
// else {
// // last row and uneven
// if (((i + 1) == rows) && ((projects.size() % 2) != 0)) {
// row = new AbstractProject[1];
// row[0] = projects.get(i * COLUMNS);
// }
// else {
// row = new AbstractProject[COLUMNS];
// for (int j = 0; j < COLUMNS; j++) {
// row[j] = projects.get((i * COLUMNS) + j);
// }
// }
// }
// result[i] = row;
// }
// }
// return result;
// }
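    /**
     * Build a two-dimensional array of projects grouped by server:
     * one row per distinct server, holding every project that maps to that server.
     */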
@Exported
public AbstractProject[][] getProjectsArray() {
int rows = getRows();
AbstractProject[][] result = new AbstractProject[rows][];
List<AbstractProject> projects = getProjects();
List<ServerIdentifier> servers = extractServersList(projects);
for (int i = 0; i < servers.size(); i++) {
AbstractProject[] row = result[i];
if (row == null) {
                // Size the row by the number of projects that map to this server.
                int projectsInServer = servers.get(i).getProjectCount();
                row = new AbstractProject[projectsInServer];
int count = 0;
for (AbstractProject project: projects) {
if (findServer(project).compareTo(servers.get(i)) == 0) {
row[count] = project;
count++;
}
}
result[i] = row;
}
}
return result;
}
@Exported
public int getStyleId(LoopTagStatus varStatus, AbstractProject[][] projectsArray) {
boolean lastLine = varStatus.isLast() && (projectsArray.length > 1) && (projectsArray[projectsArray.length - 1].length == 1);
        boolean oneDimensional = (projectsArray[0].length == 1);
        if (oneDimensional || lastLine) {
return 1;
}
return 2;
}
public static class ServerIdentifier implements Comparable<ServerIdentifier> {
private String host;
private String port;
private String appName;
private int number;
private int projectCount;
public ServerIdentifier(String host, String port, String appName) {
this.host = host;
this.port = port;
this.appName = appName;
this.projectCount = 1;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public String getPort() {
return port;
}
public void setPort(String port) {
this.port = port;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
public int getProjectCount() {
return projectCount;
}
public void setProjectCount(int projectCount) {
this.projectCount = projectCount;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
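        // Used as an equality check only: returns 0 when host, port and app name all match,
        // -1 otherwise (no real ordering is defined).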
public int compareTo(ServerIdentifier other) {
if (other.getHost().equals(getHost()) &&
other.getPort().equals(getPort()) &&
other.getAppName().equals(getAppName())) {
return 0;
}
return -1;
}
}
}
|
|
/**
* Copyright (c) 2016 - 2018 Syncleus, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
Copyright (c) 2010-2011, Advanced Micro Devices, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
If you use the software (in whole or in part), you shall adhere to all applicable U.S., European, and other export
laws, including but not limited to the U.S. Export Administration Regulations ("EAR"), (15 C.F.R. Sections 730 through
774), and E.U. Council Regulation (EC) No 1334/2000 of 22 June 2000. Further, pursuant to Section 740.6 of the EAR,
you hereby certify that, except pursuant to a license granted by the United States Department of Commerce Bureau of
Industry and Security or as otherwise permitted pursuant to a License Exception under the U.S. Export Administration
Regulations ("EAR"), you will not (1) export, re-export or release to a national of a country in Country Groups D:1,
E:1 or E:2 any restricted technology, software, or source code you receive hereunder, or (2) export to Country Groups
D:1, E:1 or E:2 the direct product of such technology or software, if such foreign produced direct product is subject
to national security controls as identified on the Commerce Control List (currently found in Supplement 1 to Part 774
of EAR). For the most current Country Group listings, or for additional information about the EAR or your obligations
under those regulations, please refer to the U.S. Bureau of Industry and Security's website at http://www.bis.doc.gov/.
*/
package com.aparapi.codegen;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextPane;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultStyledDocument;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyleContext;
public class SwingDiff{
JFrame frame;
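    /** Builds a side-by-side Swing window showing the left-hand and right-hand sides of a diff result. */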
public SwingDiff(Diff.DiffResult result) {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
frame = new JFrame("SwingDiff");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
JPanel panel = new JPanel(){
@Override public void paint(Graphics g) {
super.paint(g);
g.drawRect(10, 10, 100, 100);
}
};
panel.setLayout(new BorderLayout());
StyleContext sc = new StyleContext();
// Create and add the style
final Style rootStyle = sc.addStyle("Root", null);
rootStyle.addAttribute(StyleConstants.Foreground, Color.black);
            rootStyle.addAttribute(StyleConstants.FontSize, Integer.valueOf(12));
            rootStyle.addAttribute(StyleConstants.FontFamily, "serif");
            rootStyle.addAttribute(StyleConstants.Bold, Boolean.FALSE);
final Style heading1Style = sc.addStyle("Heading1", rootStyle);
heading1Style.addAttribute(StyleConstants.Foreground, Color.blue);
final Style heading2Style = sc.addStyle("Heading2", rootStyle);
heading2Style.addAttribute(StyleConstants.Foreground, Color.red);
heading2Style.addAttribute(StyleConstants.Background, Color.green);
final DefaultStyledDocument lhsdoc = new DefaultStyledDocument(sc);
JTextPane lhs = new JTextPane(lhsdoc);
lhsdoc.insertString(0, arrayToString(result.getLhs()), null);
// Finally, apply the style to the heading
lhsdoc.setParagraphAttributes(4, 1, heading2Style, false);
lhsdoc.setParagraphAttributes(20, 5, heading1Style, false);
lhs.setPreferredSize(new Dimension(800, 800));
final DefaultStyledDocument rhsdoc = new DefaultStyledDocument(sc);
JTextPane rhs = new JTextPane(rhsdoc);
rhsdoc.insertString(0, arrayToString(result.getRhs()), null);
rhsdoc.setParagraphAttributes(4, 1, heading2Style, false);
rhsdoc.setParagraphAttributes(20, 5, heading1Style, false);
rhs.setPreferredSize(new Dimension(800, 800));
panel.add(new JScrollPane(lhs), BorderLayout.WEST);
panel.add(new JScrollPane(rhs), BorderLayout.EAST);
// frame.setBackground(background);
frame.getContentPane().add(panel);
frame.pack();
frame.setVisible(true);
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
                | UnsupportedLookAndFeelException | BadLocationException e) {
            // Look-and-feel setup or document construction failed; log and continue.
            e.printStackTrace();
        }
}
public static void main(String[] args) {
String[] lhs = getFileContents("expected.c");
String[] rhs = getFileContents("actual.c");
Diff.DiffResult result = Diff.diff(lhs, rhs);
System.out.println(result);
SwingDiff swingDiff = new SwingDiff(result);
}
private static String arrayToString(String[] array) {
StringBuilder stringBuilder = new StringBuilder();
for (String line : array) {
stringBuilder.append(line).append("\n");
}
return (stringBuilder.toString().trim());
}
    private static String[] getFileContents(String string) {
        // Read all lines from the given file; returns null if the file cannot be read.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(string)))) {
            List<String> lines = new ArrayList<String>();
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                lines.add(line);
            }
            return lines.toArray(new String[0]);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
    private static String getFileContent(String string) {
        // Read the whole file into a single newline-terminated string; returns null if the file cannot be read.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(string)))) {
            StringBuilder sb = new StringBuilder();
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                sb.append(line).append("\n");
            }
            return sb.toString();
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
}
|
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class Main {
/// CHECK-START: void Main.staticNop() inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: void Main.staticNop() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static void staticNop() {
Second.staticNop(11);
}
/// CHECK-START: void Main.nop(Second) inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: void Main.nop(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static void nop(Second s) {
s.nop();
}
/// CHECK-START: java.lang.Object Main.staticReturnArg2(java.lang.String) inliner (before)
/// CHECK-DAG: <<Value:l\d+>> ParameterValue
/// CHECK-DAG: <<Ignored:i\d+>> IntConstant 77
/// CHECK-DAG: <<ClinitCk:l\d+>> ClinitCheck
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: <<Invoke:l\d+>> InvokeStaticOrDirect [<<Ignored>>,<<Value>>{{(,[ij]\d+)?}},<<ClinitCk>>]
/// CHECK-DAG: Return [<<Invoke>>]
/// CHECK-START: java.lang.Object Main.staticReturnArg2(java.lang.String) inliner (after)
/// CHECK-DAG: <<Value:l\d+>> ParameterValue
/// CHECK-DAG: Return [<<Value>>]
/// CHECK-START: java.lang.Object Main.staticReturnArg2(java.lang.String) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static Object staticReturnArg2(String value) {
return Second.staticReturnArg2(77, value);
}
/// CHECK-START: long Main.returnArg1(Second, long) inliner (before)
/// CHECK-DAG: <<Second:l\d+>> ParameterValue
/// CHECK-DAG: <<Value:j\d+>> ParameterValue
/// CHECK-DAG: <<NullCk:l\d+>> NullCheck [<<Second>>]
/// CHECK-DAG: <<Invoke:j\d+>> InvokeStaticOrDirect [<<NullCk>>,<<Value>>]
/// CHECK-DAG: Return [<<Invoke>>]
/// CHECK-START: long Main.returnArg1(Second, long) inliner (after)
/// CHECK-DAG: <<Value:j\d+>> ParameterValue
/// CHECK-DAG: Return [<<Value>>]
/// CHECK-START: long Main.returnArg1(Second, long) inliner (after)
/// CHECK-NOT: InvokeVirtual
public static long returnArg1(Second s, long value) {
return s.returnArg1(value);
}
/// CHECK-START: int Main.staticReturn9() inliner (before)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.staticReturn9() inliner (before)
/// CHECK-NOT: IntConstant 9
/// CHECK-START: int Main.staticReturn9() inliner (after)
/// CHECK-DAG: <<Const9:i\d+>> IntConstant 9
/// CHECK-DAG: Return [<<Const9>>]
/// CHECK-START: int Main.staticReturn9() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static int staticReturn9() {
return Second.staticReturn9();
}
/// CHECK-START: int Main.return7(Second) inliner (before)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.return7(Second) inliner (before)
/// CHECK-NOT: IntConstant 7
/// CHECK-START: int Main.return7(Second) inliner (after)
/// CHECK-DAG: <<Const7:i\d+>> IntConstant 7
/// CHECK-DAG: Return [<<Const7>>]
/// CHECK-START: int Main.return7(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static int return7(Second s) {
return s.return7(null);
}
/// CHECK-START: java.lang.String Main.staticReturnNull() inliner (before)
/// CHECK: {{l\d+}} InvokeStaticOrDirect
/// CHECK-START: java.lang.String Main.staticReturnNull() inliner (before)
/// CHECK-NOT: NullConstant
/// CHECK-START: java.lang.String Main.staticReturnNull() inliner (after)
/// CHECK-DAG: <<Null:l\d+>> NullConstant
/// CHECK-DAG: Return [<<Null>>]
/// CHECK-START: java.lang.String Main.staticReturnNull() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static String staticReturnNull() {
return Second.staticReturnNull();
}
/// CHECK-START: java.lang.Object Main.returnNull(Second) inliner (before)
/// CHECK: {{l\d+}} InvokeStaticOrDirect
/// CHECK-START: java.lang.Object Main.returnNull(Second) inliner (before)
/// CHECK-NOT: NullConstant
/// CHECK-START: java.lang.Object Main.returnNull(Second) inliner (after)
/// CHECK-DAG: <<Null:l\d+>> NullConstant
/// CHECK-DAG: Return [<<Null>>]
/// CHECK-START: java.lang.Object Main.returnNull(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static Object returnNull(Second s) {
return s.returnNull();
}
/// CHECK-START: int Main.getInt(Second) inliner (before)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.getInt(Second) inliner (after)
/// CHECK: {{i\d+}} InstanceFieldGet
/// CHECK-START: int Main.getInt(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static int getInt(Second s) {
return s.getInstanceIntField();
}
/// CHECK-START: double Main.getDouble(Second) inliner (before)
/// CHECK: {{d\d+}} InvokeStaticOrDirect
/// CHECK-START: double Main.getDouble(Second) inliner (after)
/// CHECK: {{d\d+}} InstanceFieldGet
/// CHECK-START: double Main.getDouble(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static double getDouble(Second s) {
return s.getInstanceDoubleField(22);
}
/// CHECK-START: java.lang.Object Main.getObject(Second) inliner (before)
/// CHECK: {{l\d+}} InvokeStaticOrDirect
/// CHECK-START: java.lang.Object Main.getObject(Second) inliner (after)
/// CHECK: {{l\d+}} InstanceFieldGet
/// CHECK-START: java.lang.Object Main.getObject(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static Object getObject(Second s) {
return s.getInstanceObjectField(-1L);
}
/// CHECK-START: java.lang.String Main.getString(Second) inliner (before)
/// CHECK: {{l\d+}} InvokeStaticOrDirect
/// CHECK-START: java.lang.String Main.getString(Second) inliner (after)
/// CHECK: {{l\d+}} InstanceFieldGet
/// CHECK-START: java.lang.String Main.getString(Second) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static String getString(Second s) {
return s.getInstanceStringField(null, "whatever", 1234L);
}
/// CHECK-START: int Main.staticGetInt(Second) inliner (before)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.staticGetInt(Second) inliner (after)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.staticGetInt(Second) inliner (after)
/// CHECK-NOT: InstanceFieldGet
public static int staticGetInt(Second s) {
return Second.staticGetInstanceIntField(s);
}
/// CHECK-START: double Main.getDoubleFromParam(Second) inliner (before)
/// CHECK: {{d\d+}} InvokeStaticOrDirect
/// CHECK-START: double Main.getDoubleFromParam(Second) inliner (after)
/// CHECK: {{d\d+}} InvokeStaticOrDirect
/// CHECK-START: double Main.getDoubleFromParam(Second) inliner (after)
/// CHECK-NOT: InstanceFieldGet
public static double getDoubleFromParam(Second s) {
return s.getInstanceDoubleFieldFromParam(s);
}
/// CHECK-START: int Main.getStaticInt(Second) inliner (before)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.getStaticInt(Second) inliner (after)
/// CHECK: {{i\d+}} InvokeStaticOrDirect
/// CHECK-START: int Main.getStaticInt(Second) inliner (after)
/// CHECK-NOT: InstanceFieldGet
/// CHECK-NOT: StaticFieldGet
public static int getStaticInt(Second s) {
return s.getStaticIntField();
}
/// CHECK-START: long Main.setLong(Second, long) inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: long Main.setLong(Second, long) inliner (after)
/// CHECK: InstanceFieldSet
/// CHECK-START: long Main.setLong(Second, long) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static long setLong(Second s, long value) {
s.setInstanceLongField(-1, value);
return s.instanceLongField;
}
/// CHECK-START: long Main.setLongReturnArg2(Second, long, int) inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: long Main.setLongReturnArg2(Second, long, int) inliner (after)
/// CHECK-DAG: <<Second:l\d+>> ParameterValue
/// CHECK-DAG: <<Value:j\d+>> ParameterValue
/// CHECK-DAG: <<Arg2:i\d+>> ParameterValue
/// CHECK-DAG: <<NullCk:l\d+>> NullCheck [<<Second>>]
/// CHECK-DAG: InstanceFieldSet [<<NullCk>>,<<Value>>]
/// CHECK-DAG: <<NullCk2:l\d+>> NullCheck [<<Second>>]
/// CHECK-DAG: <<IGet:j\d+>> InstanceFieldGet [<<NullCk2>>]
/// CHECK-DAG: <<Conv:j\d+>> TypeConversion [<<Arg2>>]
/// CHECK-DAG: <<Add:j\d+>> Add [<<IGet>>,<<Conv>>]
/// CHECK-DAG: Return [<<Add>>]
/// CHECK-START: long Main.setLongReturnArg2(Second, long, int) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static long setLongReturnArg2(Second s, long value, int arg2) {
int result = s.setInstanceLongFieldReturnArg2(value, arg2);
return s.instanceLongField + result;
}
/// CHECK-START: long Main.staticSetLong(Second, long) inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: long Main.staticSetLong(Second, long) inliner (after)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: long Main.staticSetLong(Second, long) inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static long staticSetLong(Second s, long value) {
Second.staticSetInstanceLongField(s, value);
return s.instanceLongField;
}
/// CHECK-START: long Main.setLongThroughParam(Second, long) inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: long Main.setLongThroughParam(Second, long) inliner (after)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: long Main.setLongThroughParam(Second, long) inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static long setLongThroughParam(Second s, long value) {
s.setInstanceLongFieldThroughParam(s, value);
return s.instanceLongField;
}
/// CHECK-START: float Main.setStaticFloat(Second, float) inliner (before)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: float Main.setStaticFloat(Second, float) inliner (after)
/// CHECK: InvokeStaticOrDirect
/// CHECK-START: float Main.setStaticFloat(Second, float) inliner (after)
/// CHECK-NOT: InstanceFieldSet
/// CHECK-NOT: StaticFieldSet
public static float setStaticFloat(Second s, float value) {
s.setStaticFloatField(value);
return s.staticFloatField;
}
/// CHECK-START: java.lang.Object Main.newObject() inliner (before)
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>{{(,[ij]\d+)?}}] method_name:java.lang.Object.<init>
/// CHECK-START: java.lang.Object Main.newObject() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
public static Object newObject() {
return new Object();
}
/// CHECK-START: double Main.constructBase() inliner (before)
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructBase() {
Base b = new Base();
return b.intField + b.doubleField;
}
/// CHECK-START: double Main.constructBase(int) inliner (before)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(int) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructBase(int) inliner (after)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-START: double Main.constructBase(int) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(int intValue) {
Base b = new Base(intValue);
return b.intField + b.doubleField;
}
/// CHECK-START: double Main.constructBaseWith0() inliner (before)
/// CHECK-DAG: <<Value:i\d+>> IntConstant 0
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBaseWith0() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructBaseWith0() {
Base b = new Base(0);
return b.intField + b.doubleField;
}
/// CHECK-START: java.lang.String Main.constructBase(java.lang.String) inliner (before)
/// CHECK-DAG: <<Value:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: java.lang.String Main.constructBase(java.lang.String) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: java.lang.String Main.constructBase(java.lang.String) inliner (after)
/// CHECK-DAG: <<Value:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-START: java.lang.String Main.constructBase(java.lang.String) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static String constructBase(String stringValue) {
Base b = new Base(stringValue);
return b.stringField;
}
/// CHECK-START: java.lang.String Main.constructBaseWithNullString() inliner (before)
/// CHECK-DAG: <<Null:l\d+>> NullConstant
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Null>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: java.lang.String Main.constructBaseWithNullString() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: java.lang.String Main.constructBaseWithNullString() inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static String constructBaseWithNullString() {
String stringValue = null;
Base b = new Base(stringValue);
return b.stringField;
}
/// CHECK-START: double Main.constructBase(double, java.lang.Object) inliner (before)
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<DValue>>,<<OValue>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(double, java.lang.Object) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructBase(double, java.lang.Object) inliner (after)
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<DValue>>]
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<OValue>>]
/// CHECK-START: double Main.constructBase(double, java.lang.Object) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(double doubleValue, Object objectValue) {
Base b = new Base(doubleValue, objectValue);
return (b.objectField != null) ? b.doubleField : -b.doubleField;
}
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,<<OValue>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object) inliner (after)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<IValue>>]
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<DValue>>]
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<OValue>>]
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-DAG: InstanceFieldSet
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(int intValue, double doubleValue, Object objectValue) {
Base b = new Base(intValue, doubleValue, objectValue);
double tmp = b.intField + b.doubleField;
return (b.objectField != null) ? tmp : -tmp;
}
/// CHECK-START: double Main.constructBaseWith0DoubleNull(double) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> IntConstant 0
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> NullConstant
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,<<OValue>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBaseWith0DoubleNull(double) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructBaseWith0DoubleNull(double) inliner (after)
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<DValue>>]
/// CHECK-START: double Main.constructBaseWith0DoubleNull(double) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructBaseWith0DoubleNull(double doubleValue) {
Base b = new Base(0, doubleValue, null);
double tmp = b.intField + b.doubleField;
return (b.objectField != null) ? tmp : -tmp;
}
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object, java.lang.String) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,{{l\d+}},{{l\d+}}{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object, java.lang.String) inliner (after)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,{{l\d+}},{{l\d+}}{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(int, double, java.lang.Object, java.lang.String) inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(
int intValue, double doubleValue, Object objectValue, String stringValue) {
Base b = new Base(intValue, doubleValue, objectValue, stringValue);
double tmp = b.intField + b.doubleField;
tmp = (b.objectField != null) ? tmp : -tmp;
return (b.stringField != null) ? 2.0 * tmp : 0.5 * tmp;
}
/// CHECK-START: double Main.constructBase(double) inliner (before)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(double) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructBase(double) inliner (after)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-START: double Main.constructBase(double) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(double doubleValue) {
Base b = new Base(doubleValue);
return b.intField + b.doubleField;
}
/// CHECK-START: double Main.constructBaseWith0d() inliner (before)
/// CHECK-DAG: <<Value:d\d+>> DoubleConstant
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBaseWith0d() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructBaseWith0d() {
Base b = new Base(0.0);
return b.intField + b.doubleField;
}
/// CHECK-START: double Main.constructBase(java.lang.Object) inliner (before)
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<OValue>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(java.lang.Object) inliner (after)
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<OValue>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(java.lang.Object) inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(Object objectValue) {
Base b = new Base(objectValue);
double tmp = b.intField + b.doubleField;
return (b.objectField != null) ? tmp + 1.0 : tmp - 1.0;
}
/// CHECK-START: double Main.constructBase(int, long) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<JValue:j\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<JValue>>{{(,[ij]\d+)?}}] method_name:Base.<init>
/// CHECK-START: double Main.constructBase(int, long) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructBase(int, long) inliner (after)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<IValue>>]
/// CHECK-START: double Main.constructBase(int, long) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructBase(int intValue, long dummy) {
Base b = new Base(intValue, dummy);
return b.intField + b.doubleField;
}
/// CHECK-START: double Main.constructDerived() inliner (before)
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived() {
Derived d = new Derived();
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerived(int) inliner (before)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(int) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructDerived(int) inliner (after)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-START: double Main.constructDerived(int) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived(int intValue) {
Derived d = new Derived(intValue);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWith0() inliner (before)
/// CHECK-DAG: <<Value:i\d+>> IntConstant 0
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerivedWith0() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWith0() {
Derived d = new Derived(0);
return d.intField + d.doubleField;
}
/// CHECK-START: java.lang.String Main.constructDerived(java.lang.String) inliner (before)
/// CHECK-DAG: <<Value:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: java.lang.String Main.constructDerived(java.lang.String) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: java.lang.String Main.constructDerived(java.lang.String) inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static String constructDerived(String stringValue) {
Derived d = new Derived(stringValue);
return d.stringField;
}
/// CHECK-START: double Main.constructDerived(double) inliner (before)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(double) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructDerived(double) inliner (after)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-START: double Main.constructDerived(double) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived(double doubleValue) {
Derived d = new Derived(doubleValue);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWith0d() inliner (before)
/// CHECK-DAG: <<Value:d\d+>> DoubleConstant
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerivedWith0d() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWith0d() {
Derived d = new Derived(0.0);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,<<OValue>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object) inliner (after)
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<DValue>>]
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived(int intValue, double doubleValue, Object objectValue) {
Derived d = new Derived(intValue, doubleValue, objectValue);
double tmp = d.intField + d.doubleField;
return (d.objectField != null) ? tmp : -tmp;
}
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, java.lang.String) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,{{l\d+}},{{l\d+}}{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, java.lang.String) inliner (after)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,{{l\d+}},{{l\d+}}{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, java.lang.String) inliner (after)
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived(
int intValue, double doubleValue, Object objectValue, String stringValue) {
Derived d = new Derived(intValue, doubleValue, objectValue, stringValue);
double tmp = d.intField + d.doubleField;
tmp = (d.objectField != null) ? tmp : -tmp;
return (d.stringField != null) ? 2.0 * tmp : 0.5 * tmp;
}
/// CHECK-START: double Main.constructDerived(float) inliner (before)
/// CHECK-DAG: <<Value:f\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(float) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructDerived(float) inliner (after)
/// CHECK-DAG: <<Value:f\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-START: double Main.constructDerived(float) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived(float floatValue) {
Derived d = new Derived(floatValue);
return d.intField + d.doubleField + d.floatField;
}
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, float) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<OValue:l\d+>> ParameterValue
/// CHECK-DAG: <<FValue:f\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>,<<OValue>>,<<FValue>>{{(,[ij]\d+)?}}] method_name:Derived.<init>
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, float) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, float) inliner (after)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<FValue:f\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<IValue>>]
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<DValue>>]
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<FValue>>]
/// CHECK-START: double Main.constructDerived(int, double, java.lang.Object, float) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-DAG: InstanceFieldSet
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerived(
int intValue, double doubleValue, Object objectValue, float floatValue) {
Derived d = new Derived(intValue, doubleValue, objectValue, floatValue);
double tmp = d.intField + d.doubleField + d.floatField;
return (d.objectField != null) ? tmp : -tmp;
}
/// CHECK-START: int Main.constructBaseWithFinalField() inliner (before)
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>{{(,[ij]\d+)?}}] method_name:BaseWithFinalField.<init>
/// CHECK-START: int Main.constructBaseWithFinalField() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static int constructBaseWithFinalField() {
BaseWithFinalField b = new BaseWithFinalField();
return b.intField;
}
/// CHECK-START: int Main.constructBaseWithFinalField(int) inliner (before)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:BaseWithFinalField.<init>
/// CHECK-START: int Main.constructBaseWithFinalField(int) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-START: int Main.constructBaseWithFinalField(int) inliner (after)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-DAG: MemoryBarrier
/// CHECK-START: int Main.constructBaseWithFinalField(int) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static int constructBaseWithFinalField(int intValue) {
BaseWithFinalField b = new BaseWithFinalField(intValue);
return b.intField;
}
/// CHECK-START: int Main.constructBaseWithFinalFieldWith0() inliner (before)
/// CHECK-DAG: <<Value:i\d+>> IntConstant 0
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:BaseWithFinalField.<init>
/// CHECK-START: int Main.constructBaseWithFinalFieldWith0() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static int constructBaseWithFinalFieldWith0() {
BaseWithFinalField b = new BaseWithFinalField(0);
return b.intField;
}
/// CHECK-START: double Main.constructDerivedWithFinalField() inliner (before)
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalField() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWithFinalField() {
DerivedWithFinalField d = new DerivedWithFinalField();
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWithFinalField(int) inliner (before)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalField(int) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-START: double Main.constructDerivedWithFinalField(int) inliner (after)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-DAG: MemoryBarrier
/// CHECK-START: double Main.constructDerivedWithFinalField(int) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWithFinalField(int intValue) {
DerivedWithFinalField d = new DerivedWithFinalField(intValue);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWithFinalFieldWith0() inliner (before)
/// CHECK-DAG: <<Value:i\d+>> IntConstant 0
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalFieldWith0() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWithFinalFieldWith0() {
DerivedWithFinalField d = new DerivedWithFinalField(0);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWithFinalField(double) inliner (before)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalField(double) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-START: double Main.constructDerivedWithFinalField(double) inliner (after)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-DAG: MemoryBarrier
/// CHECK-START: double Main.constructDerivedWithFinalField(double) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWithFinalField(double doubleValue) {
DerivedWithFinalField d = new DerivedWithFinalField(doubleValue);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWithFinalFieldWith0d() inliner (before)
/// CHECK-DAG: <<Value:d\d+>> DoubleConstant
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalFieldWith0d() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWithFinalFieldWith0d() {
DerivedWithFinalField d = new DerivedWithFinalField(0.0);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWithFinalField(int, double) inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> ParameterValue
/// CHECK-DAG: <<DValue:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalField(int, double) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-START: double Main.constructDerivedWithFinalField(int, double) inliner (after)
/// CHECK-DAG: <<Value:d\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
/// CHECK-DAG: InstanceFieldSet [<<Obj>>,<<Value>>]
/// CHECK-DAG: MemoryBarrier
/// CHECK-START: double Main.constructDerivedWithFinalField(int, double) inliner (after)
/// CHECK-DAG: InstanceFieldSet
/// CHECK-DAG: InstanceFieldSet
/// CHECK-NOT: InstanceFieldSet
/// CHECK-START: double Main.constructDerivedWithFinalField(int, double) inliner (after)
/// CHECK-DAG: MemoryBarrier
/// CHECK-NOT: MemoryBarrier
public static double constructDerivedWithFinalField(int intValue, double doubleValue) {
DerivedWithFinalField d = new DerivedWithFinalField(intValue, doubleValue);
return d.intField + d.doubleField;
}
/// CHECK-START: double Main.constructDerivedWithFinalFieldWith0And0d() inliner (before)
/// CHECK-DAG: <<IValue:i\d+>> IntConstant 0
/// CHECK-DAG: <<DValue:d\d+>> DoubleConstant
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<IValue>>,<<DValue>>{{(,[ij]\d+)?}}] method_name:DerivedWithFinalField.<init>
/// CHECK-START: double Main.constructDerivedWithFinalFieldWith0And0d() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static double constructDerivedWithFinalFieldWith0And0d() {
DerivedWithFinalField d = new DerivedWithFinalField(0, 0.0);
return d.intField + d.doubleField;
}
/// CHECK-START: int Main.constructDerivedInSecondDex() inliner (before)
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>{{(,[ij]\d+)?}}] method_name:DerivedInSecondDex.<init>
/// CHECK-START: int Main.constructDerivedInSecondDex() inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static int constructDerivedInSecondDex() {
DerivedInSecondDex d = new DerivedInSecondDex();
return d.intField;
}
/// CHECK-START: int Main.constructDerivedInSecondDex(int) inliner (before)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedInSecondDex.<init>
/// CHECK-START: int Main.constructDerivedInSecondDex(int) inliner (after)
/// CHECK-DAG: <<Value:i\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedInSecondDex.<init>
/// CHECK-START: int Main.constructDerivedInSecondDex(int) inliner (after)
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static int constructDerivedInSecondDex(int intValue) {
DerivedInSecondDex d = new DerivedInSecondDex(intValue);
return d.intField;
}
/// CHECK-START: int Main.constructDerivedInSecondDexWith0() inliner (before)
/// CHECK-DAG: <<Value:i\d+>> IntConstant 0
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedInSecondDex.<init>
/// CHECK-START: int Main.constructDerivedInSecondDexWith0() inliner (after)
/// CHECK-DAG: <<Value:i\d+>> IntConstant 0
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedInSecondDex.<init>
/// CHECK-START: int Main.constructDerivedInSecondDexWith0() inliner (after)
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static int constructDerivedInSecondDexWith0() {
DerivedInSecondDex d = new DerivedInSecondDex(0);
return d.intField;
}
/// CHECK-START: int Main.constructDerivedInSecondDex(long) inliner (before)
/// CHECK-DAG: <<Value:j\d+>> ParameterValue
/// CHECK-DAG: <<Obj:l\d+>> NewInstance
// Note: The ArtMethod* (typed as int or long) is optional after sharpening.
/// CHECK-DAG: InvokeStaticOrDirect [<<Obj>>,<<Value>>{{(,[ij]\d+)?}}] method_name:DerivedInSecondDex.<init>
/// CHECK-START: int Main.constructDerivedInSecondDex(long) inliner (after)
/// CHECK-NOT: InvokeStaticOrDirect
/// CHECK-NOT: MemoryBarrier
/// CHECK-NOT: InstanceFieldSet
public static int constructDerivedInSecondDex(long dummy) {
DerivedInSecondDex d = new DerivedInSecondDex(dummy);
return d.intField;
}
public static void main(String[] args) throws Exception {
Second s = new Second();
// Replaced NOP pattern.
staticNop();
nop(s);
// Replaced "return arg" pattern.
assertEquals("arbitrary string", staticReturnArg2("arbitrary string"));
assertEquals(4321L, returnArg1(s, 4321L));
// Replaced "return const" pattern.
assertEquals(9, staticReturn9());
assertEquals(7, return7(s));
assertEquals(null, staticReturnNull());
assertEquals(null, returnNull(s));
// Replaced IGET pattern.
assertEquals(42, getInt(s));
assertEquals(-42.0, getDouble(s));
assertEquals(null, getObject(s));
assertEquals("dummy", getString(s));
// Not replaced IGET pattern.
assertEquals(42, staticGetInt(s));
assertEquals(-42.0, getDoubleFromParam(s));
// SGET.
assertEquals(4242, getStaticInt(s));
// Replaced IPUT pattern.
assertEquals(111L, setLong(s, 111L));
assertEquals(345L, setLongReturnArg2(s, 222L, 123));
// Not replaced IPUT pattern.
assertEquals(222L, staticSetLong(s, 222L));
assertEquals(333L, setLongThroughParam(s, 333L));
// SPUT.
assertEquals(-11.5f, setStaticFloat(s, -11.5f));
if (newObject() == null) {
throw new AssertionError("new Object() cannot be null.");
}
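// Constructor inlining: the expected IR after inlining is encoded in the
// CHECK-START annotations on each constructXxx() helper above.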
assertEquals(0.0, constructBase());
assertEquals(42.0, constructBase(42));
assertEquals(0.0, constructBaseWith0());
assertEquals("something", constructBase("something"));
assertEquals(null, constructBaseWithNullString());
assertEquals(11.0, constructBase(11.0, new Object()));
assertEquals(-12.0, constructBase(12.0, null));
assertEquals(30.0, constructBase(17, 13.0, new Object()));
assertEquals(-34.0, constructBase(19, 15.0, null));
assertEquals(-22.5, constructBaseWith0DoubleNull(22.5));
assertEquals(-8.0, constructBase(2, 14.0, null, null));
assertEquals(-64.0, constructBase(4, 28.0, null, "dummy"));
assertEquals(13.0, constructBase(24, 2.0, new Object(), null));
assertEquals(30.0, constructBase(11, 4.0, new Object(), "dummy"));
assertEquals(43.0, constructBase(43.0));
assertEquals(0.0, constructBaseWith0d());
assertEquals(1.0, constructBase(new Object()));
assertEquals(-1.0, constructBase((Object) null));
assertEquals(123.0, constructBase(123, 65L));
assertEquals(0.0, constructDerived());
assertEquals(73.0, constructDerived(73));
assertEquals(0.0, constructDerivedWith0());
assertEquals(null, constructDerived("something else"));
assertEquals(18.0, constructDerived(18.0));
assertEquals(0.0, constructDerivedWith0d());
assertEquals(-7.0, constructDerived(5, 7.0, new Object()));
assertEquals(-4.0, constructDerived(9, 4.0, null));
assertEquals(0.0, constructDerived(1, 9.0, null, null));
assertEquals(0.0, constructDerived(2, 8.0, null, "dummy"));
assertEquals(0.0, constructDerived(3, 7.0, new Object(), null));
assertEquals(0.0, constructDerived(4, 6.0, new Object(), "dummy"));
assertEquals(17.0, constructDerived(17.0f));
assertEquals(-5.5, constructDerived(6, -7.0, new Object(), 6.5f));
assertEquals(0, constructBaseWithFinalField());
assertEquals(77, constructBaseWithFinalField(77));
assertEquals(0, constructBaseWithFinalFieldWith0());
assertEquals(0.0, constructDerivedWithFinalField());
assertEquals(-33.0, constructDerivedWithFinalField(-33));
assertEquals(0.0, constructDerivedWithFinalFieldWith0());
assertEquals(-44.0, constructDerivedWithFinalField(-44.0));
assertEquals(0.0, constructDerivedWithFinalFieldWith0d());
assertEquals(88, constructDerivedWithFinalField(22, 66.0));
assertEquals(0.0, constructDerivedWithFinalFieldWith0And0d());
assertEquals(0, constructDerivedInSecondDex());
assertEquals(123, constructDerivedInSecondDex(123));
assertEquals(0, constructDerivedInSecondDexWith0());
assertEquals(0, constructDerivedInSecondDex(7L));
}
private static void assertEquals(int expected, int actual) {
if (expected != actual) {
throw new AssertionError("Wrong result: " + expected + " != " + actual);
}
}
private static void assertEquals(double expected, double actual) {
if (expected != actual) {
throw new AssertionError("Wrong result: " + expected + " != " + actual);
}
}
private static void assertEquals(Object expected, Object actual) {
if (expected != actual && (expected == null || !expected.equals(actual))) {
throw new AssertionError("Wrong result: " + expected + " != " + actual);
}
}
}
|
|
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.CancelCriterion;
import com.gemstone.gemfire.InternalGemFireError;
import com.gemstone.gemfire.cache.EvictionAttributes;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.RegionService;
import com.gemstone.gemfire.cache.persistence.PartitionOfflineException;
import com.gemstone.gemfire.distributed.DistributedLockService;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.DistributionAdvisee;
import com.gemstone.gemfire.distributed.internal.DistributionAdvisor;
import com.gemstone.gemfire.distributed.internal.DistributionAdvisor.Profile;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.cache.CacheDistributionAdvisor.CacheProfile;
import com.gemstone.gemfire.internal.cache.DiskInitFile.DiskRegionFlag;
import com.gemstone.gemfire.internal.cache.PartitionedRegion.BucketLock;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDataStore.CreateBucketResult;
import com.gemstone.gemfire.internal.cache.partitioned.Bucket;
import com.gemstone.gemfire.internal.cache.persistence.PersistentMemberID;
import com.gemstone.gemfire.internal.cache.persistence.PersistentMemberManager;
import com.gemstone.gemfire.internal.cache.persistence.PersistentMembershipView;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
/**
* Empty shell for {@link BucketRegion} which exists only to maintain metadata
* in the form of a {@link BucketAdvisor}
*
* @author Kirk Lund
* @since 5.1
*/
public final class ProxyBucketRegion implements Bucket {
private static final Logger logger = LogService.getLogger();
private final int serialNumber;
private final int bid;
private final PartitionedRegion partitionedRegion;
private final BucketAdvisor advisor;
private final BucketPersistenceAdvisor persistenceAdvisor;
private volatile BucketRegion realBucket = null;
private final AtomicBoolean bucketSick = new AtomicBoolean(false);
private final Set<DistributedMember> sickHosts = new HashSet<DistributedMember>();
private final DiskRegion diskRegion;
private final BucketLock bucketLock;
/**
* Note that LocalRegion has a version of this name spelled "NO_PARTITITON".
* So if code is written that compares against this constant, make sure to also
* compare to the other one from LocalRegion.
* The one in LocalRegion is a typo but has already been persisted in older versions.
*/
public static final String NO_FIXED_PARTITION_NAME = "NO_PARTITION";
/**
* Constructs a new ProxyBucketRegion which has a BucketAdvisor.
* @param bid the bucket id
* @param partitionedRegion the PartitionedRegion that owns this bucket
* @param internalRegionArgs the internal args which includes RegionAdvisor
*/
public ProxyBucketRegion(int bid,
PartitionedRegion partitionedRegion,
InternalRegionArguments internalRegionArgs) {
this.serialNumber = DistributionAdvisor.createSerialNumber();
this.bid = bid;
this.partitionedRegion = partitionedRegion;
this.advisor = BucketAdvisor.createBucketAdvisor(this, internalRegionArgs.getPartitionedRegionAdvisor());
this.bucketLock = this.partitionedRegion.getBucketLock(this.bid);
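// Persistent partitioned regions need a DiskRegion and a persistence advisor so
// that bucket metadata can later be recovered from disk; non-persistent regions
// skip this setup entirely.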
if(this.partitionedRegion.getDataPolicy().withPersistence() ) {
String regionPath = getFullPath();
PersistentMemberManager memberManager = partitionedRegion.getGemFireCache().getPersistentMemberManager();
DiskRegionStats diskStats = partitionedRegion.getDiskRegionStats();
DistributedLockService dl = partitionedRegion.getGemFireCache().getPartitionedRegionLockService();
DiskStoreImpl ds = partitionedRegion.getDiskStore();
EvictionAttributes ea = partitionedRegion.getAttributes().getEvictionAttributes();
EnumSet<DiskRegionFlag> diskFlags = EnumSet.noneOf(DiskRegionFlag.class);
// Add flag if this region has versioning enabled
if(partitionedRegion.getConcurrencyChecksEnabled()) {
diskFlags.add(DiskRegionFlag.IS_WITH_VERSIONING);
}
boolean overflowEnabled = ea != null && ea.getAction().isOverflowToDisk();
int startingBucketID = -1;
String partitionName = NO_FIXED_PARTITION_NAME;
List<FixedPartitionAttributesImpl> fpaList = partitionedRegion.getFixedPartitionAttributesImpl();
if (fpaList != null) {
for (FixedPartitionAttributesImpl fpa : fpaList) {
if (fpa.hasBucket(bid)) {
startingBucketID = fpa.getStartingBucketID();
partitionName = fpa.getPartitionName();
break;
}
}
}
this.diskRegion = DiskRegion.create(ds,
regionPath, true, partitionedRegion.getPersistBackup(),
overflowEnabled, partitionedRegion.isDiskSynchronous(),
partitionedRegion.getDiskRegionStats(),
partitionedRegion.getCancelCriterion(),
partitionedRegion,
partitionedRegion.getAttributes(),
diskFlags, partitionName, startingBucketID,
partitionedRegion.getCompressor());
if (fpaList != null) {
for (FixedPartitionAttributesImpl fpa : fpaList) {
if (fpa.getPartitionName().equals(this.diskRegion.getPartitionName())
&& this.diskRegion.getStartingBucketId() != -1) {
fpa.setStartingBucketID(this.diskRegion.getStartingBucketId());
partitionedRegion.getPartitionsMap()
.put(
fpa.getPartitionName(),
new Integer[] { fpa.getStartingBucketID(),
fpa.getNumBuckets() });
}
}
}
this.persistenceAdvisor = new BucketPersistenceAdvisor(advisor, dl,
diskRegion, regionPath, diskStats, memberManager, bucketLock, this);
} else {
this.diskRegion = null;
this.persistenceAdvisor = null;
}
}
public CancelCriterion getCancelCriterion() {
return this.partitionedRegion.getCache().getCancelCriterion();
}
public void close() {
if(this.persistenceAdvisor != null) {
this.persistenceAdvisor.close();
}
this.advisor.closeAdvisor();
if(this.diskRegion != null) {
this.diskRegion.close(null);
}
}
public int getSerialNumber() {
// always return the serial number for this proxy, NOT the bucket region
return this.serialNumber;
}
public DM getDistributionManager() {
return getSystem().getDistributionManager();
}
public DistributionAdvisor getDistributionAdvisor() {
return this.advisor;
}
public CacheDistributionAdvisor getCacheDistributionAdvisor() {
return this.advisor;
}
public Profile getProfile() {
return this.advisor.createProfile();
}
public DistributionAdvisee getParentAdvisee() {
return this.partitionedRegion;
}
public PartitionedRegion getPartitionedRegion() {
return this.partitionedRegion;
}
public InternalDistributedSystem getSystem() {
return this.partitionedRegion.getCache().getDistributedSystem();
}
public String getName() {
return getPartitionedRegion().getBucketName(this.bid);
}
public String getFullPath() {
return Region.SEPARATOR + PartitionedRegionHelper.PR_ROOT_REGION_NAME + Region.SEPARATOR +
getPartitionedRegion().getBucketName(this.bid);
}
public GemFireCacheImpl getCache() {
return this.partitionedRegion.getCache();
}
public RegionService getCacheView() {
return this.partitionedRegion.getRegionService();
}
public RegionAttributes getAttributes() {
return this.partitionedRegion.getAttributes();
}
public final BucketAdvisor getBucketAdvisor() {
return this.advisor;
}
/**
* Notify this proxy of the real bucket that is its target. Future calls to this
* instance will then be proxied to the real bucket.
* @param br the real bucket which will be the target for this proxy
*/
public void setBucketRegion(BucketRegion br)
{
Assert.assertTrue(this.realBucket == null);
Assert.assertTrue( ! this.advisor.isHosting());
this.realBucket = br;
// Fixes several bugs, including 36881: creation of the BR may occur at the
// same time another thread is destroying the PR, and now that this BR is
// visible to the destroy thread we want to prevent sending bogus
// CreateRegion or profile update messages.
this.partitionedRegion.checkReadiness();
this.partitionedRegion.checkClosed();
}
public void clearBucketRegion(BucketRegion br) {
Assert.assertTrue(this.realBucket == br);
this.realBucket = null;
}
public void setHosting(boolean value) {
if (value) {
PartitionedRegion region = this.getPartitionedRegion();
Assert.assertTrue(this.realBucket != null);
Assert.assertTrue( ! this.advisor.isHosting());
if (region.isFixedPartitionedRegion()) {
List<FixedPartitionAttributesImpl> list = region
.getFixedPartitionAttributesImpl();
if (list != null) {
for (FixedPartitionAttributesImpl info : list) {
if (info.hasBucket(bid)) {
this.advisor.setHosting(true);
break;
}
}
}
}
else { // normal PR
this.advisor.setHosting(true);
}
}
else {
// Assert.assertTrue(!getPartitionedRegion().getDataStore().isManagingBucket(this.bid));
this.advisor.setHosting(false);
this.realBucket = null;
}
}
public void removeBucket() {
this.realBucket.removeFromPeersAdvisors(true);
this.advisor.removeBucket();
this.realBucket = null;
}
/**
* Get the redundancy of this bucket, taking into
* account the local bucket, if any.
* @return number of redundant copies for a given bucket, or -1 if
* there are no instances of the bucket.
*/
public int getBucketRedundancy() {
return getBucketAdvisor().getBucketRedundancy();
}
public boolean isPrimary()
{
return this.advisor.isPrimary();
}
/**
* Returns the real BucketRegion if one has been created. This call will
* return the bucket even if it is still being initialized. Returns null
* if the bucket has not been created locally.
*
* @return the real bucket if currently created or null
*/
public BucketRegion getCreatedBucketRegion() {
return this.realBucket;
}
/**
* Returns the real BucketRegion that is currently being locally hosted.
* Returns null if the real bucket is null or if it is still being
* initialized. After the bucket is initialized, isHosting will be flagged
* true and future calls to this method will return the bucket.
*
* @return the real bucket if currently hosted or null
*/
public BucketRegion getHostedBucketRegion() {
if (this.advisor.isHosting()) {
return this.realBucket;
}
else {
return null;
}
}
public boolean isHosting() {
return this.advisor.isHosting();
}
public void fillInProfile(Profile profile) {
if (logger.isDebugEnabled()) {
logger.debug("ProxyBucketRegion filling in profile: {}", profile);
}
BucketRegion bucket = this.realBucket;
if (bucket != null) {
bucket.fillInProfile(profile);
}
}
public ProxyBucketRegion initialize()
{
// initializationGate() is deliberately dead-coded here to prevent a profile exchange
//this.advisor.initializationGate();
this.advisor.setInitialized();
return this;
}
public Set<InternalDistributedMember> getBucketOwners()
{
Set<InternalDistributedMember> s = this.advisor.adviseInitialized();
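// adviseInitialized() may return the shared immutable empty set; swap in a
// mutable copy before adding the local member below.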
if (s == Collections.<InternalDistributedMember>emptySet()) {
s = new HashSet<InternalDistributedMember>();
}
if (isHosting()) {
s.add(this.partitionedRegion.getDistributionManager().getId());
}
return s;
}
/**
* Returns the total number of datastores hosting an instance of this bucket.
*
* @return the total number of datastores hosting an instance of this bucket
*/
public int getBucketOwnersCount() {
return this.advisor.getBucketRedundancy() + 1;
}
public final int getBucketId() {
return this.bid;
}
public final int getId() {
return getBucketId();
}
public void setBucketSick(DistributedMember member, boolean sick) {
synchronized(this.sickHosts) {
if (sick) {
this.sickHosts.add(member);
} else {
this.sickHosts.remove(member);
}
this.bucketSick.set(this.sickHosts.size() > 0);
}
}
public boolean isBucketSick() {
return this.bucketSick.get();
}
public Set<DistributedMember> getSickMembers() {
synchronized(this.sickHosts) {
return Collections.unmodifiableSet(new HashSet<DistributedMember>(this.sickHosts));
}
}
public void recoverFromDiskRecursively() {
recoverFromDisk();
List<PartitionedRegion> colocatedWithList = ColocationHelper.getColocatedChildRegions(partitionedRegion);
for(PartitionedRegion childPR : colocatedWithList) {
if(childPR.getDataPolicy().withPersistence()) {
ProxyBucketRegion[] childBucketArray = childPR.getRegionAdvisor().getProxyBucketArray();
if(childBucketArray != null) {
ProxyBucketRegion childBucket = childBucketArray[getBucketId()];
childBucket.recoverFromDisk();
}
}
}
}
public void recoverFromDisk() {
final boolean isDebugEnabled = logger.isDebugEnabled();
RuntimeException exception = null;
if (isDebugEnabled) {
logger.debug("{} coming to recover from disk. wasHosting {}", getFullPath(), persistenceAdvisor.wasHosting());
}
try {
if(persistenceAdvisor.wasHosting()) {
if (isDebugEnabled) {
logger.debug("{} used to host data. Attempting to recover.", getFullPath());
}
CreateBucketResult result;
if(hasPersistentChildRegion()) {
// If this is a parent PR, create the bucket, possibly going over
// redundancy. We need to do this so that we can create the child
// region in this member. This member may have the latest data for the
// child region.
result = partitionedRegion.getDataStore()
.grabBucket(bid, getDistributionManager().getDistributionManagerId(),
true, true, false, null, true);
} else {
if (this.partitionedRegion.isShadowPR()
&& this.partitionedRegion.getColocatedWith() != null) {
PartitionedRegion colocatedRegion =
ColocationHelper.getColocatedRegion(this.partitionedRegion);
if(this.partitionedRegion.getDataPolicy().withPersistence() && !colocatedRegion.getDataPolicy().withPersistence()) {
result = colocatedRegion.getDataStore()
.grabBucket(bid, getDistributionManager().getDistributionManagerId(),
true, true, false, null, true);
if(result.nowExists()) {
result = partitionedRegion.getDataStore()
.grabBucket(bid, null, true, false, false, null, true);
}
}
else{
result = partitionedRegion.getDataStore()
.grabBucket(bid, null, true, false, false, null, true);
}
} else{
result = partitionedRegion.getDataStore()
.grabBucket(bid, null, true, false, false, null, true);
}
}
if(result.nowExists()) {
return;
} else if (result != CreateBucketResult.REDUNDANCY_ALREADY_SATISFIED) {
//TODO prpersist - check cache closure, create new error message
this.partitionedRegion.checkReadiness();
throw new InternalGemFireError("Unable to restore the persistent bucket " + this.getName());
}
if (isDebugEnabled) {
logger.debug("{} redundancy is already satisfied, so discarding persisted data. Current hosts {}",
getFullPath(), advisor.adviseReplicates());
}
//Destroy the data if we can't create the bucket, or if the redundancy is already satisfied
destroyOfflineData();
}
if (isDebugEnabled) {
logger.debug("{} initializing membership view from peers", getFullPath());
}
persistenceAdvisor.initializeMembershipView();
} catch(RuntimeException e) {
exception=e;
throw e;
} finally {
persistenceAdvisor.recoveryDone(exception);
}
}
boolean hasPersistentChildRegion() {
return ColocationHelper.hasPersistentChildRegion(partitionedRegion);
}
/**
* Destroy the offline data just for this bucket.
*/
public void destroyOfflineData() {
Map<InternalDistributedMember, PersistentMemberID> onlineMembers = advisor.adviseInitializedPersistentMembers();
persistenceAdvisor.checkMyStateOnMembers(onlineMembers.keySet());
diskRegion.beginDestroyDataStorage();
persistenceAdvisor.finishPendingDestroy();
if (logger.isDebugEnabled()) {
logger.debug("destroyed persistent data for {}" + getFullPath());
}
}
public BucketPersistenceAdvisor getPersistenceAdvisor() {
return this.persistenceAdvisor;
}
public DiskRegion getDiskRegion() {
return this.diskRegion;
}
public void finishRemoveBucket() {
if(this.persistenceAdvisor != null) {
this.persistenceAdvisor.bucketRemoved();
}
}
public BucketLock getBucketLock() {
return bucketLock;
}
public void initializePersistenceAdvisor() {
persistenceAdvisor.initialize();
List<PartitionedRegion> colocatedWithList
= ColocationHelper.getColocatedChildRegions(partitionedRegion);
for(PartitionedRegion childPR : colocatedWithList) {
ProxyBucketRegion[] childBucketArray = childPR.getRegionAdvisor().getProxyBucketArray();
if(childBucketArray != null) {
ProxyBucketRegion childBucket = childBucketArray[getBucketId()];
if(childBucket.persistenceAdvisor != null) {
childBucket.persistenceAdvisor.initialize();
}
}
}
}
public boolean checkBucketRedundancyBeforeGrab(InternalDistributedMember moveSource, boolean replaceOfflineData) {
int redundancy = getBucketAdvisor().getBucketRedundancy();
//Skip any checks if this is a colocated bucket. We need to create
//the colocated bucket if we managed to create the parent bucket. There are
//race conditions where the parent region may know that a member is no longer
//hosting the bucket, but the child region doesn't know that yet.
PartitionedRegion colocatedRegion =
ColocationHelper.getColocatedRegion(this.partitionedRegion);
if(colocatedRegion != null) {
return true;
}
//Check for offline members, if the region has persistence
//Even if we intend to replace offline data, we still need to make
//sure the bucket isn't completely offline
if(!replaceOfflineData || redundancy == -1) {
BucketPersistenceAdvisor persistAdvisor = getPersistenceAdvisor();
if(persistAdvisor != null) {
//If we haven't finished recovering from disk, don't allow the bucket creation.
// if(persistAdvisor.isRecovering()) {
// return false;
// }
//If we previously hosted this bucket, go ahead and initialize
//If this bucket never had a primary, go ahead and initialize,
//any offline buckets should be empty
if(!persistAdvisor.wasHosting() && advisor.getHadPrimary()) {
final PersistentMembershipView membershipView = persistAdvisor.getMembershipView();
if(membershipView == null) {
//Fix for 42327 - There must be a race where we are being told to create a bucket
//before we recover from disk. In that case, the membership view can be null.
//Refuse to create the bucket if that is the case.
if (logger.isDebugEnabled()) {
logger.debug("grabFreeBucket: Can't create bucket because persistence is not yet initialized {}{}{}",
this.partitionedRegion.getPRId(), PartitionedRegion.BUCKET_ID_SEPARATOR, bid);
}
return false;
}
Set<PersistentMemberID> offlineMembers = membershipView.getOfflineMembers();
if (logger.isDebugEnabled()) {
logger.debug("We didn't host the bucket. Checking redundancy level before creating the bucket. Redundancy={} offline members={}",
redundancy, offlineMembers);
}
if(offlineMembers != null && !offlineMembers.isEmpty() && redundancy == -1) {
//If there are offline members, and no online members, throw
//an exception indicating that we can't create the bucket.
String message = LocalizedStrings.PartitionedRegionDataStore_DATA_OFFLINE_MESSAGE.toLocalizedString(partitionedRegion.getFullPath(), bid, offlineMembers);
throw new PartitionOfflineException((Set)offlineMembers, message);
} else {
//If there are online and offline members, add the offline
//members to the redundancy level. This way we won't create
//an extra copy of the bucket.
if(offlineMembers != null) {
redundancy += offlineMembers.size();
}
}
}
}
}
if (moveSource == null) {
if (redundancy
>= this.partitionedRegion.getRedundantCopies()) {
if (logger.isDebugEnabled()) {
logger.debug("grabFreeBucket: Bucket already meets redundancy level bucketId={}{}{}",
this.partitionedRegion.getPRId(), PartitionedRegion.BUCKET_ID_SEPARATOR, bid);
}
return false;
}
}
//Check to see if this bucket is allowed on this source. If this
//is a bucket move, we allow the source to be on the same host.
if (! PartitionedRegionBucketMgmtHelper.bucketIsAllowedOnThisHost(this, moveSource)) {
if (logger.isDebugEnabled()) {
logger.debug("grabFreeBucket: Bucket can't be recovered because we're enforcing that the bucket host must be unique {}{}{}",
this.partitionedRegion.getPRId(), PartitionedRegion.BUCKET_ID_SEPARATOR, bid);
}
return false;
}
return true;
}
public void waitForPrimaryPersistentRecovery() {
persistenceAdvisor.waitForPrimaryPersistentRecovery();
}
public void initializePrimaryElector(
InternalDistributedMember creationRequestor) {
advisor.initializePrimaryElector(creationRequestor);
if(persistenceAdvisor != null) {
persistenceAdvisor.setAtomicCreation(creationRequestor != null);
}
}
public void clearPrimaryElector() {
if(persistenceAdvisor != null) {
persistenceAdvisor.setAtomicCreation(false);
}
}
@Override
public void remoteRegionInitialized(CacheProfile profile) {
// no-op for proxy bucket regions, which have no region membership listeners to notify
}
}
|
|
/*
* Copyright Matt Palmer 2011-2012, All rights reserved.
*
* This code is licensed under a standard 3-clause BSD license:
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * The names of its contributors may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package net.byteseek.searcher.multisequence;
import java.io.IOException;
import java.util.List;
import net.byteseek.io.reader.Window;
import net.byteseek.io.reader.WindowReader;
import net.byteseek.matcher.multisequence.MultiSequenceMatcher;
import net.byteseek.matcher.sequence.SequenceMatcher;
import net.byteseek.searcher.AbstractSearcher;
import net.byteseek.searcher.SearchResult;
import net.byteseek.searcher.SearchUtils;
/**
* This abstract base class for multi-sequence searchers holds the collection of
* sequences to be searched for and provides generic implementations of:
* <ul>
* <li>{@link #searchForwards(net.byteseek.io.reader.WindowReader, long, long)}
* <li>{@link #searchBackwards(net.byteseek.io.reader.WindowReader, long, long)}
* </ul>
* These allocate searching for sequences efficiently between searching in the
* byte arrays provided by {@link Window}s when the sequence fits in a single window,
* only using the less efficient reader interface on the sequence for times when
* the sequence crosses over Window boundaries.
* <p>
* It defines two new abstract methods:
* <ul>
* <li>{@link #doSearchForwards(net.byteseek.io.reader.WindowReader, long, long) }
* <li>{@link #doSearchBackwards(net.byteseek.io.reader.WindowReader, long, long) }
* </ul>
* which require the implementor to use the reader interface on the sequence for
* matching (or otherwise provide for searching sequences which cross window boundaries).
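* <p>
* A minimal usage sketch (the concrete searcher subclass and the construction of the
* matcher and reader are illustrative assumptions, not part of this class):
* <pre>{@code
* MultiSequenceMatcher sequences = ...;         // the sequences to search for
* AbstractMultiSequenceSearcher searcher = ...; // any concrete subclass wrapping 'sequences'
* List<SearchResult<SequenceMatcher>> results =
*     searcher.searchForwards(reader, fromPosition, toPosition);
* // each SearchResult reports a match position within the WindowReader
* }</pre>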
*
* @author Matt Palmer
*/
public abstract class AbstractMultiSequenceSearcher extends AbstractSearcher<SequenceMatcher> {
/**
 * The MultiSequenceMatcher containing the sequences to search for.
 */
protected final MultiSequenceMatcher sequences;
/**
* Constructs a sequence searcher given a {@link MultiSequenceMatcher}
* to search for.
*
* @param sequences The MultiSequenceMatcher to search for.
*/
public AbstractMultiSequenceSearcher(final MultiSequenceMatcher sequences) {
if (sequences == null) {
throw new IllegalArgumentException("Null sequences passed in to searcher.");
}
this.sequences = sequences;
}
/**
* Returns the {@link MultiSequenceMatcher} to be searched for.
*
* @return MultiSequenceMatcher the MultiSequenceMatcher to search for.
*/
public MultiSequenceMatcher getMatcher() {
return sequences;
}
/**
* {@inheritDoc}
* <p>
* This implementation of searchForwards allocates forward searching between
* searching directly on a window byte array when the multi-sequence fits inside
* a window, and using the abstract search method:
* {@link #doSearchForwards(net.byteseek.io.reader.WindowReader, long, long) }
* for searching across window boundaries.
* <p>
* This method does no searching itself - it simply calculates how to
* efficiently search using a multi-sequence, and calls the appropriate search
* methods on the search implementation. Therefore, this is entirely generic for
* any search algorithm that operates over multi-sequences.
*
* @throws IOException If the reader encounters a problem reading bytes.
*/
@Override
public List<SearchResult<SequenceMatcher>> searchForwards(final WindowReader reader,
final long fromPosition, final long toPosition) throws IOException {
// Initialise:
final int longestMatchEndPosition = sequences.getMaximumLength() - 1;
long searchPosition = fromPosition > 0?
fromPosition : 0;
// While there is data to search in:
Window window;
while (searchPosition <= toPosition &&
(window = reader.getWindow(searchPosition)) != null) {
// Does the sequence fit into the searchable bytes of this window?
// It may not if the start position of the window is already close
// to the end of the window, or the sequence is long (potentially
// could be longer than any single window - but mostly won't be):
final long windowStartPosition = window.getWindowPosition();
final int windowLength = window.length();
final int arrayStartPosition = reader.getWindowOffset(searchPosition);
final int arrayLastPosition = windowLength - 1;
if (arrayStartPosition + longestMatchEndPosition <= arrayLastPosition) {
// Find the last point in the array where the sequence still fits
// inside the array, or the toPosition if it is smaller.
final int lastMatchingPosition = arrayLastPosition - longestMatchEndPosition;
final long distanceToEnd = toPosition - windowStartPosition;
final int arrayMaxPosition = distanceToEnd < lastMatchingPosition?
(int) distanceToEnd : lastMatchingPosition;
// Search forwards in the byte array of the window:
final List<SearchResult<SequenceMatcher>> arrayResult =
searchForwards(window.getArray(), arrayStartPosition, arrayMaxPosition);
// Did we find a match?
if (!arrayResult.isEmpty()) {
final long readerOffset = searchPosition - arrayStartPosition;
return SearchUtils.addPositionToResults(arrayResult, readerOffset);
}
// Continue the search one on from where we last looked:
searchPosition += (arrayMaxPosition - arrayStartPosition + 1);
// Did we pass the final toPosition? In which case, we're finished.
if (searchPosition > toPosition) {
return SearchUtils.noResults();
}
}
// From the current search position, the sequence could cross over in to
// the next window, so we can't search directly in the window byte array.
// We must use the reader interface on the sequence to let it match
// over more bytes than this window potentially has available.
// Search up to the last position in the window, or the toPosition,
// whichever comes first:
final long lastWindowPosition = windowStartPosition + arrayLastPosition;
final long lastSearchPosition = toPosition < lastWindowPosition?
toPosition : lastWindowPosition;
final List<SearchResult<SequenceMatcher>> readerResult =
doSearchForwards(reader, searchPosition, lastSearchPosition);
// Did we find a match?
if (!readerResult.isEmpty()) {
return readerResult;
}
// Continue the search one on from where we last looked:
searchPosition = lastSearchPosition + 1;
}
return SearchUtils.noResults();
}
/**
* This method searches forwards crossing window boundaries. It is
* called by the {@link #searchForwards(net.byteseek.io.reader.WindowReader, long, long)}
* method when it encounters a multi-sequence which crosses from one window to another.
* <p>
* A simple way to implement this method is to use the WindowReader interface on the
* searcher's multi-sequence. This at least removes the need to handle window boundaries
* when validating that a match exists. It will still be necessary to deal with window
* management in the operation of the search algorithm itself.
* <p>
* Implementations of this method do not need to worry about whether the search
* position parameters are within the reader, as this bounds checking is done
* by the searchForwards method which calls it.
*
* @param reader The reader providing bytes to search in.
* @param fromPosition The search position to search from.
* @param toPosition The search position to search to.
* @return A list of search results.
* If there are no results, then the list is empty (not null).
* @throws IOException If the reader encounters difficulties reading bytes.
*/
protected abstract List<SearchResult<SequenceMatcher>> doSearchForwards(WindowReader reader,
long fromPosition, long toPosition) throws IOException;
/**
* {@inheritDoc}
* <p>
* This implementation of searchBackwards allocates backwards searching between
* searching directly on a window byte array when the multi-sequence fits inside
* a window, and using the abstract search method:
* {@link #doSearchBackwards(net.byteseek.io.reader.WindowReader, long, long) }
* for searching across window boundaries.
* <p>
* This method does no searching itself - it simply calculates how to
* efficiently search using a multi-sequence, and calls the appropriate search
* methods on the search implementation. Therefore, this is entirely generic for
* any search algorithm that operates over multi-sequences.
*
* @throws IOException If the reader encounters a problem reading bytes.
*/
@Override
public List<SearchResult<SequenceMatcher>> searchBackwards(final WindowReader reader,
final long fromPosition, final long toPosition) throws IOException {
// Initialise:
final int smallestMatchEndPosition = sequences.getMinimumLength() - 1;
final int longestMatchEndPosition = sequences.getMaximumLength() - 1;
final long finalSearchPosition = toPosition > 0?
toPosition : 0;
long searchPosition = withinLength(reader, fromPosition);
// While there is data to search in:
Window window;
while (searchPosition >= finalSearchPosition &&
(window = reader.getWindow(searchPosition)) != null) {
// Calculate first search start position
final int searchStartPosition = reader.getWindowOffset(searchPosition);
final long windowStartPosition = window.getWindowPosition();
final long distanceToEnd = finalSearchPosition - windowStartPosition;
final int searchEndPosition = distanceToEnd > 0?
(int) distanceToEnd : 0;
// Can the multi-sequence fit into the searchable bytes of this window?
// It may not if the start position of the window is already close
// to the end of the window, or the sequence is long (potentially
// could be longer than any single window - but mostly won't be):
if (searchStartPosition - smallestMatchEndPosition >= searchEndPosition) {
// Search backwards in the byte array of the window:
final byte[] array = window.getArray();
final List<SearchResult<SequenceMatcher>> arrayResult =
searchBackwards(array, searchStartPosition, searchEndPosition);
// Did we find a match?
if (!arrayResult.isEmpty()) {
final long readerOffset = searchPosition - searchStartPosition;
return SearchUtils.addPositionToResults(arrayResult, readerOffset);
}
// Continue the search one on from where we last looked:
final int bytesSearched = searchStartPosition - searchEndPosition;
searchPosition -= (bytesSearched + 1);
// Did we pass the final search position? In which case, we're finished.
if (searchPosition < finalSearchPosition) {
return SearchUtils.noResults();
}
}
// From the current search position, the multi-sequence crosses over in to
// the previous window, so we can't search directly in the window byte array.
// We must use the reader interface on the sequence to let it match
// over more bytes than this window has available.
// Search back to the first position in the previous window where any
// of the sequences might still cross over into the current window.
final long lastCrossingPosition = windowStartPosition - longestMatchEndPosition;
final List<SearchResult<SequenceMatcher>> readerResult =
doSearchBackwards(reader, searchPosition, lastCrossingPosition);
// Did we find a match?
if (!readerResult.isEmpty()) {
return readerResult;
}
// Continue the search one on from where we last looked:
searchPosition = lastCrossingPosition - 1;
}
return SearchUtils.noResults();
}
/**
* This abstract method searches backwards crossing window boundaries. It is
* called by the {@link #searchBackwards(net.byteseek.io.reader.WindowReader, long, long)}
* method when it encounters a multi-sequence which crosses from one window to another.
* <p>
* A simple way to implement this method is to use the WindowReader interface on the
* searcher's multi-sequence. This at least removes the need to handle window boundaries
* when validating that a match exists. It may still be necessary to deal with window
* management in the operation of the search algorithm itself.
*
* @param reader The reader providing bytes to search in.
* @param fromPosition The search position to search from.
* @param toPosition The search position to search to.
* @return A list of search results.
* If there are no results, the list is empty (not null).
* @throws IOException If the reader encounters difficulties reading bytes.
*/
protected abstract List<SearchResult<SequenceMatcher>> doSearchBackwards(WindowReader reader,
long fromPosition, long toPosition) throws IOException;
/**
* Returns a string representation of this searcher. The format is subject
* to change, but it will generally return the name of the searcher class,
* the sequences class used in the search algorithm, and regular expressions
* defining the sequences matched by the searcher.
*
* @return A string representing this searcher.
*/
@Override
public String toString() {
return getClass().getSimpleName() + "(" + sequences + ")";
}
}
|
|
/*
* Copyright (c) 2015, IponWeb (http://www.iponweb.com)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package load_engine.runner;
import com.beust.jcommander.internal.Lists;
import com.codahale.metrics.MetricRegistry;
import load_engine.Generator;
import load_engine.Loader;
import load_engine.Metrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.*;
import java.util.function.BooleanSupplier;
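/**
 * Drives a load test: scheduler threads pull tasks from the supplied generators onto a
 * bounded queue, and load threads feed them to the supplied loaders, subject to a QPS
 * schedule and optional duration and total-query limits (a duration or query limit of
 * zero or less is treated as unlimited).
 * <p>
 * A minimal usage sketch (MyTask, MyGenerator and MyLoader are hypothetical
 * implementations, not part of this engine):
 * <pre>{@code
 * LoadGenerator<MyTask> engine =
 *     new LoadGenerator<>(60, 0, 100, new MetricRegistry());
 * engine.doTest(
 *     LoadGenerator.many(new MyGenerator(), 2),
 *     LoadGenerator.many(new MyLoader(), 4),
 *     new Properties());
 * }</pre>
 */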
public class LoadGenerator<Task> {
private static final Logger LOGGER = LoggerFactory.getLogger(LoadGenerator.class);
private static final BooleanSupplier UNLIMITED_QUERIES = () -> true;
private final int maxDuration;
private final int queriesLimit;
private final int qpsLimit;
private final Metrics metrics;
private MainThread mainThread;
public LoadGenerator(int maxDuration, int queriesLimit, int qpsLimit, MetricRegistry registry) {
this(maxDuration, queriesLimit, qpsLimit, new Metrics(registry));
}
public LoadGenerator(int maxDuration, int queriesLimit, int qpsLimit, Metrics metrics) {
this.maxDuration = maxDuration;
this.queriesLimit = queriesLimit;
this.qpsLimit = qpsLimit;
this.metrics = metrics;
}
private static void waitForFinish(Iterable<? extends Thread> threads) {
for (Thread t : threads) {
try {
t.join();
} catch (InterruptedException e) {
// ignore
}
}
}
public static <T> Collection<T> many(T obj, int count) {
List<T> result = Lists.newArrayList();
for (int i = 0; i < count; ++i) {
result.add(obj);
}
return result;
}
public int getMaxDuration() {
return maxDuration;
}
public int getQueriesLimit() {
return queriesLimit;
}
public int getQpsLimit() {
return qpsLimit;
}
public Metrics getMetrics() {
return metrics;
}
public void start(Collection<? extends Generator<Task>> generators, Collection<? extends Loader<Task>> loaders, Properties props) {
if (mainThread != null) {
throw new IllegalStateException("Load generator can't be executed twice");
}
mainThread = new MainThread(generators, loaders, props);
mainThread.start();
}
public boolean isRunning() {
return mainThread != null && mainThread.isAlive();
}
public void join() throws InterruptedException {
if (!isRunning()) {
throw new IllegalStateException("LoadGenerator is not running");
}
mainThread.join();
}
public void interrupt() {
if (!isRunning()) {
throw new IllegalStateException("LoadGenerator is not running");
}
mainThread.interrupt();
}
public void doTest(
Collection<? extends Generator<Task>> generators,
Collection<? extends Loader<Task>> loaders,
Properties props
) throws InterruptedException {
this.start(generators, loaders, props);
this.join();
}
private class MainThread extends Thread {
private final List<SchedulerThread<Task>> schedulers = new ArrayList<>();
private final List<LoadThread<Task>> loadThreads = new ArrayList<>();
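// Bounded hand-off queue between scheduler (generator) threads and load threads;
// the fixed capacity provides back-pressure when loaders fall behind.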
private final BlockingQueue<ScheduledTask<Task>> queue = new ArrayBlockingQueue<>(10000);
public MainThread(
Collection<? extends Generator<Task>> generators,
Collection<? extends Loader<Task>> loaders,
Properties props
) {
props = (Properties) props.clone();
props.setProperty("generators", Integer.toString(generators.size()));
props.setProperty("loaders", Integer.toString(loaders.size()));
QpsScheduler scheduler = new QpsScheduler(qpsLimit);
BooleanSupplier canSchedule = UNLIMITED_QUERIES;
if (queriesLimit > 0) {
canSchedule = new QueriesLimit(queriesLimit);
}
int loaderIndex = 0;
for (Loader<Task> l : loaders) {
Properties loaderProps = (Properties) props.clone();
loaderProps.setProperty("loaderIndex", Integer.toString(loaderIndex++));
l.init(loaderProps, metrics.registry);
LoadThread<Task> thread = new LoadThread<>(queue, l, metrics);
thread.setName("LoadThread-" + loaderIndex);
loadThreads.add(thread);
}
LoadThreadsFinalizer<Task> loadThreadsFinalizer = new LoadThreadsFinalizer<>(generators.size(), queue);
int generatorIndex = 0;
for (Generator<Task> g : generators) {
Properties generatorProps = (Properties) props.clone();
generatorProps.setProperty("generatorIndex", Integer.toString(generatorIndex++));
g.init(generatorProps, metrics.registry);
SchedulerThread<Task> thread = new SchedulerThread<>(
queue,
g,
scheduler,
canSchedule,
loadThreadsFinalizer,
metrics
);
thread.setName("GeneratorThread-" + generatorIndex);
schedulers.add(thread);
}
}
@Override
public void run() {
metrics.markStart();
loadThreads.forEach(Thread::start);
schedulers.forEach(Thread::start);
ScheduledExecutorService executorService = null;
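// If a maximum duration is configured, schedule an interrupt that stops the
// schedulers once the time is up.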
if (maxDuration > 0) {
executorService = new ScheduledThreadPoolExecutor(1);
executorService.schedule(
this::interrupt,
maxDuration,
TimeUnit.SECONDS
);
executorService.shutdown();
}
waitForFinish(schedulers);
LOGGER.trace("All scheduler exited");
waitForFinish(loadThreads);
LOGGER.trace("All loader exited");
if (executorService != null) {
executorService.shutdownNow();
}
metrics.markEnd();
}
@Override
public void interrupt() {
LOGGER.trace("About to interrupt schedulers");
schedulers.forEach(Thread::interrupt);
LOGGER.trace("Waiting schedulers to exit");
waitForFinish(schedulers);
LOGGER.trace("All schedulers exited - clear queue");
queue.clear();
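// Put a finalizer task on the (now empty) queue so the load threads see
// end-of-input and can exit.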
ScheduledTask.addFinalizer(queue);
}
}
}
|
|
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java.intellij;
import static com.facebook.buck.testutil.MoreAsserts.assertListEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import com.facebook.buck.android.AndroidBinary;
import com.facebook.buck.android.AndroidBinaryBuilder;
import com.facebook.buck.android.AndroidLibraryBuilder;
import com.facebook.buck.android.AndroidResourceRuleBuilder;
import com.facebook.buck.android.NdkLibrary;
import com.facebook.buck.android.NdkLibraryBuilder;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.PathOrGlobMatcher;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.core.JavaPackageFinder;
import com.facebook.buck.jvm.java.FakeJavaPackageFinder;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.JavaTestBuilder;
import com.facebook.buck.jvm.java.KeystoreBuilder;
import com.facebook.buck.jvm.java.PrebuiltJarBuilder;
import com.facebook.buck.jvm.java.intellij.SerializableModule.SourceFolder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.InMemoryBuildFileTree;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.ProjectConfig;
import com.facebook.buck.rules.ProjectConfigBuilder;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.BuckTestConstant;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.ObjectMappers;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nullable;
public class ProjectTest {
private static final Path PATH_TO_GUAVA_JAR = Paths.get("third_party/guava/guava-10.0.1.jar");
@SuppressWarnings("PMD.UnusedPrivateField")
private BuildRule guava;
/**
* Creates an ActionGraph with two android_binary rules, each of which depends on the same
* android_library. The difference between the two is that one lists Guava in its no_dx list and
* the other does not.
* <p>
* The ActionGraph also includes three project_config rules: one for the android_library, and one
* for each of the android_binary rules.
*/
public Pair<ProjectWithModules, BuildRuleResolver> createActionGraphForTesting(
@Nullable JavaPackageFinder javaPackageFinder) throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
// prebuilt_jar //third_party/guava:guava
guava = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/guava:guava"))
.setBinaryJar(PATH_TO_GUAVA_JAR)
.build(ruleResolver);
// android_resource //android_res/base:res
BuildRule androidResRule = ruleResolver.addToIndex(
AndroidResourceRuleBuilder.newBuilder()
.setResolver(new SourcePathResolver(ruleResolver))
.setBuildTarget(BuildTargetFactory.newInstance("//android_res/base:res"))
.setRes(new FakeSourcePath("android_res/base/res"))
.setRDotJavaPackage("com.facebook")
.build());
// project_config //android_res/base:project_config
ProjectConfig projectConfigForResource = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//android_res/base:project_config"))
.setSrcRule(androidResRule.getBuildTarget())
.setSrcRoots(ImmutableList.of("res"))
.build(ruleResolver);
// java_library //java/src/com/facebook/grandchild:grandchild
BuildTarget grandchildTarget =
BuildTargetFactory.newInstance("//java/src/com/facebook/grandchild:grandchild");
BuildRule grandchild = JavaLibraryBuilder
.createBuilder(grandchildTarget)
.addSrc(Paths.get("Grandchild.java"))
.build(ruleResolver);
// java_library //java/src/com/facebook/child:child
BuildRule childRule = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/src/com/facebook/child:child"))
.addSrc(Paths.get("Child.java"))
.addDep(grandchild.getBuildTarget())
.build(ruleResolver);
// java_library //java/src/com/facebook/exportlib:exportlib
BuildRule exportLib = JavaLibraryBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:exportlib"))
.addSrc(Paths.get("ExportLib.java"))
.addDep(guava.getBuildTarget())
.addExportedDep(guava.getBuildTarget())
.build(ruleResolver);
// android_library //java/src/com/facebook/base:base
BuildRule baseRule = AndroidLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/src/com/facebook/base:base"))
.addSrc(Paths.get("Base.java"))
.addDep(exportLib.getBuildTarget())
.addDep(childRule.getBuildTarget())
.addDep(androidResRule.getBuildTarget())
.build(ruleResolver);
// project_config //java/src/com/facebook/base:project_config
ProjectConfig projectConfigForLibrary = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance(
"//java/src/com/facebook/base:project_config"))
.setSrcRule(baseRule.getBuildTarget())
.setSrcRoots(ImmutableList.of("src", "src-gen"))
.build(ruleResolver);
ProjectConfig projectConfigForExportLibrary = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:project_config"))
.setSrcRule(exportLib.getBuildTarget())
.setSrcRoots(ImmutableList.of("src")).build(ruleResolver);
// keystore //keystore:debug
BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
BuildRule keystore = KeystoreBuilder.createBuilder(keystoreTarget)
.setStore(new FakeSourcePath("keystore/debug.keystore"))
.setProperties(new FakeSourcePath("keystore/debug.keystore.properties"))
.build(ruleResolver);
// android_binary //foo:app
ImmutableSortedSet<BuildTarget> androidBinaryRuleDepsTarget =
ImmutableSortedSet.of(baseRule.getBuildTarget());
AndroidBinary androidBinaryRule = (AndroidBinary) AndroidBinaryBuilder.createBuilder(
BuildTargetFactory.newInstance("//foo:app"))
.setOriginalDeps(androidBinaryRuleDepsTarget)
.setManifest(new FakeSourcePath("foo/AndroidManifest.xml"))
.setKeystore(keystore.getBuildTarget())
.setBuildTargetsToExcludeFromDex(
ImmutableSet.of(
BuildTargetFactory.newInstance("//third_party/guava:guava")))
.build(ruleResolver);
// project_config //foo:project_config
ProjectConfig projectConfigUsingNoDx = (ProjectConfig) ProjectConfigBuilder
.createBuilder(BuildTargetFactory.newInstance("//foo:project_config"))
.setSrcRule(androidBinaryRule.getBuildTarget())
.build(ruleResolver);
// android_binary //bar:app
ImmutableSortedSet<BuildTarget> barAppBuildRuleDepsTarget =
ImmutableSortedSet.of(baseRule.getBuildTarget());
AndroidBinary barAppBuildRule = (AndroidBinary) AndroidBinaryBuilder.createBuilder(
BuildTargetFactory.newInstance("//bar:app"))
.setOriginalDeps(barAppBuildRuleDepsTarget)
.setManifest(new FakeSourcePath("foo/AndroidManifest.xml"))
.setKeystore(keystore.getBuildTarget())
.build(ruleResolver);
// project_config //bar:project_config
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(BuildTargetFactory.newInstance("//bar:project_config"))
.setSrcRule(barAppBuildRule.getBuildTarget())
.build(ruleResolver);
return new Pair<>(getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(
projectConfigForExportLibrary,
projectConfigForLibrary,
projectConfigForResource,
projectConfigUsingNoDx,
projectConfig),
javaPackageFinder,
null /* intellijConfig */),
ruleResolver);
}
@Test
public void testGenerateRelativeGenPath() {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
Path basePathOfModule = Paths.get("android_res/com/facebook/gifts/");
Path expectedRelativePathToGen =
Paths.get("../../../../buck-out/android/android_res/com/facebook/gifts/gen");
assertEquals(
expectedRelativePathToGen,
Project.generateRelativeGenPath(filesystem, basePathOfModule));
}
/**
* This is an important test that verifies that the {@code no_dx} argument for an
* {@code android_binary} is handled appropriately when generating an IntelliJ project.
*/
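// Illustrative sketch (an assumption about BUCK-file syntax, not taken from this repository): the
// no_dx argument exercised by this test would look roughly like the following, using the targets
// built programmatically below.
//
//   android_binary(
//     name = 'app',
//     manifest = 'AndroidManifest.xml',
//     keystore = '//keystore:debug',
//     no_dx = ['//third_party/guava:guava'],
//     deps = ['//java/src/com/facebook/base:base'],
//   )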
@Test
public void testProject() throws Exception {
JavaPackageFinder javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class);
EasyMock
.expect(javaPackageFinder.findJavaPackage(Paths.get("foo/module_foo.iml")))
.andReturn("");
EasyMock
.expect(javaPackageFinder.findJavaPackage(Paths.get("bar/module_bar.iml")))
.andReturn("");
EasyMock.replay(javaPackageFinder);
Pair<ProjectWithModules, BuildRuleResolver> projectWithModules =
createActionGraphForTesting(javaPackageFinder);
Project project = projectWithModules.getFirst().project;
BuildRuleResolver resolver = projectWithModules.getSecond();
List<SerializableModule> modules = projectWithModules.getFirst().modules;
assertEquals("Should be one module for the java_library, one for the android_library, " +
"one module for the android_resource, and one for each android_binary",
5,
modules.size());
// Check the values of the module that corresponds to the java_library.
SerializableModule javaLibraryModule = modules.get(4);
assertSame(
getRuleByBuildTarget("//java/src/com/facebook/exportlib:exportlib", resolver),
javaLibraryModule.srcRule);
assertEquals("module_java_src_com_facebook_exportlib", javaLibraryModule.name);
assertEquals(
Paths.get("java/src/com/facebook/exportlib/module_java_src_com_facebook_exportlib.iml"),
javaLibraryModule.pathToImlFile);
assertListEquals(
ImmutableList.of(SerializableModule.SourceFolder.SRC),
javaLibraryModule.sourceFolders);
// Check the dependencies.
SerializableDependentModule inheritedJdk = SerializableDependentModule.newInheritedJdk();
SerializableDependentModule guavaAsProvidedDep = SerializableDependentModule.newLibrary(
guava.getBuildTarget(), "buck_out_gen_third_party_guava___guava___guava_10_0_1_jar");
guavaAsProvidedDep.scope = "PROVIDED";
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsProvidedDep,
SerializableDependentModule.newStandardJdk(
Optional.<String>absent(),
Optional.<String>absent())),
javaLibraryModule.getDependencies());
// Check the values of the module that corresponds to the android_library.
SerializableModule androidLibraryModule = modules.get(3);
assertSame(
getRuleByBuildTarget("//java/src/com/facebook/base:base", resolver),
androidLibraryModule.srcRule);
assertEquals("module_java_src_com_facebook_base", androidLibraryModule.name);
assertEquals(
Paths.get("java/src/com/facebook/base/module_java_src_com_facebook_base.iml"),
androidLibraryModule.pathToImlFile);
assertListEquals(
ImmutableList.of(
SerializableModule.SourceFolder.SRC,
new SourceFolder("file://$MODULE_DIR$/src-gen", false /* isTestSource */),
SerializableModule.SourceFolder.GEN),
androidLibraryModule.sourceFolders);
assertEquals(Boolean.TRUE, androidLibraryModule.hasAndroidFacet);
assertEquals(Boolean.TRUE, androidLibraryModule.isAndroidLibraryProject);
assertEquals(null, androidLibraryModule.proguardConfigPath);
assertEquals(null, androidLibraryModule.resFolder);
// Check the dependencies.
SerializableDependentModule androidResourceAsProvidedDep =
SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//android_res/base:res"),
"module_android_res_base");
SerializableDependentModule childAsProvidedDep = SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//java/src/com/facebook/child:child"),
"module_java_src_com_facebook_child");
SerializableDependentModule exportDepsAsProvidedDep = SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:exportlib"),
"module_java_src_com_facebook_exportlib");
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsProvidedDep,
androidResourceAsProvidedDep,
childAsProvidedDep,
exportDepsAsProvidedDep,
inheritedJdk),
androidLibraryModule.getDependencies());
// Check the values of the module that corresponds to the android_resource.
SerializableModule androidResourceModule = modules.get(0);
assertSame(
getRuleByBuildTarget("//android_res/base:res", resolver),
androidResourceModule.srcRule);
assertEquals(Paths.get("res"), androidResourceModule.resFolder);
// Check the values of the module that corresponds to the android_binary that uses no_dx.
SerializableModule androidBinaryModuleNoDx = modules.get(2);
assertSame(getRuleByBuildTarget("//foo:app", resolver), androidBinaryModuleNoDx.srcRule);
assertEquals("module_foo", androidBinaryModuleNoDx.name);
assertEquals(Paths.get("foo/module_foo.iml"), androidBinaryModuleNoDx.pathToImlFile);
assertListEquals(
ImmutableList.of(SerializableModule.SourceFolder.GEN),
androidBinaryModuleNoDx.sourceFolders);
assertEquals(Boolean.TRUE, androidBinaryModuleNoDx.hasAndroidFacet);
assertEquals(Boolean.FALSE, androidBinaryModuleNoDx.isAndroidLibraryProject);
assertEquals(null, androidBinaryModuleNoDx.proguardConfigPath);
assertEquals(null, androidBinaryModuleNoDx.resFolder);
assertEquals(Paths.get("../keystore/debug.keystore"), androidBinaryModuleNoDx.keystorePath);
// Check the moduleDependencies.
SerializableDependentModule grandchildAsProvidedDep = SerializableDependentModule.newModule(
BuildTargetFactory.newInstance("//java/src/com/facebook/grandchild:grandchild"),
"module_java_src_com_facebook_grandchild");
SerializableDependentModule androidLibraryDep = SerializableDependentModule.newModule(
androidLibraryModule.srcRule.getBuildTarget(), "module_java_src_com_facebook_base");
assertEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsProvidedDep,
androidLibraryDep,
androidResourceAsProvidedDep,
childAsProvidedDep,
exportDepsAsProvidedDep,
grandchildAsProvidedDep,
inheritedJdk),
androidBinaryModuleNoDx.getDependencies());
// Check the values of the module that corresponds to the android_binary with an empty no_dx.
SerializableModule androidBinaryModuleEmptyNoDx = modules.get(1);
assertSame(getRuleByBuildTarget("//bar:app", resolver), androidBinaryModuleEmptyNoDx.srcRule);
assertEquals("module_bar", androidBinaryModuleEmptyNoDx.name);
assertEquals(Paths.get("bar/module_bar.iml"), androidBinaryModuleEmptyNoDx.pathToImlFile);
assertListEquals(
ImmutableList.of(SerializableModule.SourceFolder.GEN),
androidBinaryModuleEmptyNoDx.sourceFolders);
assertEquals(Boolean.TRUE, androidBinaryModuleEmptyNoDx.hasAndroidFacet);
assertEquals(Boolean.FALSE, androidBinaryModuleEmptyNoDx.isAndroidLibraryProject);
assertEquals(null, androidBinaryModuleEmptyNoDx.proguardConfigPath);
assertEquals(null, androidBinaryModuleEmptyNoDx.resFolder);
assertEquals(
Paths.get("../keystore/debug.keystore"),
androidBinaryModuleEmptyNoDx.keystorePath);
// Check the moduleDependencies.
SerializableDependentModule guavaAsCompiledDep = SerializableDependentModule.newLibrary(
guava.getBuildTarget(), "buck_out_gen_third_party_guava___guava___guava_10_0_1_jar");
assertEquals("Important that Guava is listed as a 'COMPILED' dependency here because it is " +
"only listed as a 'PROVIDED' dependency earlier.",
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
guavaAsCompiledDep,
androidLibraryDep,
androidResourceAsProvidedDep,
childAsProvidedDep,
exportDepsAsProvidedDep,
grandchildAsProvidedDep,
inheritedJdk),
androidBinaryModuleEmptyNoDx.getDependencies());
// Check that the correct data was extracted to populate the .idea/libraries directory.
BuildRule guava = getRuleByBuildTarget("//third_party/guava:guava", resolver);
assertSame(guava, Iterables.getOnlyElement(project.getLibraryJars()));
}
@Test
public void testPrebuiltJarIncludesDeps() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
// Build up the graph that corresponds to:
//
// android_library(
// name = 'example',
// deps = [
// ':easymock',
// ],
// )
//
// prebuilt_jar(
// name = 'easymock',
// binary_jar = 'easymock.jar',
// deps = [
// ':cglib',
// ':objenesis',
// ],
// )
//
// prebuilt_jar(
// name = 'cglib',
// binary_jar = 'cglib.jar',
// )
//
// prebuilt_jar(
// name = 'objenesis',
// binary_jar = 'objenesis.jar',
// )
//
// project_config(
// src_target = ':example',
// )
BuildRule cglib = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/easymock:cglib"))
.setBinaryJar(Paths.get("third_party/java/easymock/cglib.jar"))
.build(ruleResolver);
BuildRule objenesis = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/easymock:objenesis"))
.setBinaryJar(Paths.get("third_party/java/easymock/objenesis.jar"))
.build(ruleResolver);
BuildRule easymock = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/easymock:easymock"))
.setBinaryJar(Paths.get("third_party/java/easymock/easymock.jar"))
.addDep(cglib.getBuildTarget())
.addDep(objenesis.getBuildTarget())
.build(ruleResolver);
BuildTarget easyMockExampleTarget = BuildTargetFactory.newInstance(
"//third_party/java/easymock:example");
BuildRule mockRule = AndroidLibraryBuilder.createBuilder(easyMockExampleTarget)
.addDep(easymock.getBuildTarget())
.build(ruleResolver);
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//third_party/java/easymock:project_config"))
.setSrcRule(mockRule.getBuildTarget())
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(projectConfig),
null /* javaPackageFinder */,
null /* intellijConfig */);
List<SerializableModule> modules = projectWithModules.modules;
// Verify that the single Module that is created transitively includes all JAR files.
assertEquals("Should be one module for the android_library", 1, modules.size());
SerializableModule androidLibraryModule = Iterables.getOnlyElement(modules);
assertThat(
androidLibraryModule.getDependencies(),
Matchers.containsInAnyOrder(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newLibrary(
easymock.getBuildTarget(),
"buck_out_gen_third_party_java_easymock___easymock___easymock_jar"),
SerializableDependentModule.newLibrary(
cglib.getBuildTarget(),
"buck_out_gen_third_party_java_easymock___cglib___cglib_jar"),
SerializableDependentModule.newLibrary(
objenesis.getBuildTarget(),
"buck_out_gen_third_party_java_easymock___objenesis___objenesis_jar"),
SerializableDependentModule.newInheritedJdk()));
}
@Test
public void testIfModuleIsBothTestAndCompileDepThenTreatAsCompileDep() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
// Create a java_library() and a java_test() that both depend on Guava.
// When they are part of the same project_config() rule, then the resulting module should
// include Guava as scope="COMPILE" in IntelliJ.
BuildRule guava = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/guava:guava"))
.setBinaryJar(Paths.get("third_party/java/guava.jar"))
.build(ruleResolver);
BuildRule baseBuildRule = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.addDep(guava.getBuildTarget())
.build(ruleResolver);
BuildRule testBuildRule = JavaTestBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:tests"))
.addDep(guava.getBuildTarget())
.build(ruleResolver);
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule.getBuildTarget())
.setTestRule(testBuildRule.getBuildTarget())
.setTestRoots(ImmutableList.of("tests"))
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(projectConfig),
null /* javaPackageFinder */,
null /* intellijConfig */);
List<SerializableModule> modules = projectWithModules.modules;
assertEquals(1, modules.size());
SerializableModule comExampleBaseModule = Iterables.getOnlyElement(modules);
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newLibrary(
guava.getBuildTarget(),
"buck_out_gen_third_party_java_guava___guava___guava_jar"),
SerializableDependentModule.newStandardJdk(
Optional.<String>absent(),
Optional.<String>absent())),
comExampleBaseModule.getDependencies());
}
/**
* In the context of Robolectric, httpcore-4.0.1.jar needs to be loaded before the android.jar
* associated with the Android SDK. Both httpcore-4.0.1.jar and android.jar define
* org.apache.http.params.BasicHttpParams; however, only httpcore-4.0.1.jar contains a real
* implementation of BasicHttpParams whereas android.jar contains a stub implementation of
* BasicHttpParams.
* <p>
* One way to fix this problem would be to "tag" httpcore-4.0.1.jar to indicate that it must
* appear before the Android SDK (or anything that transitively depends on the Android SDK) when
* listing dependencies for IntelliJ. This would be a giant kludge to the prebuilt_jar rule, so
 * instead we just list jars before modules within an {@code <orderEntry scope="TEST"/>} or an
 * {@code <orderEntry scope="COMPILE"/>} group.
*/
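// Rough sketch of the expected ordering (illustrative; the exact .iml attributes are an
// assumption, only the relative order is asserted by the test below):
//
//   <orderEntry type="library" name="httpcore-4.0.1.jar" level="project" />
//   <orderEntry type="module" module-name="module_java_com_android_support_v4" />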
@Test
public void testThatJarsAreListedBeforeModules() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
BuildRule supportV4 = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/android/support/v4:v4"))
.build(ruleResolver);
BuildRule httpCore = PrebuiltJarBuilder
.createBuilder(BuildTargetFactory.newInstance("//third_party/java/httpcore:httpcore"))
.setBinaryJar(Paths.get("httpcore-4.0.1.jar"))
.build(ruleResolver);
// The support-v4 library above is loaded as a java_library() rather than a prebuilt_jar() because
// it contains our local changes to the library.
BuildTarget robolectricTarget =
BuildTargetFactory.newInstance("//third_party/java/robolectric:robolectric");
BuildRule robolectricRule = JavaLibraryBuilder
.createBuilder(robolectricTarget)
.addDep(supportV4.getBuildTarget())
.addDep(httpCore.getBuildTarget())
.build(ruleResolver);
ProjectConfig projectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//third_party/java/robolectric:project_config"))
.setSrcRule(robolectricRule.getBuildTarget())
.setSrcRoots(ImmutableList.of("src/main/java"))
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(projectConfig),
null /* javaPackageFinder */,
null /* intellijConfig */);
List<SerializableModule> modules = projectWithModules.modules;
assertEquals("Should be one module for the android_library", 1, modules.size());
SerializableModule robolectricModule = Iterables.getOnlyElement(modules);
assertListEquals(
"It is imperative that httpcore-4.0.1.jar be listed before the support v4 library, " +
"or else when robolectric is listed as a dependency, " +
"org.apache.http.params.BasicHttpParams will be loaded from android.jar instead of " +
"httpcore-4.0.1.jar.",
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newLibrary(
httpCore.getBuildTarget(),
"buck_out_gen_third_party_java_httpcore___httpcore___httpcore_4_0_1_jar"),
SerializableDependentModule.newModule(
supportV4.getBuildTarget(), "module_java_com_android_support_v4"),
SerializableDependentModule.newStandardJdk(
Optional.<String>absent(),
Optional.<String>absent())),
robolectricModule.getDependencies());
}
@Test
public void testCreatePathToProjectDotPropertiesFileForModule() {
SerializableModule rootModule = new SerializableModule(null /* buildRule */,
BuildTargetFactory.newInstance("//:project_config"));
rootModule.pathToImlFile = Paths.get("fb4a.iml");
assertEquals("project.properties", Project.createPathToProjectDotPropertiesFileFor(rootModule));
SerializableModule someModule = new SerializableModule(null /* buildRule */,
BuildTargetFactory.newInstance("//java/com/example/base:project_config"));
someModule.pathToImlFile = Paths.get("java/com/example/base/base.iml");
assertEquals("java/com/example/base/project.properties",
Project.createPathToProjectDotPropertiesFileFor(someModule));
}
/**
* A project_config()'s src_roots argument can be {@code None}, {@code []}, or a non-empty array.
* Each of these should be treated differently.
*/
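// Hypothetical BUCK snippets (illustrative only) for the three src_roots variants exercised below:
//
//   project_config(src_target = ':res')                        # src_roots omitted (None)
//   project_config(src_target = ':base', src_roots = [])       # sources live next to the BUCK file
//   project_config(src_target = ':base', src_roots = ['src'])  # sources under src/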
@Test
public void testSrcRoots() throws Exception {
// Create a project_config() with src_roots=None.
BuildRuleResolver ruleResolver1 =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
BuildRule resBuildRule = ruleResolver1.addToIndex(
AndroidResourceRuleBuilder.newBuilder()
.setResolver(new SourcePathResolver(ruleResolver1))
.setBuildTarget(BuildTargetFactory.newInstance("//resources/com/example:res"))
.build());
ProjectConfig projectConfigNullSrcRoots = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//resources/com/example:project_config"))
.setSrcRule(resBuildRule.getBuildTarget())
.setSrcRoots(null)
.build(ruleResolver1);
ProjectWithModules projectWithModules1 = getModulesForActionGraph(
ruleResolver1,
ImmutableSortedSet.of(projectConfigNullSrcRoots),
null /* javaPackageFinder */,
null /* intellijConfig */);
// Verify that the correct source folders are created.
assertEquals(1, projectWithModules1.modules.size());
SerializableModule moduleNoJavaSource = projectWithModules1.modules.get(0);
assertListEquals(
"Only source tmp should be gen/ when setSrcRoots(null) is specified.",
ImmutableList.of(SerializableModule.SourceFolder.GEN),
moduleNoJavaSource.sourceFolders);
// Create a project_config() with src_roots=[].
BuildRuleResolver ruleResolver2 =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
BuildRule baseBuildRule = AndroidLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.build(ruleResolver2);
ProjectConfig inPackageProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule.getBuildTarget())
.setSrcRoots(ImmutableList.<String>of())
.build(ruleResolver2);
// Verify that the correct source folders are created.
JavaPackageFinder javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class);
EasyMock
.expect(
javaPackageFinder.findJavaPackage(
Paths.get("java/com/example/base/module_java_com_example_base.iml")))
.andReturn("com.example.base");
EasyMock.replay(javaPackageFinder);
ProjectWithModules projectWithModules2 = getModulesForActionGraph(
ruleResolver2,
ImmutableSortedSet.of(inPackageProjectConfig),
javaPackageFinder,
null /* intellijConfig */);
EasyMock.verify(javaPackageFinder);
assertEquals(1, projectWithModules2.modules.size());
SerializableModule moduleWithPackagePrefix = projectWithModules2.modules.get(0);
assertListEquals(
"The current directory should be a source tmp with a package prefix " +
"as well as the gen/ directory.",
ImmutableList.of(
new SourceFolder("file://$MODULE_DIR$", false /* isTestSource */, "com.example.base"),
SerializableModule.SourceFolder.GEN),
moduleWithPackagePrefix.sourceFolders);
// Create a project_config() with src_roots=['src'].
BuildRuleResolver ruleResolver3 =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
BuildRule baseBuildRule3 = AndroidLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.build(ruleResolver3);
ProjectConfig hasSrcFolderProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule3.getBuildTarget())
.setSrcRoots(ImmutableList.of("src"))
.build(ruleResolver3);
ProjectWithModules projectWithModules3 = getModulesForActionGraph(
ruleResolver3,
ImmutableSortedSet.of(hasSrcFolderProjectConfig),
null /* javaPackageFinder */,
null /* intellijConfig */);
// Verify that the correct source folders are created.
assertEquals(1, projectWithModules3.modules.size());
SerializableModule moduleHasSrcFolder = projectWithModules3.modules.get(0);
assertListEquals(
"Both src/ and gen/ should be source folders.",
ImmutableList.of(
new SourceFolder("file://$MODULE_DIR$/src", false /* isTestSource */),
SerializableModule.SourceFolder.GEN),
moduleHasSrcFolder.sourceFolders);
}
@Test
public void testIntellijJdkConfig() throws Exception {
IntellijConfig intellijConfig = new IntellijConfig(
FakeBuckConfig.builder().setSections(
ImmutableMap.of("intellij", ImmutableMap.of("jdk_name", "1.8")))
.build()
);
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
BuildRule baseBuildRule = JavaLibraryBuilder
.createBuilder(BuildTargetFactory.newInstance("//java/com/example/base:base"))
.build(ruleResolver);
ProjectConfig packageProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
.setSrcRule(baseBuildRule.getBuildTarget())
.setSrcRoots(ImmutableList.<String>of())
.build(ruleResolver);
ProjectWithModules projectWithJdkOverride = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(packageProjectConfig),
null /* javaPackageFinder */,
intellijConfig);
SerializableModule moduleWithJdkOverride = projectWithJdkOverride.modules.get(0);
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newStandardJdk(Optional.of("1.8"), Optional.of("JavaSDK"))),
moduleWithJdkOverride.getDependencies());
ProjectWithModules projectWithDefaultJdk = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(packageProjectConfig),
null /* javaPackageFinder */,
null /* intellijConfig */);
SerializableModule moduleWithDefaultJdk = projectWithDefaultJdk.modules.get(0);
assertListEquals(
ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newStandardJdk(Optional.of("1.7"), Optional.of("JavaSDK"))),
moduleWithDefaultJdk.getDependencies());
}
private static class ProjectWithModules {
private final Project project;
private final ImmutableList<SerializableModule> modules;
private ProjectWithModules(Project project, ImmutableList<SerializableModule> modules) {
this.project = project;
this.modules = modules;
}
}
private ProjectWithModules getModulesForActionGraph(
BuildRuleResolver ruleResolver,
ImmutableSortedSet<ProjectConfig> projectConfigs,
@Nullable JavaPackageFinder javaPackageFinder,
@Nullable IntellijConfig intellijConfig) throws IOException {
if (javaPackageFinder == null) {
javaPackageFinder = new FakeJavaPackageFinder();
}
if (intellijConfig == null) {
intellijConfig = new IntellijConfig(FakeBuckConfig.builder().build());
}
// Create the Project.
ExecutionContext executionContext = TestExecutionContext.newInstance();
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
Properties keystoreProperties = new Properties();
keystoreProperties.put("key.alias", "androiddebugkey");
keystoreProperties.put("key.store.password", "android");
keystoreProperties.put("key.alias.password", "android");
try (OutputStream output =
projectFilesystem.newFileOutputStream(
Paths.get("keystore/debug.keystore.properties"))) {
keystoreProperties.store(output, "");
}
ImmutableMap<Path, String> basePathToAliasMap = ImmutableMap.of();
Project project = new Project(
new SourcePathResolver(ruleResolver),
projectConfigs,
basePathToAliasMap,
javaPackageFinder,
executionContext,
new InMemoryBuildFileTree(
Iterables.transform(
ruleResolver.getBuildRules(),
BuildTarget.TO_TARGET)),
projectFilesystem,
/* pathToDefaultAndroidManifest */ Optional.<String>absent(),
intellijConfig,
/* pathToPostProcessScript */ Optional.<String>absent(),
BuckTestConstant.PYTHON_INTERPRETER,
ObjectMappers.newDefaultInstance(),
true);
// Execute Project's business logic.
List<SerializableModule> modules = new ArrayList<>(project.createModulesForProjectConfigs());
return new ProjectWithModules(project, ImmutableList.copyOf(modules));
}
private static BuildRule getRuleByBuildTarget(String buildTarget, BuildRuleResolver resolver)
throws NoSuchBuildTargetException {
BuildRule rule = resolver.requireRule(BuildTargetFactory.newInstance(buildTarget));
Preconditions.checkNotNull(rule, "No rule for %s", buildTarget);
return rule;
}
@Test
public void testNdkLibraryHasCorrectPath() throws Exception {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
// Build up the graph that corresponds to:
//
// ndk_library(
// name = 'foo-jni'
// )
//
// project_config(
// src_target = ':foo-jni',
// )
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuildTarget fooJni = BuildTargetFactory.newInstance("//third_party/java/foo/jni:foo-jni");
NdkLibrary ndkLibrary =
(NdkLibrary) new NdkLibraryBuilder(fooJni)
.build(ruleResolver, projectFilesystem);
ProjectConfig ndkProjectConfig = (ProjectConfig) ProjectConfigBuilder
.createBuilder(
BuildTargetFactory.newInstance(
"//third_party/java/foo/jni:project_config"))
.setSrcRule(ndkLibrary.getBuildTarget())
.build(ruleResolver);
ProjectWithModules projectWithModules = getModulesForActionGraph(
ruleResolver,
ImmutableSortedSet.of(ndkProjectConfig),
null /* javaPackageFinder */,
null /* intellijConfig */);
List<SerializableModule> modules = projectWithModules.modules;
assertEquals("Should be one module for the ndk_library.", 1, modules.size());
SerializableModule androidLibraryModule = Iterables.getOnlyElement(modules);
assertListEquals(ImmutableList.of(
SerializableDependentModule.newSourceFolder(),
SerializableDependentModule.newInheritedJdk()),
androidLibraryModule.getDependencies());
assertEquals(
Paths.get(String.format("../../../../%s", ndkLibrary.getLibraryPath())),
androidLibraryModule.nativeLibs);
}
@Test
public void testDoNotIgnoreAllOfBuckOut() {
SourcePathResolver resolver = new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
);
ProjectFilesystem projectFilesystem =
FakeProjectFilesystem.createJavaOnlyFilesystem("/opt/src/buck");
BuildTarget buildTarget = BuildTarget.builder(
projectFilesystem.getRootPath(),
"//",
"base").build();
BuildRule buildRule = new FakeBuildRule(buildTarget, resolver);
SerializableModule module = new SerializableModule(buildRule, buildTarget);
Project.addRootExcludes(module, buildRule, projectFilesystem);
ImmutableSortedSet.Builder<SourceFolder> expectedExcludeFolders =
ImmutableSortedSet.orderedBy(SerializableModule.ALPHABETIZER);
expectedExcludeFolders.add(
new SourceFolder("file://$MODULE_DIR$/buck-out/bin", /* isTestSource */ false));
expectedExcludeFolders.add(
new SourceFolder("file://$MODULE_DIR$/buck-out/log", /* isTestSource */ false));
expectedExcludeFolders.add(
new SourceFolder("file://$MODULE_DIR$/buck-out/tmp", /* isTestSource */ false));
for (Path ignorePath : FluentIterable.from(projectFilesystem.getIgnorePaths())
.filter(PathOrGlobMatcher.isPath())
.transform(PathOrGlobMatcher.toPath())) {
if (!ignorePath.equals(projectFilesystem.getBuckPaths().getBuckOut()) &&
!ignorePath.equals(projectFilesystem.getBuckPaths().getGenDir())) {
expectedExcludeFolders.add(
new SourceFolder("file://$MODULE_DIR$/" + MorePaths.pathWithUnixSeparators(ignorePath),
/* isTestSource */ false));
}
}
assertEquals(
"Specific subfolders of buck-out should be excluded rather than all of buck-out.",
expectedExcludeFolders.build(),
module.excludeFolders);
}
}
|
|
package io.swagger.codegen.languages;
import io.swagger.codegen.CliOption;
import io.swagger.codegen.CodegenConfig;
import io.swagger.codegen.CodegenConstants;
import io.swagger.codegen.CodegenModel;
import io.swagger.codegen.CodegenParameter;
import io.swagger.codegen.CodegenProperty;
import io.swagger.codegen.CodegenType;
import io.swagger.codegen.DefaultCodegen;
import io.swagger.codegen.SupportingFile;
import io.swagger.models.properties.*;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
public class PythonClientCodegen extends DefaultCodegen implements CodegenConfig {
protected String packageName;
protected String packageVersion;
protected String apiDocPath = "docs/";
protected String modelDocPath = "docs/";
protected Map<Character, String> regexModifiers;
private String testFolder;
public PythonClientCodegen() {
super();
// clear import mapping (from default generator) as python does not use it
// at the moment
importMapping.clear();
modelPackage = "models";
apiPackage = "api";
outputFolder = "generated-code" + File.separatorChar + "python";
modelTemplateFiles.put("model.mustache", ".py");
apiTemplateFiles.put("api.mustache", ".py");
modelTestTemplateFiles.put("model_test.mustache", ".py");
apiTestTemplateFiles.put("api_test.mustache", ".py");
embeddedTemplateDir = templateDir = "python";
modelDocTemplateFiles.put("model_doc.mustache", ".md");
apiDocTemplateFiles.put("api_doc.mustache", ".md");
testFolder = "test";
languageSpecificPrimitives.clear();
languageSpecificPrimitives.add("int");
languageSpecificPrimitives.add("float");
languageSpecificPrimitives.add("list");
languageSpecificPrimitives.add("bool");
languageSpecificPrimitives.add("str");
languageSpecificPrimitives.add("datetime");
languageSpecificPrimitives.add("date");
languageSpecificPrimitives.add("object");
typeMapping.clear();
typeMapping.put("integer", "int");
typeMapping.put("float", "float");
typeMapping.put("number", "float");
typeMapping.put("long", "int");
typeMapping.put("double", "float");
typeMapping.put("array", "list");
typeMapping.put("map", "dict");
typeMapping.put("boolean", "bool");
typeMapping.put("string", "str");
typeMapping.put("date", "date");
typeMapping.put("DateTime", "datetime");
typeMapping.put("object", "object");
typeMapping.put("file", "file");
// TODO binary should be mapped to byte array
// mapped to String as a workaround
typeMapping.put("binary", "str");
typeMapping.put("ByteArray", "str");
// map uuid to string for the time being
typeMapping.put("UUID", "str");
// from https://docs.python.org/release/2.5.4/ref/keywords.html
setReservedWordsLowerCase(
Arrays.asList(
// local variable name used in API methods (endpoints)
"all_params", "resource_path", "path_params", "query_params",
"header_params", "form_params", "local_var_files", "body_params", "auth_settings",
// @Property
"Property",
// python reserved words
"and", "del", "from", "not", "while", "as", "elif", "global", "or", "with",
"assert", "else", "if", "pass", "yield", "break", "except", "import",
"print", "class", "exec", "in", "raise", "continue", "finally", "is",
"return", "def", "for", "lambda", "try", "self"));
regexModifiers = new HashMap<Character, String>();
regexModifiers.put('i', "IGNORECASE");
regexModifiers.put('l', "LOCALE");
regexModifiers.put('m', "MULTILINE");
regexModifiers.put('s', "DOTALL");
regexModifiers.put('u', "UNICODE");
regexModifiers.put('x', "VERBOSE");
cliOptions.clear();
cliOptions.add(new CliOption(CodegenConstants.PACKAGE_NAME, "python package name (convention: snake_case).")
.defaultValue("swagger_client"));
cliOptions.add(new CliOption(CodegenConstants.PACKAGE_VERSION, "python package version.")
.defaultValue("1.0.0"));
cliOptions.add(CliOption.newBoolean(CodegenConstants.SORT_PARAMS_BY_REQUIRED_FLAG,
CodegenConstants.SORT_PARAMS_BY_REQUIRED_FLAG_DESC).defaultValue(Boolean.TRUE.toString()));
cliOptions.add(new CliOption(CodegenConstants.HIDE_GENERATION_TIMESTAMP, "hides the timestamp when files were generated")
.defaultValue(Boolean.TRUE.toString()));
}
@Override
public void processOpts() {
super.processOpts();
Boolean excludeTests = false;
if(additionalProperties.containsKey(CodegenConstants.EXCLUDE_TESTS)) {
excludeTests = Boolean.valueOf(additionalProperties.get(CodegenConstants.EXCLUDE_TESTS).toString());
}
if (additionalProperties.containsKey(CodegenConstants.PACKAGE_NAME)) {
setPackageName((String) additionalProperties.get(CodegenConstants.PACKAGE_NAME));
}
else {
setPackageName("swagger_client");
}
if (additionalProperties.containsKey(CodegenConstants.PACKAGE_VERSION)) {
setPackageVersion((String) additionalProperties.get(CodegenConstants.PACKAGE_VERSION));
}
else {
setPackageVersion("1.0.0");
}
// default HIDE_GENERATION_TIMESTAMP to true
if (!additionalProperties.containsKey(CodegenConstants.HIDE_GENERATION_TIMESTAMP)) {
additionalProperties.put(CodegenConstants.HIDE_GENERATION_TIMESTAMP, Boolean.TRUE.toString());
} else {
additionalProperties.put(CodegenConstants.HIDE_GENERATION_TIMESTAMP,
Boolean.valueOf(additionalProperties().get(CodegenConstants.HIDE_GENERATION_TIMESTAMP).toString()));
}
additionalProperties.put(CodegenConstants.PACKAGE_NAME, packageName);
additionalProperties.put(CodegenConstants.PACKAGE_VERSION, packageVersion);
// make api and model doc path available in mustache template
additionalProperties.put("apiDocPath", apiDocPath);
additionalProperties.put("modelDocPath", modelDocPath);
String swaggerFolder = packageName;
modelPackage = swaggerFolder + File.separatorChar + "models";
apiPackage = swaggerFolder + File.separatorChar + "apis";
supportingFiles.add(new SupportingFile("README.mustache", "", "README.md"));
supportingFiles.add(new SupportingFile("LICENSE", "", "LICENSE"));
supportingFiles.add(new SupportingFile("setup.mustache", "", "setup.py"));
supportingFiles.add(new SupportingFile("tox.mustache", "", "tox.ini"));
supportingFiles.add(new SupportingFile("test-requirements.mustache", "", "test-requirements.txt"));
supportingFiles.add(new SupportingFile("requirements.mustache", "", "requirements.txt"));
supportingFiles.add(new SupportingFile("api_client.mustache", swaggerFolder, "api_client.py"));
supportingFiles.add(new SupportingFile("rest.mustache", swaggerFolder, "rest.py"));
supportingFiles.add(new SupportingFile("configuration.mustache", swaggerFolder, "configuration.py"));
supportingFiles.add(new SupportingFile("__init__package.mustache", swaggerFolder, "__init__.py"));
supportingFiles.add(new SupportingFile("__init__model.mustache", modelPackage, "__init__.py"));
supportingFiles.add(new SupportingFile("__init__api.mustache", apiPackage, "__init__.py"));
if(Boolean.FALSE.equals(excludeTests)) {
supportingFiles.add(new SupportingFile("__init__test.mustache", testFolder, "__init__.py"));
}
supportingFiles.add(new SupportingFile("git_push.sh.mustache", "", "git_push.sh"));
supportingFiles.add(new SupportingFile("gitignore.mustache", "", ".gitignore"));
supportingFiles.add(new SupportingFile("travis.mustache", "", ".travis.yml"));
}
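// Usage sketch (an assumption about how the generator is invoked; the CLI flags are not defined in
// this class): the options consumed above are typically supplied through a JSON config file, e.g.
//   config.json: { "packageName": "petstore_client", "packageVersion": "1.0.0" }
//   java -jar swagger-codegen-cli.jar generate -i swagger.json -l python -o ./out -c config.json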
private static String dropDots(String str) {
return str.replaceAll("\\.", "_");
}
@Override
public void postProcessParameter(CodegenParameter parameter){
postProcessPattern(parameter.pattern, parameter.vendorExtensions);
}
@Override
public void postProcessModelProperty(CodegenModel model, CodegenProperty property) {
postProcessPattern(property.pattern, property.vendorExtensions);
}
/*
* The swagger pattern spec follows the Perl convention and style of modifiers. Python
 * does not support this syntax natively, so the pattern needs to be converted. See
* https://docs.python.org/2/howto/regex.html#compilation-flags for details.
*/
public void postProcessPattern(String pattern, Map<String, Object> vendorExtensions){
if(pattern != null) {
int i = pattern.lastIndexOf('/');
//Must follow Perl /pattern/modifiers convention
if(pattern.charAt(0) != '/' || i < 2) {
throw new IllegalArgumentException("Pattern must follow the Perl "
+ "/pattern/modifiers convention. "+pattern+" is not valid.");
}
String regex = pattern.substring(1, i).replace("'", "\\'");
List<String> modifiers = new ArrayList<String>();
for(char c : pattern.substring(i).toCharArray()) {
if(regexModifiers.containsKey(c)) {
String modifier = regexModifiers.get(c);
modifiers.add(modifier);
}
}
vendorExtensions.put("x-regex", regex);
vendorExtensions.put("x-modifiers", modifiers);
}
}
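// Worked example (illustrative): for the Swagger pattern "/^[a-z]+$/i" the code above produces
//   vendorExtensions["x-regex"]     = "^[a-z]+$"
//   vendorExtensions["x-modifiers"] = ["IGNORECASE"]
// which a mustache template can render as, roughly, re.search(r'^[a-z]+$', value, re.IGNORECASE).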
@Override
public CodegenType getTag() {
return CodegenType.CLIENT;
}
@Override
public String getName() {
return "python";
}
@Override
public String getHelp() {
return "Generates a Python client library.";
}
@Override
public String escapeReservedWord(String name) {
return "_" + name;
}
@Override
public String apiDocFileFolder() {
return (outputFolder + "/" + apiDocPath);
}
@Override
public String modelDocFileFolder() {
return (outputFolder + "/" + modelDocPath);
}
@Override
public String toModelDocFilename(String name) {
return toModelName(name);
}
@Override
public String toApiDocFilename(String name) {
return toApiName(name);
}
@Override
public String apiFileFolder() {
return outputFolder + File.separatorChar + apiPackage().replace('.', File.separatorChar);
}
@Override
public String modelFileFolder() {
return outputFolder + File.separatorChar + modelPackage().replace('.', File.separatorChar);
}
@Override
public String apiTestFileFolder() {
return outputFolder + File.separatorChar + testFolder;
}
@Override
public String modelTestFileFolder() {
return outputFolder + File.separatorChar + testFolder;
}
@Override
public String getTypeDeclaration(Property p) {
if (p instanceof ArrayProperty) {
ArrayProperty ap = (ArrayProperty) p;
Property inner = ap.getItems();
return getSwaggerType(p) + "[" + getTypeDeclaration(inner) + "]";
} else if (p instanceof MapProperty) {
MapProperty mp = (MapProperty) p;
Property inner = mp.getAdditionalProperties();
return getSwaggerType(p) + "(str, " + getTypeDeclaration(inner) + ")";
}
return super.getTypeDeclaration(p);
}
@Override
public String getSwaggerType(Property p) {
String swaggerType = super.getSwaggerType(p);
String type = null;
if (typeMapping.containsKey(swaggerType)) {
type = typeMapping.get(swaggerType);
if (languageSpecificPrimitives.contains(type)) {
return type;
}
} else {
type = toModelName(swaggerType);
}
return type;
}
@Override
public String toVarName(String name) {
// sanitize name
name = sanitizeName(name); // FIXME: a parameter should not be assigned. Also declare the methods parameters as 'final'.
// remove dollar sign
name = name.replaceAll("$", "");
// if it's all upper case, convert to lower case
if (name.matches("^[A-Z_]*$")) {
name = name.toLowerCase();
}
// underscore the variable name
// petId => pet_id
name = underscore(name);
// remove leading underscore
name = name.replaceAll("^_*", "");
// for reserved word or word starting with number, append _
if (isReservedWord(name) || name.matches("^\\d.*")) {
name = escapeReservedWord(name);
}
return name;
}
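// Examples (illustrative) of the conversions above:
//   "petId"     => "pet_id"
//   "MAX_COUNT" => "max_count"   (all upper case is lower-cased first)
//   "class"     => "_class"      (reserved word gets a leading underscore)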
@Override
public String toParamName(String name) {
// should be the same as variable name
return toVarName(name);
}
@Override
public String toModelName(String name) {
name = sanitizeName(name); // FIXME: a parameter should not be assigned. Also declare the methods parameters as 'final'.
// remove dollar sign
name = name.replaceAll("$", "");
// model name cannot use reserved keyword, e.g. return
if (isReservedWord(name)) {
LOGGER.warn(name + " (reserved word) cannot be used as model name. Renamed to " + camelize("model_" + name));
name = "model_" + name; // e.g. return => ModelReturn (after camelize)
}
// model name starts with number
if (name.matches("^\\d.*")) {
LOGGER.warn(name + " (model name starts with number) cannot be used as model name. Renamed to " + camelize("model_" + name));
name = "model_" + name; // e.g. 200Response => Model200Response (after camelize)
}
if (!StringUtils.isEmpty(modelNamePrefix)) {
name = modelNamePrefix + "_" + name;
}
if (!StringUtils.isEmpty(modelNameSuffix)) {
name = name + "_" + modelNameSuffix;
}
// camelize the model name
// phone_number => PhoneNumber
return camelize(name);
}
@Override
public String toModelFilename(String name) {
name = sanitizeName(name); // FIXME: a parameter should not be assigned. Also declare the methods parameters as 'final'.
// remove dollar sign
name = name.replaceAll("$", "");
// model name cannot use reserved keyword, e.g. return
if (isReservedWord(name)) {
LOGGER.warn(name + " (reserved word) cannot be used as model filename. Renamed to " + underscore(dropDots("model_" + name)));
name = "model_" + name; // e.g. return => ModelReturn (after camelize)
}
// model name starts with number
if (name.matches("^\\d.*")) {
LOGGER.warn(name + " (model name starts with number) cannot be used as model name. Renamed to " + underscore("model_" + name));
name = "model_" + name; // e.g. 200Response => Model200Response (after camelize)
}
if (!StringUtils.isEmpty(modelNamePrefix)) {
name = modelNamePrefix + "_" + name;
}
if (!StringUtils.isEmpty(modelNameSuffix)) {
name = name + "_" + modelNameSuffix;
}
// underscore the model file name
// PhoneNumber => phone_number
return underscore(dropDots(name));
}
@Override
public String toModelTestFilename(String name) {
return "test_" + toModelFilename(name);
}
@Override
public String toApiFilename(String name) {
// replace - with _ e.g. created-at => created_at
name = name.replaceAll("-", "_");
// e.g. PhoneNumberApi.rb => phone_number_api.rb
return underscore(name) + "_api";
}
@Override
public String toApiTestFilename(String name) {
return "test_" + toApiFilename(name);
}
@Override
public String toApiName(String name) {
if (name.length() == 0) {
return "DefaultApi";
}
// e.g. phone_number_api => PhoneNumberApi
return camelize(name) + "Api";
}
@Override
public String toApiVarName(String name) {
if (name.length() == 0) {
return "default_api";
}
return underscore(name) + "_api";
}
@Override
public String toOperationId(String operationId) {
// throw exception if method name is empty (should not occur as an auto-generated method name will be used)
if (StringUtils.isEmpty(operationId)) {
throw new RuntimeException("Empty method name (operationId) not allowed");
}
// method name cannot use reserved keyword, e.g. return
if (isReservedWord(operationId)) {
LOGGER.warn(operationId + " (reserved word) cannot be used as method name. Renamed to " + underscore(sanitizeName("call_" + operationId)));
operationId = "call_" + operationId;
}
return underscore(sanitizeName(operationId));
}
public void setPackageName(String packageName) {
this.packageName = packageName;
}
public void setPackageVersion(String packageVersion) {
this.packageVersion = packageVersion;
}
/**
* Generate Python package name from String `packageName`
*
* (PEP 0008) Python packages should also have short, all-lowercase names,
* although the use of underscores is discouraged.
*
* @param packageName Package name
* @return Python package name that conforms to PEP 0008
*/
@SuppressWarnings("static-method")
public String generatePackageName(String packageName) {
return underscore(packageName.replaceAll("[^\\w]+", ""));
}
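// Example (illustrative): generatePackageName("My Client") strips the non-word characters to
// "MyClient" and then underscores it to roughly "my_client".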
/**
* Return the default value of the Property
*
* @param p Swagger Property object
* @return string presentation of the default value of the Property
*/
@Override
public String toDefaultValue(Property p) {
if (p instanceof StringProperty) {
StringProperty dp = (StringProperty) p;
if (dp.getDefault() != null) {
return "'" + dp.getDefault() + "'";
}
} else if (p instanceof BooleanProperty) {
BooleanProperty dp = (BooleanProperty) p;
if (dp.getDefault() != null) {
if (dp.getDefault().toString().equalsIgnoreCase("false"))
return "False";
else
return "True";
}
} else if (p instanceof DateProperty) {
// TODO
} else if (p instanceof DateTimeProperty) {
// TODO
} else if (p instanceof DoubleProperty) {
DoubleProperty dp = (DoubleProperty) p;
if (dp.getDefault() != null) {
return dp.getDefault().toString();
}
} else if (p instanceof FloatProperty) {
FloatProperty dp = (FloatProperty) p;
if (dp.getDefault() != null) {
return dp.getDefault().toString();
}
} else if (p instanceof IntegerProperty) {
IntegerProperty dp = (IntegerProperty) p;
if (dp.getDefault() != null) {
return dp.getDefault().toString();
}
} else if (p instanceof LongProperty) {
LongProperty dp = (LongProperty) p;
if (dp.getDefault() != null) {
return dp.getDefault().toString();
}
}
return null;
}
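// Examples (illustrative): a string default of abc is rendered as 'abc', a boolean default of
// true as True, and numeric defaults verbatim; date/datetime defaults are not yet handled and
// fall through to null.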
@Override
public void setParameterExampleValue(CodegenParameter p) {
String example;
if (p.defaultValue == null) {
example = p.example;
} else {
example = p.defaultValue;
}
String type = p.baseType;
if (type == null) {
type = p.dataType;
}
if ("String".equalsIgnoreCase(type) || "str".equalsIgnoreCase(type)) {
if (example == null) {
example = p.paramName + "_example";
}
example = "'" + escapeText(example) + "'";
} else if ("Integer".equals(type) || "int".equals(type)) {
if (example == null) {
example = "56";
}
} else if ("Float".equalsIgnoreCase(type) || "Double".equalsIgnoreCase(type)) {
if (example == null) {
example = "3.4";
}
} else if ("BOOLEAN".equalsIgnoreCase(type) || "bool".equalsIgnoreCase(type)) {
if (example == null) {
example = "True";
}
} else if ("file".equalsIgnoreCase(type)) {
if (example == null) {
example = "/path/to/file";
}
example = "'" + escapeText(example) + "'";
} else if ("Date".equalsIgnoreCase(type)) {
if (example == null) {
example = "2013-10-20";
}
example = "'" + escapeText(example) + "'";
} else if ("DateTime".equalsIgnoreCase(type)) {
if (example == null) {
example = "2013-10-20T19:20:30+01:00";
}
example = "'" + escapeText(example) + "'";
} else if (!languageSpecificPrimitives.contains(type)) {
// type is a model class, e.g. User
example = this.packageName + "." + type + "()";
} else {
LOGGER.warn("Type " + type + " not handled properly in setParameterExampleValue");
}
if (example == null) {
example = "NULL";
} else if (Boolean.TRUE.equals(p.isListContainer)) {
example = "[" + example + "]";
} else if (Boolean.TRUE.equals(p.isMapContainer)) {
example = "{'key': " + example + "}";
}
p.example = example;
}
@Override
public String escapeQuotationMark(String input) {
// remove ' to avoid code injection
return input.replace("'", "");
}
@Override
public String escapeUnsafeCharacters(String input) {
// remove multiline comment
return input.replace("'''", "'_'_'");
}
}
|
|
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.tableinput;
import java.io.IOException;
import java.io.StringReader;
import java.util.Hashtable;
import java.util.Map;
import java.util.Vector;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.LineStyleEvent;
import org.eclipse.swt.custom.LineStyleListener;
import org.eclipse.swt.custom.StyleRange;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.graphics.Color;
import org.pentaho.di.ui.core.gui.GUIResource;
public class SQLValuesHighlight implements LineStyleListener {
JavaScanner scanner = new JavaScanner();
int[] tokenColors;
Color[] colors;
Vector<int[]> blockComments = new Vector<int[]>();
public static final int EOF= -1;
public static final int EOL= 10;
public static final int WORD= 0;
public static final int WHITE= 1;
public static final int KEY= 2;
public static final int COMMENT= 3; // single line comment: //
public static final int STRING= 5;
public static final int OTHER= 6;
public static final int NUMBER= 7;
public static final int FUNCTIONS= 8;
public static final int MAXIMUM_TOKEN= 9;
public SQLValuesHighlight() {
initializeColors();
scanner = new JavaScanner();
}
public SQLValuesHighlight(String[] strArrSQLFunctions) {
initializeColors();
scanner = new JavaScanner();
scanner.setSQLKeywords(strArrSQLFunctions);
scanner.initializeSQLFunctions();
}
Color getColor(int type) {
if (type < 0 || type >= tokenColors.length) {
return null;
}
return colors[tokenColors[type]];
}
boolean inBlockComment(int start, int end) {
for (int i=0; i<blockComments.size(); i++) {
int[] offsets = (int[])blockComments.elementAt(i);
// start of comment in the line
if ((offsets[0] >= start) && (offsets[0] <= end)) return true;
// end of comment in the line
if ((offsets[1] >= start) && (offsets[1] <= end)) return true;
if ((offsets[0] <= start) && (offsets[1] >= end)) return true;
}
return false;
}
void initializeColors() {
// Display display = Display.getDefault();
colors= new Color[] {
GUIResource.getInstance().getColor(0, 0, 0), // black
GUIResource.getInstance().getColor(255,0,0), // red
GUIResource.getInstance().getColor(63, 127, 95), // green
GUIResource.getInstance().getColor(0, 0, 255), // blue
GUIResource.getInstance().getColor(255, 0, 255) // SQL Functions / Rose
};
tokenColors= new int[MAXIMUM_TOKEN];
tokenColors[WORD]= 0;
tokenColors[WHITE]= 0;
tokenColors[KEY]= 3;
tokenColors[COMMENT]= 2;
tokenColors[STRING]= 1;
tokenColors[OTHER]= 0;
tokenColors[NUMBER]= 0;
tokenColors[FUNCTIONS]= 4;
}
/**
* Event.detail line start offset (input)
* Event.text line text (input)
* LineStyleEvent.styles Enumeration of StyleRanges, need to be in order. (output)
* LineStyleEvent.background line background color (output)
*/
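// Usage sketch (an assumption, not part of this class; 'shell' and 'sqlFunctionNames' are
// hypothetical): the highlighter is attached to an SWT StyledText, which then calls
// lineGetStyle() for every rendered line, e.g.
//   StyledText sqlEditor = new StyledText(shell, SWT.MULTI | SWT.BORDER);
//   sqlEditor.addLineStyleListener(new SQLValuesHighlight(sqlFunctionNames));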
public void lineGetStyle(LineStyleEvent event) {
Vector<StyleRange> styles = new Vector<StyleRange>();
int token;
StyleRange lastStyle;
if (inBlockComment(event.lineOffset, event.lineOffset + event.lineText.length())) {
styles.addElement(new StyleRange(event.lineOffset, event.lineText.length()+4, colors[1], null));
event.styles = new StyleRange[styles.size()];
styles.copyInto(event.styles);
return;
}
scanner.setRange(event.lineText);
String xs = ((StyledText)event.widget).getText();
if(xs!=null) parseBlockComments(xs);
token = scanner.nextToken();
while (token != EOF) {
if (token == OTHER) {
// do nothing
} else if ((token == WHITE) && (!styles.isEmpty())) {
int start = scanner.getStartOffset() + event.lineOffset;
lastStyle = (StyleRange)styles.lastElement();
if (lastStyle.fontStyle != SWT.NORMAL) {
if (lastStyle.start + lastStyle.length == start) {
// have the white space take on the style before it to minimize font style
// changes
lastStyle.length += scanner.getLength();
}
}
} else {
Color color = getColor(token);
if (color != colors[0]) { // hardcoded default foreground color, black
StyleRange style = new StyleRange(scanner.getStartOffset() + event.lineOffset, scanner.getLength(), color, null);
if (token == KEY) {
//style.fontStyle = SWT.BOLD;
}
if (styles.isEmpty()) {
styles.addElement(style);
} else {
lastStyle = (StyleRange)styles.lastElement();
if (lastStyle.similarTo(style) && (lastStyle.start + lastStyle.length == style.start)) {
lastStyle.length += style.length;
} else {
styles.addElement(style);
}
}
}
}
token= scanner.nextToken();
}
event.styles = new StyleRange[styles.size()];
styles.copyInto(event.styles);
}
public void parseBlockComments(String text) {
blockComments = new Vector<int[]>();
StringReader buffer = new StringReader(text);
int ch;
boolean blkComment = false;
int cnt = 0;
int[] offsets = new int[2];
boolean done = false;
try {
while (!done) {
switch (ch = buffer.read()) {
case -1 : {
if (blkComment) {
offsets[1] = cnt;
blockComments.addElement(offsets);
}
done = true;
break;
}
case '/' : {
ch = buffer.read();
if ((ch == '*') && (!blkComment)) {
offsets = new int[2];
offsets[0] = cnt;
blkComment = true;
cnt++;
} else {
cnt++;
}
cnt++;
break;
}
case '*' : {
if (blkComment) {
ch = buffer.read();
cnt++;
if (ch == '/') {
blkComment = false;
offsets[1] = cnt;
blockComments.addElement(offsets);
}
}
cnt++;
break;
}
default : {
cnt++;
break;
}
}
}
} catch(IOException e) {
// ignore errors
}
}
/**
* A simple fuzzy scanner for Java
*/
public class JavaScanner {
protected Map<String, Integer> fgKeys= null;
protected Map<?, ?> fgFunctions= null;
protected Map<String, Integer> kfKeys= null;
protected Map<?, ?> kfFunctions= null;
protected StringBuffer fBuffer= new StringBuffer();
protected String fDoc;
protected int fPos;
protected int fEnd;
protected int fStartToken;
protected boolean fEofSeen= false;
private String[] kfKeywords ={
"getdate","case","convert","left","right","isnumeric","isdate","isnumber",
"number", "finally", "cast","var","fetch_status","isnull","charindex",
"difference","len","nchar","quotename","replicate","reverse","str","stuff",
"unicode","ascii","char",
"to_char","to_date","to_number","nvl","sysdate","corr","count","grouping","max","min","stdev","sum",
"concat","length","locate","ltrim","posstr","repeat","replace","rtrim",
"soundex","space","substr","substring","trunc","nextval","currval","getclobval",
"char_length","compare","patindex","sortkey","uscalar",
"current_date","current_time","current_timestamp","current_user","session_user","system_user",
"curdate","curtime","database","now","sysdate","today","user","version","coalesce",
"nullif","octet_length","datalength","decode","greatest","ifnull","least","||",
"char_length","character_length","collate","concatenate","like","lower","position",
"translate","upper","char_octet_length","character_maximum_length","character_octet_length",
"ilike","initcap","instr","lcase","lpad","patindex","rpad","ucase","bit_length","&","|","^",
"%","+","-","*","/","(",")","abs","asin","atan","ceiling","cos","cot","exp","floor","ln","log",
"log10","mod","pi","power","rand","round","sign","sin","sqrt","tan","trunc","extract",
"interval","overlaps","adddate","age","date_add","dateformat","date_part","date_sub",
"datediff","dateadd","datename","datepart","day","dayname","dayofmonth","dayofweek",
"dayofyear","hour","last_day","minute","month","month_between","monthname","next_day","second",
"sub_date","week","year","dbo", "log","objectproperty"
};
private String[] fgKeywords= {
"create","procedure","as","set","nocount","on","declare","varchar","print","table","int","tintytext",
"select","from","where","and","or","insert","into","cursor","read_only","for","open","fetch",
"next","end","deallocate","table","drop","exec","begin","close","update","delete","truncate",
"inner","outer","join","union","all","float","when",
"nolock","with","false","datetime","dare","time","hour",
"array","minute","second","millisecond","view","function",
"catch", "const","continue","compute","browse","option",
"date", "default", "do","raw","auto","explicit","xmldata",
"elements","binary","base64","read","outfile","asc","desc",
"else","eval","escape","having","limit","offset","of","intersect","except","using",
"variance","specific","language","body","returns","specific","deterministic",
"not","external","action","reads","static","inherit","called","order","group","by",
"natural","full","exists","between","some","any","unique","match","value","limite",
"minus","references","grant","on","top","index",
"bigint","text","char","use","move",
"exec","init","name","noskip","skip","noformat","format","stats","disk","from","to","rownum",
"alter","add","remove",
"move","alter","add","remove","lineno","modify",
"if","else","in","is",
"new", "Number", "null",
"string", "switch",
"this","then","throw", "true","false","try",
"return",
"with", "while",
"start","connect","optimize","first","only","rows","sequence","blob","clob","image","binary",
"column","decimal","distinct","primary","key","timestamp","varbinary","nvarchar","nchar","longnvarchar",
"nclob","numeric", "constraint", "dbcc", "backup", "bit", "clustered","pad_index",
"off", "statistics_norecompute", "ignore_dup_key", "allow_row_locks", "allow_page_locks",
"textimage_on", "double"
};
public JavaScanner() {
initialize();
initializeSQLFunctions();
}
/**
* Returns the length of the current token.
*/
public final int getLength() {
return fPos - fStartToken;
}
/**
* Initialize the lookup table.
*/
void initialize() {
fgKeys = new Hashtable<String, Integer>();
Integer k = Integer.valueOf(KEY);
for (int i = 0; i < fgKeywords.length; i++)
fgKeys.put(fgKeywords[i], k);
}
public void setSQLKeywords(String[] kfKeywords) {
this.kfKeywords = kfKeywords;
}
void initializeSQLFunctions() {
kfKeys = new Hashtable<String, Integer>();
Integer k = Integer.valueOf(FUNCTIONS);
for (int i = 0; i < kfKeywords.length; i++)
kfKeys.put(kfKeywords[i], k);
}
/**
* Returns the starting location of the current token in the document.
*/
public final int getStartOffset() {
return fStartToken;
}
/**
* Returns the next lexical token in the document.
*/
public int nextToken() {
int c;
fStartToken= fPos;
while (true) {
switch (c= read()) {
case EOF:
return EOF;
case '/': // comment
c= read();
if(c == '/') {
while (true) {
c= read();
if ((c == EOF) || (c == EOL)) {
unread(c);
return COMMENT;
}
}
} else {
unread(c);
}
return OTHER;
case '-': // comment
c= read();
if(c == '-') {
while (true) {
c= read();
if ((c == EOF) || (c == EOL)) {
unread(c);
return COMMENT;
}
}
} else {
unread(c);
}
return OTHER;
case '\'': // char const
for(;;) {
c= read();
switch (c) {
case '\'':
return STRING;
case EOF:
unread(c);
return STRING;
case '\\':
c= read();
break;
}
}
case '"': // string
for (;;) {
c= read();
switch (c) {
case '"':
return STRING;
case EOF:
unread(c);
return STRING;
case '\\':
c= read();
break;
}
}
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
do {
c= read();
} while(Character.isDigit((char)c));
unread(c);
return NUMBER;
default:
if (Character.isWhitespace((char)c)) {
do {
c= read();
} while(Character.isWhitespace((char)c));
unread(c);
return WHITE;
}
if (Character.isJavaIdentifierStart((char)c)) {
fBuffer.setLength(0);
do {
fBuffer.append((char)c);
c= read();
} while(Character.isJavaIdentifierPart((char)c));
unread(c);
Integer i= (Integer) fgKeys.get(fBuffer.toString());
if (i != null) return i.intValue();
i= (Integer) kfKeys.get(fBuffer.toString());
if (i != null) return i.intValue();
return WORD;
}
return OTHER;
}
}
}
/**
* Returns next character.
*/
protected int read() {
if (fPos <= fEnd) {
return fDoc.charAt(fPos++);
}
return EOF;
}
public void setRange(String text) {
fDoc= text.toLowerCase();
fPos= 0;
fEnd= fDoc.length() -1;
}
protected void unread(int c) {
if (c != EOF)
fPos--;
}
}
}
|
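/*
 * Hedged usage sketch (not part of the original sources): how a LineStyleListener like the
 * scanner-based styler above might be attached to an SWT StyledText widget. Because the enclosing
 * class name of that styler is not visible in this excerpt, the anonymous listener below is a
 * deliberately minimal stand-in that only colors "--" comment lines; the class name
 * LineStylerUsageSketch and the sample text are assumptions made for illustration.
 */
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.LineStyleEvent;
import org.eclipse.swt.custom.LineStyleListener;
import org.eclipse.swt.custom.StyleRange;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;

public class LineStylerUsageSketch {
    public static void main(String[] args) {
        final Display display = new Display();
        Shell shell = new Shell(display);
        shell.setLayout(new FillLayout());
        StyledText text = new StyledText(shell, SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL);
        // A real application would register the scanner-based listener defined above instead.
        text.addLineStyleListener(new LineStyleListener() {
            public void lineGetStyle(LineStyleEvent event) {
                if (event.lineText.startsWith("--")) {
                    // Color the whole comment line, mirroring the block-comment branch of the styler above.
                    event.styles = new StyleRange[] {
                        new StyleRange(event.lineOffset, event.lineText.length(),
                                display.getSystemColor(SWT.COLOR_DARK_GREEN), null)
                    };
                }
            }
        });
        text.setText("select * from dual\n-- a SQL comment line\n");
        shell.setSize(500, 300);
        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) display.sleep();
        }
        display.dispose();
    }
}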
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.testing.clients.osgi;
import org.apache.sling.testing.clients.ClientException;
import org.apache.sling.testing.clients.util.poller.AbstractPoller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
* Utility for installing and starting additional bundles for testing
*/
public class BundlesInstaller {
private final Logger log = LoggerFactory.getLogger(getClass());
private final OsgiConsoleClient osgiConsoleClient;
public static final String ACTIVE_STATE = "active";
public BundlesInstaller(OsgiConsoleClient cc) {
osgiConsoleClient = cc;
}
/**
* Checks if a bundle is installed or not. Does not retry.
* @param bundleFile the bundle file whose symbolic name is looked up on the server
* @return true if a bundle with the same symbolic name is already installed
* @throws IOException if the symbolic name cannot be read from the bundle file
* @throws InterruptedException if the check is interrupted
*/
public boolean isInstalled(File bundleFile) throws InterruptedException, IOException {
final String bundleSymbolicName = OsgiConsoleClient.getBundleSymbolicName(bundleFile);
log.debug("Checking if installed: " + bundleSymbolicName);
boolean installed = osgiConsoleClient.checkBundleInstalled(bundleSymbolicName, 1000, 1);
// if this succeeds, then there's no need to install again
if (installed) {
log.debug("Already installed: " + bundleSymbolicName);
return true;
} else {
log.debug("Not yet installed: " + bundleSymbolicName);
return false;
}
}
/**
* Check if the installed version matches the version of the bundle (file)
* @param bundleFile the bundle file to compare against the server
* @return true if the version installed on the server equals the version in the bundle file
* @throws ClientException if the installed version cannot be queried from the server
* @throws IOException if the version cannot be read from the bundle file
*/
public boolean isInstalledWithSameVersion(File bundleFile) throws ClientException, IOException {
final String bundleSymbolicName = OsgiConsoleClient.getBundleSymbolicName(bundleFile);
final String versionOnServer = osgiConsoleClient.getBundleVersion(bundleSymbolicName);
final String versionInBundle = OsgiConsoleClient.getBundleVersionFromFile(bundleFile);
if (versionOnServer.equals(versionInBundle)) {
return true;
} else {
log.warn("Installed bundle doesn't match: {}, versionOnServer={}, versionInBundle={}",
bundleSymbolicName, versionOnServer, versionInBundle);
return false;
}
}
/**
* Install a list of bundles supplied as Files
* @param toInstall the bundle files to install
* @param startBundles whether to start the bundles after installing them
* @throws ClientException if installing or uninstalling a bundle fails
* @throws IOException if a bundle file cannot be read
* @throws InterruptedException if the operation is interrupted
*/
public void installBundles(List<File> toInstall, boolean startBundles) throws ClientException, IOException, InterruptedException {
for(File f : toInstall) {
final String bundleSymbolicName = OsgiConsoleClient.getBundleSymbolicName(f);
if (isInstalled(f)) {
if (f.getName().contains("SNAPSHOT")) {
log.info("Reinstalling (due to SNAPSHOT version): {}", bundleSymbolicName);
osgiConsoleClient.uninstallBundle(bundleSymbolicName);
} else if (!isInstalledWithSameVersion(f)) {
log.info("Reinstalling (due to version mismatch): {}", bundleSymbolicName);
osgiConsoleClient.uninstallBundle(bundleSymbolicName);
} else {
log.info("Not reinstalling: {}", bundleSymbolicName);
continue;
}
}
osgiConsoleClient.installBundle(f, startBundles);
log.info("Installed: {}", bundleSymbolicName);
}
// ensure that bundles are re-wired esp. if an existing bundle was updated
osgiConsoleClient.refreshPackages();
log.info("{} additional bundles installed", toInstall.size());
}
/**
* Uninstall a list of bundles supplied as Files
* @param toUninstall the bundle files to uninstall
* @throws ClientException if uninstalling a bundle fails
* @throws IOException if a bundle's symbolic name cannot be read
* @throws InterruptedException if the operation is interrupted
*/
public void uninstallBundles(List<File> toUninstall) throws ClientException, IOException, InterruptedException {
for(File f : toUninstall) {
final String bundleSymbolicName = OsgiConsoleClient.getBundleSymbolicName(f);
if (isInstalled(f)) {
log.info("Uninstalling bundle: {}", bundleSymbolicName);
osgiConsoleClient.uninstallBundle(bundleSymbolicName);
} else {
log.info("Could not uninstall: {} as it never was installed", bundleSymbolicName);
}
}
// ensure that bundles are re-wired esp. if an existing bundle was updated
osgiConsoleClient.refreshPackages();
log.info("{} additional bundles uninstalled", toUninstall.size());
}
/**
* Wait for all bundles specified in symbolicNames list to be installed in the OSGi web console.
* @param symbolicNames the list of symbolic names of the bundles
* @param timeoutSeconds how many seconds to wait for each bundle
* @return true if all bundles were installed before the timeout, false otherwise
* @throws ClientException if querying the OSGi console fails
* @throws InterruptedException if the wait is interrupted
*/
public boolean waitForBundlesInstalled(List<String> symbolicNames, int timeoutSeconds) throws ClientException, InterruptedException {
log.info("Checking that the following bundles are installed (timeout {} seconds): {}", timeoutSeconds, symbolicNames);
for (String symbolicName : symbolicNames) {
boolean started = osgiConsoleClient.checkBundleInstalled(symbolicName, 500, 2 * timeoutSeconds);
if (!started) return false;
}
return true;
}
/**
* Start all the bundles in a {@link List} of symbolic names
* @param symbolicNames the list of bundles to start
* @param timeoutSeconds number of seconds until it times out
* @throws ClientException if some bundles did not start before the timeout
* @throws InterruptedException if the wait is interrupted
*/
public void startAllBundles(final List<String> symbolicNames, int timeoutSeconds) throws ClientException, InterruptedException {
log.info("Starting bundles (timeout {} seconds): {}", timeoutSeconds, symbolicNames);
class StartAllBundlesPoller extends AbstractPoller {
private ClientException exception;
public StartAllBundlesPoller(List<String> symbolicNames, long waitInterval, long waitCount) {
super(waitInterval, waitCount);
}
@Override
public boolean call() {
for (String bundle : symbolicNames) {
final String state;
try {
state = osgiConsoleClient.getBundleState(bundle);
if (!state.equalsIgnoreCase(ACTIVE_STATE)) {
osgiConsoleClient.startBundle(bundle);
}
} catch (ClientException e) {
this.exception = e;
return false;
}
}
return true;
}
@Override
public boolean condition() {
for (String bundle : symbolicNames) {
final String state;
try {
state = osgiConsoleClient.getBundleState(bundle);
if (!state.equalsIgnoreCase(ACTIVE_STATE)) {
return false;
}
} catch (ClientException e) {
this.exception = e;
return false;
}
}
return true;
}
public ClientException getException() {
return exception;
}
}
StartAllBundlesPoller poller = new StartAllBundlesPoller(symbolicNames, 1000, timeoutSeconds);
if (!poller.callUntilCondition()) {
throw new ClientException("Some bundles did not start or timed out", poller.getException());
}
}
}
|
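/*
 * Hedged usage sketch (not part of the original sources): driving the BundlesInstaller above
 * against a running Sling/OSGi instance. The server URL, credentials, bundle path and symbolic
 * name are placeholders, and the OsgiConsoleClient(URI, user, password) constructor used here is
 * an assumption -- adapt it to however your test framework builds its clients.
 */
import java.io.File;
import java.net.URI;
import java.util.Arrays;
import java.util.List;

import org.apache.sling.testing.clients.osgi.BundlesInstaller;
import org.apache.sling.testing.clients.osgi.OsgiConsoleClient;

public class BundlesInstallerUsageSketch {
    public static void main(String[] args) throws Exception {
        // Assumed constructor and credentials; adjust for your environment.
        OsgiConsoleClient client = new OsgiConsoleClient(URI.create("http://localhost:8080"), "admin", "admin");
        BundlesInstaller installer = new BundlesInstaller(client);

        // Install (and start) a test bundle; the path is a placeholder.
        List<File> bundles = Arrays.asList(new File("target/it-bundle-1.0.0.jar"));
        installer.installBundles(bundles, true);

        // Wait until the bundle is reported by the OSGi console, then make sure it is active.
        List<String> symbolicNames = Arrays.asList("org.example.it.bundle"); // assumed symbolic name
        installer.waitForBundlesInstalled(symbolicNames, 30);
        installer.startAllBundles(symbolicNames, 30);
    }
}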
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.branch;
import com.intellij.dvcs.DvcsUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.VcsNotifier;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import git4idea.GitLocalBranch;
import git4idea.GitPlatformFacade;
import git4idea.GitUtil;
import git4idea.commands.Git;
import git4idea.commands.GitMessageWithFilesDetector;
import git4idea.config.GitVcsSettings;
import git4idea.repo.GitRepository;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.openapi.util.text.StringUtil.pluralize;
/**
* Common class for Git operations with branches aware of multi-root configuration,
* which means showing combined error information, proposing to rollback, etc.
*/
abstract class GitBranchOperation {
protected static final Logger LOG = Logger.getInstance(GitBranchOperation.class);
@NotNull protected final Project myProject;
@NotNull protected final GitPlatformFacade myFacade;
@NotNull protected final Git myGit;
@NotNull protected final GitBranchUiHandler myUiHandler;
@NotNull private final Collection<GitRepository> myRepositories;
@NotNull protected final Map<GitRepository, String> myCurrentHeads;
private final GitVcsSettings mySettings;
@NotNull private final Collection<GitRepository> mySuccessfulRepositories;
@NotNull private final Collection<GitRepository> myRemainingRepositories;
protected GitBranchOperation(@NotNull Project project, @NotNull GitPlatformFacade facade, @NotNull Git git,
@NotNull GitBranchUiHandler uiHandler, @NotNull Collection<GitRepository> repositories) {
myProject = project;
myFacade = facade;
myGit = git;
myUiHandler = uiHandler;
myRepositories = repositories;
myCurrentHeads = ContainerUtil.map2Map(repositories, new Function<GitRepository, Pair<GitRepository, String>>() {
@Override
public Pair<GitRepository, String> fun(GitRepository repository) {
GitLocalBranch currentBranch = repository.getCurrentBranch();
return Pair.create(repository, currentBranch == null ? repository.getCurrentRevision() : currentBranch.getName());
}
});
mySuccessfulRepositories = new ArrayList<GitRepository>();
myRemainingRepositories = new ArrayList<GitRepository>(myRepositories);
mySettings = myFacade.getSettings(myProject);
}
protected abstract void execute();
protected abstract void rollback();
@NotNull
public abstract String getSuccessMessage();
@NotNull
protected abstract String getRollbackProposal();
/**
* Returns a short, lower-case name of the operation.
* It is used by some dialogs and notifications which are common to several operations.
* Some operations (like checking out a new branch) may not be mentioned in these dialogs, so their operation names are never shown.
*/
@NotNull
protected abstract String getOperationName();
/**
* @return next repository that wasn't handled (e.g. checked out) yet.
*/
@NotNull
protected GitRepository next() {
return myRemainingRepositories.iterator().next();
}
/**
* @return true if there are more repositories on which the operation wasn't executed yet.
*/
protected boolean hasMoreRepositories() {
return !myRemainingRepositories.isEmpty();
}
/**
* Marks repositories as successful, i.e. they won't be handled again.
*/
protected void markSuccessful(GitRepository... repositories) {
for (GitRepository repository : repositories) {
mySuccessfulRepositories.add(repository);
myRemainingRepositories.remove(repository);
}
}
/**
* @return true if the operation has already succeeded in at least one of repositories.
*/
protected boolean wereSuccessful() {
return !mySuccessfulRepositories.isEmpty();
}
@NotNull
protected Collection<GitRepository> getSuccessfulRepositories() {
return mySuccessfulRepositories;
}
@NotNull
protected String successfulRepositoriesJoined() {
return StringUtil.join(mySuccessfulRepositories, new Function<GitRepository, String>() {
@Override
public String fun(GitRepository repository) {
return repository.getPresentableUrl();
}
}, "<br/>");
}
@NotNull
protected Collection<GitRepository> getRepositories() {
return myRepositories;
}
@NotNull
protected Collection<GitRepository> getRemainingRepositories() {
return myRemainingRepositories;
}
@NotNull
protected List<GitRepository> getRemainingRepositoriesExceptGiven(@NotNull final GitRepository currentRepository) {
List<GitRepository> repositories = new ArrayList<GitRepository>(myRemainingRepositories);
repositories.remove(currentRepository);
return repositories;
}
protected void notifySuccess(@NotNull String message) {
VcsNotifier.getInstance(myProject).notifySuccess(message);
}
protected final void notifySuccess() {
notifySuccess(getSuccessMessage());
}
protected final void saveAllDocuments() {
myFacade.saveAllDocuments();
}
/**
* Show fatal error as a notification or as a dialog with rollback proposal.
*/
protected void fatalError(@NotNull String title, @NotNull String message) {
if (wereSuccessful()) {
showFatalErrorDialogWithRollback(title, message);
}
else {
showFatalNotification(title, message);
}
}
protected void showFatalErrorDialogWithRollback(@NotNull final String title, @NotNull final String message) {
boolean rollback = myUiHandler.notifyErrorWithRollbackProposal(title, message, getRollbackProposal());
if (rollback) {
rollback();
}
}
protected void showFatalNotification(@NotNull String title, @NotNull String message) {
notifyError(title, message);
}
protected void notifyError(@NotNull String title, @NotNull String message) {
VcsNotifier.getInstance(myProject).notifyError(title, message);
}
@NotNull
protected ProgressIndicator getIndicator() {
return myUiHandler.getProgressIndicator();
}
/**
* Display the error saying that the operation can't be performed because there are unmerged files in a repository.
* Such error prevents checking out and creating new branch.
*/
protected void fatalUnmergedFilesError() {
if (wereSuccessful()) {
showUnmergedFilesDialogWithRollback();
}
else {
showUnmergedFilesNotification();
}
}
@NotNull
protected String repositories() {
return pluralize("repository", getSuccessfulRepositories().size());
}
/**
* Updates the recently visited branch in the settings.
* This is to be performed after successful checkout operation.
*/
protected void updateRecentBranch() {
if (getRepositories().size() == 1) {
GitRepository repository = myRepositories.iterator().next();
mySettings.setRecentBranchOfRepository(repository.getRoot().getPath(), myCurrentHeads.get(repository));
}
else {
String recentCommonBranch = getRecentCommonBranch();
if (recentCommonBranch != null) {
mySettings.setRecentCommonBranch(recentCommonBranch);
}
}
}
@Nullable
private String getRecentCommonBranch() {
String recentCommonBranch = null;
for (String branch : myCurrentHeads.values()) {
if (recentCommonBranch == null) {
recentCommonBranch = branch;
}
else if (!recentCommonBranch.equals(branch)) {
return null;
}
}
return recentCommonBranch;
}
private void showUnmergedFilesDialogWithRollback() {
boolean ok = myUiHandler.showUnmergedFilesMessageWithRollback(getOperationName(), getRollbackProposal());
if (ok) {
rollback();
}
}
private void showUnmergedFilesNotification() {
myUiHandler.showUnmergedFilesNotification(getOperationName(), getRepositories());
}
/**
* Asynchronously refreshes the VFS root directory of the given repository.
*/
protected void refreshRoot(@NotNull GitRepository repository) {
// marking all files dirty, because sometimes FileWatcher is unable to process such a large set of changes that can happen during
// checkout on a large repository: IDEA-89944
myFacade.hardRefresh(repository.getRoot());
}
protected void fatalLocalChangesError(@NotNull String reference) {
String title = String.format("Couldn't %s %s", getOperationName(), reference);
if (wereSuccessful()) {
showFatalErrorDialogWithRollback(title, "");
}
}
/**
* Shows the error "The following untracked working tree files would be overwritten by checkout/merge".
* If there were no repositories that succeeded the operation, shows a notification with a link to the list of these untracked files.
* If some repositories succeeded, shows a dialog with the list of these files and a proposal to rollback the operation of those
* repositories.
*/
protected void fatalUntrackedFilesError(@NotNull VirtualFile root, @NotNull Collection<String> relativePaths) {
if (wereSuccessful()) {
showUntrackedFilesDialogWithRollback(root, relativePaths);
}
else {
showUntrackedFilesNotification(root, relativePaths);
}
}
private void showUntrackedFilesNotification(@NotNull VirtualFile root, @NotNull Collection<String> relativePaths) {
myUiHandler.showUntrackedFilesNotification(getOperationName(), root, relativePaths);
}
private void showUntrackedFilesDialogWithRollback(@NotNull VirtualFile root, @NotNull Collection<String> relativePaths) {
boolean ok = myUiHandler.showUntrackedFilesDialogWithRollback(getOperationName(), getRollbackProposal(), root, relativePaths);
if (ok) {
rollback();
}
}
/**
* For each of the given repositories, looks at the diff between the current branch and the given branch and converts it to a list of
* local changes.
* TODO this is non-optimal and even incorrect, since such a diff shows the difference between committed changes
*/
@NotNull
Map<GitRepository, List<Change>> collectLocalChangesConflictingWithBranch(@NotNull Collection<GitRepository> repositories,
@NotNull String currentBranch, @NotNull String otherBranch) {
Map<GitRepository, List<Change>> changes = new HashMap<GitRepository, List<Change>>();
for (GitRepository repository : repositories) {
try {
Collection<String> diff = GitUtil.getPathsDiffBetweenRefs(myGit, repository, currentBranch, otherBranch);
List<Change> changesInRepo = GitUtil.findLocalChangesForPaths(myProject, repository.getRoot(), diff, false);
if (!changesInRepo.isEmpty()) {
changes.put(repository, changesInRepo);
}
}
catch (VcsException e) {
// ignoring the exception: it is not fatal if we can't collect such a diff from the other repositories.
// At worst, the user will get a duplicate dialog proposing the smart checkout.
LOG.warn(String.format("Couldn't collect diff between %s and %s in %s", currentBranch, otherBranch, repository.getRoot()), e);
}
}
return changes;
}
/**
* When a checkout or merge operation on a repository fails with the error "local changes would be overwritten by...",
* the affected local files are captured by the {@link git4idea.commands.GitMessageWithFilesDetector detector}.
* Then all remaining (not yet successful) repositories are checked to see whether they would fail with the same problem.
* All collected local changes which prevent the operation, together with these repositories, are returned.
* @param currentRepository The first repository which failed the operation.
* @param localChangesOverwrittenBy The detector of local changes that would be overwritten by merge/checkout.
* @param currentBranch Current branch.
* @param nextBranch Branch to compare with (the branch to be checked out, or the branch to be merged).
* @return Repositories that have failed or would fail with the "local changes" error, together with these local changes.
*/
@NotNull
protected Pair<List<GitRepository>, List<Change>> getConflictingRepositoriesAndAffectedChanges(
@NotNull GitRepository currentRepository, @NotNull GitMessageWithFilesDetector localChangesOverwrittenBy,
String currentBranch, String nextBranch) {
// get changes overwritten by checkout from the error message captured from Git
List<Change> affectedChanges = GitUtil.findLocalChangesForPaths(myProject, currentRepository.getRoot(),
localChangesOverwrittenBy.getRelativeFilePaths(), true
);
// get all other conflicting changes
// get changes in all other repositories (except those which already have succeeded) to avoid multiple dialogs proposing smart checkout
Map<GitRepository, List<Change>> conflictingChangesInRepositories =
collectLocalChangesConflictingWithBranch(getRemainingRepositoriesExceptGiven(currentRepository), currentBranch, nextBranch);
Set<GitRepository> otherProblematicRepositories = conflictingChangesInRepositories.keySet();
List<GitRepository> allConflictingRepositories = new ArrayList<GitRepository>(otherProblematicRepositories);
allConflictingRepositories.add(currentRepository);
for (List<Change> changes : conflictingChangesInRepositories.values()) {
affectedChanges.addAll(changes);
}
return Pair.create(allConflictingRepositories, affectedChanges);
}
@NotNull
protected static String stringifyBranchesByRepos(@NotNull Map<GitRepository, String> heads) {
MultiMap<String, VirtualFile> grouped = groupByBranches(heads);
if (grouped.size() == 1) {
return grouped.keySet().iterator().next();
}
return StringUtil.join(grouped.entrySet(), new Function<Map.Entry<String, Collection<VirtualFile>>, String>() {
@Override
public String fun(Map.Entry<String, Collection<VirtualFile>> entry) {
String roots = StringUtil.join(entry.getValue(), new Function<VirtualFile, String>() {
@Override
public String fun(VirtualFile file) {
return file.getName();
}
}, ", ");
return entry.getKey() + " (in " + roots + ")";
}
}, "<br/>");
}
@NotNull
private static MultiMap<String, VirtualFile> groupByBranches(@NotNull Map<GitRepository, String> heads) {
MultiMap<String, VirtualFile> result = MultiMap.createLinked();
List<GitRepository> sortedRepos = DvcsUtil.sortRepositories(heads.keySet());
for (GitRepository repo : sortedRepos) {
result.putValue(heads.get(repo), repo.getRoot());
}
return result;
}
}
|
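/*
 * Hedged illustration (not part of the original sources): the smallest concrete subclass that the
 * abstract GitBranchOperation above requires, showing which template methods must be implemented.
 * GitNoopBranchOperation is a made-up name and its bodies are placeholders; a real operation would
 * call into myGit and use the fatalError()/rollback() helpers shown above.
 */
package git4idea.branch;

import com.intellij.openapi.project.Project;
import git4idea.GitPlatformFacade;
import git4idea.commands.Git;
import git4idea.repo.GitRepository;
import org.jetbrains.annotations.NotNull;

import java.util.Collection;

class GitNoopBranchOperation extends GitBranchOperation {

    GitNoopBranchOperation(@NotNull Project project, @NotNull GitPlatformFacade facade, @NotNull Git git,
                           @NotNull GitBranchUiHandler uiHandler, @NotNull Collection<GitRepository> repositories) {
        super(project, facade, git, uiHandler, repositories);
    }

    @Override
    protected void execute() {
        // Typical shape: act on next() while hasMoreRepositories(), marking each repository successful.
        while (hasMoreRepositories()) {
            GitRepository repository = next();
            markSuccessful(repository);
        }
        notifySuccess();
    }

    @Override
    protected void rollback() {
        // Nothing to undo for a no-op operation.
    }

    @NotNull
    @Override
    public String getSuccessMessage() {
        return "No-op operation finished";
    }

    @NotNull
    @Override
    protected String getRollbackProposal() {
        return "However, the no-op has succeeded in the following " + repositories() + ":<br/>" +
               successfulRepositoriesJoined() + "<br/>Rollback the operation?";
    }

    @NotNull
    @Override
    protected String getOperationName() {
        return "no-op";
    }
}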
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.07 at 08:01:35 PM IST
//
package com.mozu.qbintegration.model.qbmodel.allgen;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for ItemSubtotalQueryRqType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ItemSubtotalQueryRqType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <group ref="{}ListQueryWithOwnerID"/>
* </sequence>
* <attribute name="requestID" type="{}STRTYPE" />
* <attribute name="metaData" default="NoMetaData">
* <simpleType>
* <restriction base="{}STRTYPE">
* <enumeration value="NoMetaData"/>
* <enumeration value="MetaDataOnly"/>
* <enumeration value="MetaDataAndResponseData"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="iterator">
* <simpleType>
* <restriction base="{}STRTYPE">
* <enumeration value="Start"/>
* <enumeration value="Continue"/>
* <enumeration value="Stop"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="iteratorID" type="{}STRTYPE" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ItemSubtotalQueryRqType", propOrder = {
"listID",
"fullName",
"maxReturned",
"activeStatus",
"fromModifiedDate",
"toModifiedDate",
"nameFilter",
"nameRangeFilter",
"includeRetElement",
"ownerID"
})
public class ItemSubtotalQueryRqType {
@XmlElement(name = "ListID")
protected List<String> listID;
@XmlElement(name = "FullName")
protected List<String> fullName;
@XmlElement(name = "MaxReturned")
protected BigInteger maxReturned;
@XmlElement(name = "ActiveStatus", defaultValue = "ActiveOnly")
protected String activeStatus;
@XmlElement(name = "FromModifiedDate")
protected String fromModifiedDate;
@XmlElement(name = "ToModifiedDate")
protected String toModifiedDate;
@XmlElement(name = "NameFilter")
protected NameFilter nameFilter;
@XmlElement(name = "NameRangeFilter")
protected NameRangeFilter nameRangeFilter;
@XmlElement(name = "IncludeRetElement")
protected List<String> includeRetElement;
@XmlElement(name = "OwnerID")
protected List<String> ownerID;
@XmlAttribute(name = "requestID")
protected String requestID;
@XmlAttribute(name = "metaData")
protected String metaData;
@XmlAttribute(name = "iterator")
protected String iterator;
@XmlAttribute(name = "iteratorID")
protected String iteratorID;
/**
* Gets the value of the listID property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the listID property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getListID().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getListID() {
if (listID == null) {
listID = new ArrayList<String>();
}
return this.listID;
}
/**
* Gets the value of the fullName property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the fullName property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getFullName().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getFullName() {
if (fullName == null) {
fullName = new ArrayList<String>();
}
return this.fullName;
}
/**
* Gets the value of the maxReturned property.
*
* @return
* possible object is
* {@link BigInteger }
*
*/
public BigInteger getMaxReturned() {
return maxReturned;
}
/**
* Sets the value of the maxReturned property.
*
* @param value
* allowed object is
* {@link BigInteger }
*
*/
public void setMaxReturned(BigInteger value) {
this.maxReturned = value;
}
/**
* Gets the value of the activeStatus property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getActiveStatus() {
return activeStatus;
}
/**
* Sets the value of the activeStatus property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setActiveStatus(String value) {
this.activeStatus = value;
}
/**
* Gets the value of the fromModifiedDate property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getFromModifiedDate() {
return fromModifiedDate;
}
/**
* Sets the value of the fromModifiedDate property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setFromModifiedDate(String value) {
this.fromModifiedDate = value;
}
/**
* Gets the value of the toModifiedDate property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getToModifiedDate() {
return toModifiedDate;
}
/**
* Sets the value of the toModifiedDate property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setToModifiedDate(String value) {
this.toModifiedDate = value;
}
/**
* Gets the value of the nameFilter property.
*
* @return
* possible object is
* {@link NameFilter }
*
*/
public NameFilter getNameFilter() {
return nameFilter;
}
/**
* Sets the value of the nameFilter property.
*
* @param value
* allowed object is
* {@link NameFilter }
*
*/
public void setNameFilter(NameFilter value) {
this.nameFilter = value;
}
/**
* Gets the value of the nameRangeFilter property.
*
* @return
* possible object is
* {@link NameRangeFilter }
*
*/
public NameRangeFilter getNameRangeFilter() {
return nameRangeFilter;
}
/**
* Sets the value of the nameRangeFilter property.
*
* @param value
* allowed object is
* {@link NameRangeFilter }
*
*/
public void setNameRangeFilter(NameRangeFilter value) {
this.nameRangeFilter = value;
}
/**
* Gets the value of the includeRetElement property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the includeRetElement property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getIncludeRetElement().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getIncludeRetElement() {
if (includeRetElement == null) {
includeRetElement = new ArrayList<String>();
}
return this.includeRetElement;
}
/**
* Gets the value of the ownerID property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the ownerID property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getOwnerID().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link String }
*
*
*/
public List<String> getOwnerID() {
if (ownerID == null) {
ownerID = new ArrayList<String>();
}
return this.ownerID;
}
/**
* Gets the value of the requestID property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getRequestID() {
return requestID;
}
/**
* Sets the value of the requestID property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setRequestID(String value) {
this.requestID = value;
}
/**
* Gets the value of the metaData property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getMetaData() {
if (metaData == null) {
return "NoMetaData";
} else {
return metaData;
}
}
/**
* Sets the value of the metaData property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setMetaData(String value) {
this.metaData = value;
}
/**
* Gets the value of the iterator property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getIterator() {
return iterator;
}
/**
* Sets the value of the iterator property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setIterator(String value) {
this.iterator = value;
}
/**
* Gets the value of the iteratorID property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getIteratorID() {
return iteratorID;
}
/**
* Sets the value of the iteratorID property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setIteratorID(String value) {
this.iteratorID = value;
}
}
|
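/*
 * Hedged usage sketch (not part of the generated sources): populating the generated query type
 * above and marshalling it to XML. Since the class carries no @XmlRootElement, it is wrapped in a
 * JAXBElement here; the element name "ItemSubtotalQueryRq" is an assumption derived from the type
 * name, and a real integration would normally go through the generated ObjectFactory instead.
 */
import java.math.BigInteger;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.Marshaller;
import javax.xml.namespace.QName;

import com.mozu.qbintegration.model.qbmodel.allgen.ItemSubtotalQueryRqType;

public class ItemSubtotalQueryRqSketch {
    public static void main(String[] args) throws Exception {
        ItemSubtotalQueryRqType rq = new ItemSubtotalQueryRqType();
        rq.setRequestID("1");
        rq.setMaxReturned(BigInteger.valueOf(50));
        // The FullName accessor returns the live list, per the generated javadoc above.
        rq.getFullName().add("Subtotal");

        Marshaller marshaller = JAXBContext.newInstance(ItemSubtotalQueryRqType.class).createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        marshaller.marshal(new JAXBElement<ItemSubtotalQueryRqType>(
                new QName("ItemSubtotalQueryRq"), ItemSubtotalQueryRqType.class, rq), System.out);
    }
}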
|
/**
* Project: richContentMediaSearchService
* ROLE-Project
* authors: [email protected], [email protected]
* This software uses the GNU GPL
*/
package de.imc.advancedMediaSearch.target;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import de.imc.advancedMediaSearch.http.ApacheHttpClient;
import de.imc.advancedMediaSearch.http.RESTHttpClient;
import de.imc.advancedMediaSearch.lom.LomHelper;
import de.imc.advancedMediaSearch.properties.AMSPropertyManager;
import de.imc.advancedMediaSearch.result.MediaType;
import de.imc.advancedMediaSearch.result.ResultEntity;
import de.imc.advancedMediaSearch.result.ResultSet;
/**
* @author [email protected]
*
*/
public class IcoperTarget extends Target {
public static final String ID = "icoper.org";
// TODO:
private static final String DEFAULTAPIURL = "http://oics.icoper.org/oics.atom";
// TODO
private String apiurl = AMSPropertyManager.getInstance().getStringValue(
"de.imc.advancedMediaSearch.baseurls.icoper", DEFAULTAPIURL);
private static Logger logger = Logger.getLogger(IcoperTarget.class);
public IcoperTarget() {
super();
initializeMetaData();
}
public void initializeMetaData() {
name = "iCoper";
url = "http://www.icoper.org";
mediaTypeIconUrl = "";
// TODO
description = "ICOPER is a Best Practice Network co-funded by the eContentplus programme of the European Community. The 30-months-project started in September 2008 and has the mission to collect and further develop best practices for higher education tackling issues like creating learning designs and teaching methods, authoring content for re-use, transferring knowledge in an outcome-oriented way and assessing it, or evaluating learning activities.";
String[] mTypes = { MediaType.TEXT.toString() };
mediaTypes = mTypes;
iconUrl = AMSPropertyManager
.getInstance()
.getStringValue("de.imc.advancedMediaSearch.iconurls.icoper",
"http://role-demo.de:8080/richMediaContentSearchResources/icons/icoper.ico");
}
/*
* (non-Javadoc)
*
* @see
* de.imc.advancedMediaSearch.target.Target#searchByTags(java.lang.String,
* de.imc.advancedMediaSearch.target.QueryArguments)
*/
@Override
public ResultSet searchByTags(String tagQuery, QueryArguments args) {
String url = apiurl;
url += "?query=" + encodeQueryString(tagQuery);
url += "&page_size=" + getMaxQueryResults();
if (args.isLanguageSet()) {
url += "&filter_expression=general.language=" + args.getLanguage();
}
url+="=&filter=general.keyword";
return parseResult(executeQueryUrl(url), tagQuery);
}
/*
* (non-Javadoc)
*
* @see
* de.imc.advancedMediaSearch.target.Target#searchByFullTextQuery(java.lang
* .String, de.imc.advancedMediaSearch.target.QueryArguments)
*/
@Override
public ResultSet searchByFullTextQuery(String searchTermQuery,
QueryArguments args) {
String url = apiurl;
url += "?query=" + encodeQueryString(searchTermQuery);
url += "&page_size=" + getMaxQueryResults();
if (args.isLanguageSet()) {
url += "&filter_expression=general.language=" + args.getLanguage();
}
return parseResult(executeQueryUrl(url), searchTermQuery);
}
/*
* (non-Javadoc)
*
* @see
* de.imc.advancedMediaSearch.target.Target#searchByAuthor(java.lang.String,
* de.imc.advancedMediaSearch.target.QueryArguments)
*/
@Override
public ResultSet searchByAuthor(String authorQuery, QueryArguments args) {
String url = apiurl;
url += "?query=" + encodeQueryString(authorQuery);
url += "&page_size=" + getMaxQueryResults();
if (args.isLanguageSet()) {
url += "&filter_expression=general.language=" + args.getLanguage();
}
url+="=&filter=lifeCycle.contribute";
return parseResult(executeQueryUrl(url), authorQuery);
}
/*
* (non-Javadoc)
*
* @see de.imc.advancedMediaSearch.target.Target#getId()
*/
@Override
public String getId() {
return ID;
}
private ResultSet parseResult(Document doc, String queryString) {
ResultSet results = new ResultSet();
results.addSourceRepository(ID);
results.setSearchQuery(queryString);
try {
// get all entry nodes with xmlns=atom
NodeList entries = doc.getElementsByTagName("entry");
for (int i = 0; i < entries.getLength(); i++) {
Node curEntry = entries.item(i);
if (curEntry.getAttributes() != null) {
if (curEntry.getAttributes().getNamedItem("xmlns") != null) {
if (curEntry
.getAttributes()
.getNamedItem("xmlns")
.getNodeValue()
.equalsIgnoreCase("http://www.w3.org/2005/Atom")) {
// we are in the correct entry node now
Element curEntryElement = (Element) curEntry;
// pick the metadata node
if (curEntryElement
.getElementsByTagName("metadata") != null) {
Node metaDataNode = curEntryElement
.getElementsByTagName("metadata").item(
0);
// switch to the lom node now
Element metaDataElement = (Element) metaDataNode;
if (metaDataElement.getElementsByTagName("lom") != null) {
Node lomNode = metaDataElement
.getElementsByTagName("lom")
.item(0);
// step into the lom main structure now
// we are inside the lom main node now
try {
ResultEntity et = new ResultEntity();
et.setSource(ID);
NodeList lomNodes = lomNode
.getChildNodes();
// parse entries lom sub-nodes
for (int j = 0; j < lomNodes
.getLength(); j++) {
Node lomSubNode = lomNodes.item(j);
if (lomSubNode
.getNodeName()
.equalsIgnoreCase("general")) {
et = LomHelper
.parseLomGeneralNode(
lomSubNode, et,
ID);
} else if (lomSubNode.getNodeName()
.equalsIgnoreCase(
"technical")) {
et = LomHelper
.parseLomTechnicalNode(
lomSubNode, et);
} else if (lomSubNode.getNodeName()
.equalsIgnoreCase(
"metametadata")) {
et = LomHelper
.parseLomMetaMetadataNode(
lomSubNode, et,
ID);
} else if (lomSubNode.getNodeName()
.equalsIgnoreCase(
"lifecycle")) {
et = LomHelper
.parseLomLifecycleNode(
lomSubNode, et,
ID);
}
}
results.add(et);
} catch (Exception err) {
logger.debug("An error occured while parsing: "
+ err.getMessage());
}
}
}
}
}
}
results = filterResult(results);
}
} catch (Exception e) {
logger.debug("An error occured while parsing: " + e.getMessage());
return results;
}
return results;
}
private Document executeQueryUrl(String url) {
RESTHttpClient client = new ApacheHttpClient();
try {
return client.executeGETURL(new URL(url));
} catch (MalformedURLException e) {
logger.debug(e.getMessage());
return null;
} catch (IOException e) {
logger.debug(e.getMessage());
return null;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.broker.client.net;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.FileRegion;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.rocketmq.broker.BrokerController;
import org.apache.rocketmq.broker.client.ClientChannelInfo;
import org.apache.rocketmq.broker.client.ConsumerGroupInfo;
import org.apache.rocketmq.broker.pagecache.OneMessageTransfer;
import org.apache.rocketmq.common.MQVersion;
import org.apache.rocketmq.common.TopicConfig;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.message.MessageQueueForC;
import org.apache.rocketmq.common.protocol.RequestCode;
import org.apache.rocketmq.common.protocol.ResponseCode;
import org.apache.rocketmq.common.protocol.body.GetConsumerStatusBody;
import org.apache.rocketmq.common.protocol.body.ResetOffsetBody;
import org.apache.rocketmq.common.protocol.body.ResetOffsetBodyForC;
import org.apache.rocketmq.common.protocol.header.CheckTransactionStateRequestHeader;
import org.apache.rocketmq.common.protocol.header.GetConsumerStatusRequestHeader;
import org.apache.rocketmq.common.protocol.header.NotifyConsumerIdsChangedRequestHeader;
import org.apache.rocketmq.common.protocol.header.ResetOffsetRequestHeader;
import org.apache.rocketmq.remoting.common.RemotingHelper;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.apache.rocketmq.store.SelectMappedBufferResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Broker2Client {
private static final Logger log = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME);
private final BrokerController brokerController;
public Broker2Client(BrokerController brokerController) {
this.brokerController = brokerController;
}
public void checkProducerTransactionState(
final Channel channel,
final CheckTransactionStateRequestHeader requestHeader,
final SelectMappedBufferResult selectMappedBufferResult) {
RemotingCommand request =
RemotingCommand.createRequestCommand(RequestCode.CHECK_TRANSACTION_STATE, requestHeader);
request.markOnewayRPC();
try {
FileRegion fileRegion =
new OneMessageTransfer(request.encodeHeader(selectMappedBufferResult.getSize()),
selectMappedBufferResult);
channel.writeAndFlush(fileRegion).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
selectMappedBufferResult.release();
if (!future.isSuccess()) {
log.error("invokeProducer failed,", future.cause());
}
}
});
} catch (Throwable e) {
log.error("invokeProducer exception", e);
selectMappedBufferResult.release();
}
}
public RemotingCommand callClient(final Channel channel,
final RemotingCommand request
) throws RemotingSendRequestException, RemotingTimeoutException, InterruptedException {
return this.brokerController.getRemotingServer().invokeSync(channel, request, 10000);
}
public void notifyConsumerIdsChanged(
final Channel channel,
final String consumerGroup) {
if (null == consumerGroup) {
log.error("notifyConsumerIdsChanged consumerGroup is null");
return;
}
NotifyConsumerIdsChangedRequestHeader requestHeader = new NotifyConsumerIdsChangedRequestHeader();
requestHeader.setConsumerGroup(consumerGroup);
RemotingCommand request =
RemotingCommand.createRequestCommand(RequestCode.NOTIFY_CONSUMER_IDS_CHANGED, requestHeader);
try {
this.brokerController.getRemotingServer().invokeOneway(channel, request, 10);
} catch (Exception e) {
log.error("notifyConsumerIdsChanged exception, " + consumerGroup, e.getMessage());
}
}
public RemotingCommand resetOffset(String topic, String group, long timeStamp, boolean isForce) {
return resetOffset(topic, group, timeStamp, isForce, false);
}
public RemotingCommand resetOffset(String topic, String group, long timeStamp, boolean isForce,
boolean isC) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
if (null == topicConfig) {
log.error("[reset-offset] reset offset failed, no topic in this broker. topic={}", topic);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("[reset-offset] reset offset failed, no topic in this broker. topic=" + topic);
return response;
}
Map<MessageQueue, Long> offsetTable = new HashMap<MessageQueue, Long>();
for (int i = 0; i < topicConfig.getWriteQueueNums(); i++) {
MessageQueue mq = new MessageQueue();
mq.setBrokerName(this.brokerController.getBrokerConfig().getBrokerName());
mq.setTopic(topic);
mq.setQueueId(i);
long consumerOffset =
this.brokerController.getConsumerOffsetManager().queryOffset(group, topic, i);
if (-1 == consumerOffset) {
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark(String.format("THe consumer group <%s> not exist", group));
return response;
}
long timeStampOffset;
if (timeStamp == -1) {
timeStampOffset = this.brokerController.getMessageStore().getMaxOffsetInQuque(topic, i);
} else {
timeStampOffset = this.brokerController.getMessageStore().getOffsetInQueueByTime(topic, i, timeStamp);
}
if (timeStampOffset < 0) {
log.warn("reset offset is invalid. topic={}, queueId={}, timeStampOffset={}", topic, i, timeStampOffset);
timeStampOffset = 0;
}
if (isForce || timeStampOffset < consumerOffset) {
offsetTable.put(mq, timeStampOffset);
} else {
offsetTable.put(mq, consumerOffset);
}
}
ResetOffsetRequestHeader requestHeader = new ResetOffsetRequestHeader();
requestHeader.setTopic(topic);
requestHeader.setGroup(group);
requestHeader.setTimestamp(timeStamp);
RemotingCommand request =
RemotingCommand.createRequestCommand(RequestCode.RESET_CONSUMER_CLIENT_OFFSET, requestHeader);
if (isC) {
// c++ language
ResetOffsetBodyForC body = new ResetOffsetBodyForC();
List<MessageQueueForC> offsetList = convertOffsetTable2OffsetList(offsetTable);
body.setOffsetTable(offsetList);
request.setBody(body.encode());
} else {
// other language
ResetOffsetBody body = new ResetOffsetBody();
body.setOffsetTable(offsetTable);
request.setBody(body.encode());
}
ConsumerGroupInfo consumerGroupInfo =
this.brokerController.getConsumerManager().getConsumerGroupInfo(group);
if (consumerGroupInfo != null && !consumerGroupInfo.getAllChannel().isEmpty()) {
ConcurrentHashMap<Channel, ClientChannelInfo> channelInfoTable =
consumerGroupInfo.getChannelInfoTable();
for (Map.Entry<Channel, ClientChannelInfo> entry : channelInfoTable.entrySet()) {
int version = entry.getValue().getVersion();
if (version >= MQVersion.Version.V3_0_7_SNAPSHOT.ordinal()) {
try {
this.brokerController.getRemotingServer().invokeOneway(entry.getKey(), request, 5000);
log.info("[reset-offset] reset offset success. topic={}, group={}, clientId={}",
topic, group, entry.getValue().getClientId());
} catch (Exception e) {
log.error("[reset-offset] reset offset exception. topic={}, group={}",
new Object[] {topic, group}, e);
}
} else {
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("the client does not support this feature. version="
+ MQVersion.getVersionDesc(version));
log.warn("[reset-offset] the client does not support this feature. version={}",
RemotingHelper.parseChannelRemoteAddr(entry.getKey()), MQVersion.getVersionDesc(version));
return response;
}
}
} else {
String errorInfo =
String.format("Consumer not online, so can not reset offset, Group: %s Topic: %s Timestamp: %d",
requestHeader.getGroup(),
requestHeader.getTopic(),
requestHeader.getTimestamp());
log.error(errorInfo);
response.setCode(ResponseCode.CONSUMER_NOT_ONLINE);
response.setRemark(errorInfo);
return response;
}
response.setCode(ResponseCode.SUCCESS);
ResetOffsetBody resBody = new ResetOffsetBody();
resBody.setOffsetTable(offsetTable);
response.setBody(resBody.encode());
return response;
}
private List<MessageQueueForC> convertOffsetTable2OffsetList(Map<MessageQueue, Long> table) {
List<MessageQueueForC> list = new ArrayList<>();
for (Entry<MessageQueue, Long> entry : table.entrySet()) {
MessageQueue mq = entry.getKey();
MessageQueueForC tmp =
new MessageQueueForC(mq.getTopic(), mq.getBrokerName(), mq.getQueueId(), entry.getValue());
list.add(tmp);
}
return list;
}
public RemotingCommand getConsumeStatus(String topic, String group, String originClientId) {
final RemotingCommand result = RemotingCommand.createResponseCommand(null);
GetConsumerStatusRequestHeader requestHeader = new GetConsumerStatusRequestHeader();
requestHeader.setTopic(topic);
requestHeader.setGroup(group);
RemotingCommand request =
RemotingCommand.createRequestCommand(RequestCode.GET_CONSUMER_STATUS_FROM_CLIENT,
requestHeader);
Map<String, Map<MessageQueue, Long>> consumerStatusTable =
new HashMap<String, Map<MessageQueue, Long>>();
ConcurrentHashMap<Channel, ClientChannelInfo> channelInfoTable =
this.brokerController.getConsumerManager().getConsumerGroupInfo(group).getChannelInfoTable();
if (null == channelInfoTable || channelInfoTable.isEmpty()) {
result.setCode(ResponseCode.SYSTEM_ERROR);
result.setRemark(String.format("No Any Consumer online in the consumer group: [%s]", group));
return result;
}
for (Map.Entry<Channel, ClientChannelInfo> entry : channelInfoTable.entrySet()) {
int version = entry.getValue().getVersion();
String clientId = entry.getValue().getClientId();
if (version < MQVersion.Version.V3_0_7_SNAPSHOT.ordinal()) {
result.setCode(ResponseCode.SYSTEM_ERROR);
result.setRemark("the client does not support this feature. version="
+ MQVersion.getVersionDesc(version));
log.warn("[get-consumer-status] the client does not support this feature. version={}",
RemotingHelper.parseChannelRemoteAddr(entry.getKey()), MQVersion.getVersionDesc(version));
return result;
} else if (UtilAll.isBlank(originClientId) || originClientId.equals(clientId)) {
try {
RemotingCommand response =
this.brokerController.getRemotingServer().invokeSync(entry.getKey(), request, 5000);
assert response != null;
switch (response.getCode()) {
case ResponseCode.SUCCESS: {
if (response.getBody() != null) {
GetConsumerStatusBody body =
GetConsumerStatusBody.decode(response.getBody(),
GetConsumerStatusBody.class);
consumerStatusTable.put(clientId, body.getMessageQueueTable());
log.info(
"[get-consumer-status] get consumer status success. topic={}, group={}, channelRemoteAddr={}",
topic, group, clientId);
}
}
default:
break;
}
} catch (Exception e) {
log.error(
"[get-consumer-status] get consumer status exception. topic={}, group={}, offset={}",
new Object[] {topic, group}, e);
}
if (!UtilAll.isBlank(originClientId) && originClientId.equals(clientId)) {
break;
}
}
}
result.setCode(ResponseCode.SUCCESS);
GetConsumerStatusBody resBody = new GetConsumerStatusBody();
resBody.setConsumerTable(consumerStatusTable);
result.setBody(resBody.encode());
return result;
}
}
|
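/*
 * Hedged usage sketch (not part of the original sources): how broker-side code might invoke the
 * resetOffset(...) API above. Reaching Broker2Client via brokerController.getBroker2Client()
 * mirrors how other broker request processors obtain it, but treat the exact wiring (and the
 * class name ResetOffsetSketch) as assumptions for illustration.
 */
import org.apache.rocketmq.broker.BrokerController;
import org.apache.rocketmq.broker.client.net.Broker2Client;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;

public class ResetOffsetSketch {
    public static RemotingCommand resetToLatest(BrokerController brokerController, String topic, String group) {
        Broker2Client broker2Client = brokerController.getBroker2Client();
        // timeStamp == -1 resets each queue to its max offset (see resetOffset above);
        // isForce == true overwrites the consumer offset even if it is ahead of the target.
        RemotingCommand response = broker2Client.resetOffset(topic, group, -1L, true);
        // Non-SUCCESS response codes above mean: unknown topic, no consumer online,
        // or a consumer client version too old to support offset reset.
        return response;
    }
}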
|
package com.java110.vo.api;
import com.java110.dto.RoomAttrDto;
import java.io.Serializable;
import java.util.List;
/**
* @ClassName ApiRoomVo
* @Description TODO
* @Author wuxw
* @Date 2019/5/8 0:26
* @Version 1.0
* add by wuxw 2019/5/8
**/
public class ApiRoomDataVo implements Serializable {
private String feeCoefficient;
private String section;
private String remark;
private String userName;
private String roomId;
private String layer;
private String builtUpArea;
private String roomNum;
private String unitId;
private String unitNum;
private String floorId;
private String floorNum;
private String state;
private String stateName;
private String apartment;
private String apartmentName;
private String roomType;
private String ownerId;
private String ownerName;
private String idCard;
private String link;
private List<RoomAttrDto> roomAttrDto;
public String getFeeCoefficient() {
return feeCoefficient;
}
public void setFeeCoefficient(String feeCoefficient) {
this.feeCoefficient = feeCoefficient;
}
public String getSection() {
return section;
}
public void setSection(String section) {
this.section = section;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getRoomId() {
return roomId;
}
public void setRoomId(String roomId) {
this.roomId = roomId;
}
public String getLayer() {
return layer;
}
public void setLayer(String layer) {
this.layer = layer;
}
public String getBuiltUpArea() {
return builtUpArea;
}
public void setBuiltUpArea(String builtUpArea) {
this.builtUpArea = builtUpArea;
}
public String getRoomNum() {
return roomNum;
}
public void setRoomNum(String roomNum) {
this.roomNum = roomNum;
}
public String getUnitId() {
return unitId;
}
public void setUnitId(String unitId) {
this.unitId = unitId;
}
public String getApartment() {
return apartment;
}
public void setApartment(String apartment) {
this.apartment = apartment;
}
public String getUnitNum() {
return unitNum;
}
public void setUnitNum(String unitNum) {
this.unitNum = unitNum;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getFloorId() {
return floorId;
}
public void setFloorId(String floorId) {
this.floorId = floorId;
}
public String getFloorNum() {
return floorNum;
}
public void setFloorNum(String floorNum) {
this.floorNum = floorNum;
}
public String getApartmentName() {
return apartmentName;
}
public void setApartmentName(String apartmentName) {
this.apartmentName = apartmentName;
}
public String getOwnerId() {
return ownerId;
}
public void setOwnerId(String ownerId) {
this.ownerId = ownerId;
}
public String getOwnerName() {
return ownerName;
}
public void setOwnerName(String ownerName) {
this.ownerName = ownerName;
}
public String getIdCard() {
return idCard;
}
public void setIdCard(String idCard) {
this.idCard = idCard;
}
public String getLink() {
return link;
}
public void setLink(String link) {
this.link = link;
}
public List<RoomAttrDto> getRoomAttrDto() {
return roomAttrDto;
}
public void setRoomAttrDto(List<RoomAttrDto> roomAttrDto) {
this.roomAttrDto = roomAttrDto;
}
public String getStateName() {
return stateName;
}
public void setStateName(String stateName) {
this.stateName = stateName;
}
public String getRoomType() {
return roomType;
}
public void setRoomType(String roomType) {
this.roomType = roomType;
}
}
|
|
/*
* Copyright (C) 2014 ohmage
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.viewpagerindicator;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.drawable.Drawable;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v4.view.MotionEventCompat;
import android.support.v4.view.ViewConfigurationCompat;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import org.ohmage.app.R;
import org.ohmage.widget.VerticalViewPager;
import static android.graphics.Paint.ANTI_ALIAS_FLAG;
import static android.widget.LinearLayout.HORIZONTAL;
import static android.widget.LinearLayout.VERTICAL;
/**
* Draws circles (one for each view). The current view position is filled and
* others are only stroked.
*/
public class CirclePageIndicator extends View implements PageIndicator {
private static final int INVALID_POINTER = -1;
private float mRadius;
private final Paint mPaintPageFill = new Paint(ANTI_ALIAS_FLAG);
private final Paint mPaintStroke = new Paint(ANTI_ALIAS_FLAG);
private final Paint mPaintFill = new Paint(ANTI_ALIAS_FLAG);
private VerticalViewPager mViewPager;
private VerticalViewPager.OnPageChangeListener mListener;
private int mCurrentPage;
private int mSnapPage;
private double mPageOffset;
private int mScrollState;
private int mOrientation;
private boolean mCentered;
private boolean mSnap;
private int mTouchSlop;
private float mLastMotionX = -1;
private int mActivePointerId = INVALID_POINTER;
private boolean mIsDragging;
public CirclePageIndicator(Context context) {
this(context, null);
}
public CirclePageIndicator(Context context, AttributeSet attrs) {
this(context, attrs, R.attr.vpiCirclePageIndicatorStyle);
}
public CirclePageIndicator(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
if (isInEditMode()) return;
//Load defaults from resources
final Resources res = getResources();
final int defaultPageColor = res.getColor(R.color.default_circle_indicator_page_color);
final int defaultFillColor = res.getColor(R.color.default_circle_indicator_fill_color);
final int defaultOrientation = res.getInteger(R.integer.default_circle_indicator_orientation);
final int defaultStrokeColor = res.getColor(R.color.default_circle_indicator_stroke_color);
final float defaultStrokeWidth = res.getDimension(R.dimen.default_circle_indicator_stroke_width);
final float defaultRadius = res.getDimension(R.dimen.default_circle_indicator_radius);
final boolean defaultCentered = res.getBoolean(R.bool.default_circle_indicator_centered);
final boolean defaultSnap = res.getBoolean(R.bool.default_circle_indicator_snap);
//Retrieve styles attributes
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CirclePageIndicator, defStyle, 0);
mCentered = a.getBoolean(R.styleable.CirclePageIndicator_centered, defaultCentered);
mOrientation = a.getInt(R.styleable.CirclePageIndicator_android_orientation, defaultOrientation);
mPaintPageFill.setStyle(Style.FILL);
mPaintPageFill.setColor(a.getColor(R.styleable.CirclePageIndicator_pageColor, defaultPageColor));
mPaintStroke.setStyle(Style.STROKE);
mPaintStroke.setColor(a.getColor(R.styleable.CirclePageIndicator_strokeColor, defaultStrokeColor));
mPaintStroke.setStrokeWidth(a.getDimension(R.styleable.CirclePageIndicator_strokeWidth, defaultStrokeWidth));
mPaintFill.setStyle(Style.FILL);
mPaintFill.setColor(a.getColor(R.styleable.CirclePageIndicator_fillColor, defaultFillColor));
mRadius = a.getDimension(R.styleable.CirclePageIndicator_radius, defaultRadius);
mSnap = a.getBoolean(R.styleable.CirclePageIndicator_snap, defaultSnap);
Drawable background = a.getDrawable(R.styleable.CirclePageIndicator_android_background);
if (background != null) {
setBackgroundDrawable(background);
}
a.recycle();
final ViewConfiguration configuration = ViewConfiguration.get(context);
mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration);
}
public void setCentered(boolean centered) {
mCentered = centered;
invalidate();
}
public boolean isCentered() {
return mCentered;
}
public void setPageColor(int pageColor) {
mPaintPageFill.setColor(pageColor);
invalidate();
}
public int getPageColor() {
return mPaintPageFill.getColor();
}
public void setFillColor(int fillColor) {
mPaintFill.setColor(fillColor);
invalidate();
}
public int getFillColor() {
return mPaintFill.getColor();
}
public void setOrientation(int orientation) {
switch (orientation) {
case HORIZONTAL:
case VERTICAL:
mOrientation = orientation;
requestLayout();
break;
default:
throw new IllegalArgumentException("Orientation must be either HORIZONTAL or VERTICAL.");
}
}
public int getOrientation() {
return mOrientation;
}
public void setStrokeColor(int strokeColor) {
mPaintStroke.setColor(strokeColor);
invalidate();
}
public int getStrokeColor() {
return mPaintStroke.getColor();
}
public void setStrokeWidth(float strokeWidth) {
mPaintStroke.setStrokeWidth(strokeWidth);
invalidate();
}
public float getStrokeWidth() {
return mPaintStroke.getStrokeWidth();
}
public void setRadius(float radius) {
mRadius = radius;
invalidate();
}
public float getRadius() {
return mRadius;
}
public void setSnap(boolean snap) {
mSnap = snap;
invalidate();
}
public boolean isSnap() {
return mSnap;
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mViewPager == null) {
return;
}
final int count = mViewPager.getAdapter().getCount();
if (count == 0) {
return;
}
if (mCurrentPage >= count) {
setCurrentItem(count - 1);
return;
}
int longSize;
int longPaddingBefore;
int longPaddingAfter;
int shortPaddingBefore;
if (mOrientation == HORIZONTAL) {
longSize = getWidth();
longPaddingBefore = getPaddingLeft();
longPaddingAfter = getPaddingRight();
shortPaddingBefore = getPaddingTop();
} else {
longSize = getHeight();
longPaddingBefore = getPaddingTop();
longPaddingAfter = getPaddingBottom();
shortPaddingBefore = getPaddingLeft();
}
final float threeRadius = mRadius * 3;
final float shortOffset = shortPaddingBefore + mRadius;
float longOffset = longPaddingBefore + mRadius;
if (mCentered) {
longOffset += ((longSize - longPaddingBefore - longPaddingAfter) / 2.0f) - ((count * threeRadius) / 2.0f);
}
float dX;
float dY;
float pageFillRadius = mRadius;
if (mPaintStroke.getStrokeWidth() > 0) {
pageFillRadius -= mPaintStroke.getStrokeWidth() / 2.0f;
}
//Draw stroked circles
for (int iLoop = 0; iLoop < count; iLoop++) {
float drawLong = longOffset + (iLoop * threeRadius);
if (mOrientation == HORIZONTAL) {
dX = drawLong;
dY = shortOffset;
} else {
dX = shortOffset;
dY = drawLong;
}
// Only paint fill if not completely transparent
if (mPaintPageFill.getAlpha() > 0) {
canvas.drawCircle(dX, dY, pageFillRadius, mPaintPageFill);
}
// Only paint stroke if a stroke width was non-zero
if (pageFillRadius != mRadius) {
canvas.drawCircle(dX, dY, mRadius, mPaintStroke);
}
}
//Draw the filled circle according to the current scroll
float cx = (mSnap ? mSnapPage : mCurrentPage) * threeRadius;
if (!mSnap) {
cx += mPageOffset * threeRadius;
}
if (mOrientation == HORIZONTAL) {
dX = longOffset + cx;
dY = shortOffset;
} else {
dX = shortOffset;
dY = longOffset + cx;
}
canvas.drawCircle(dX, dY, mRadius, mPaintFill);
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
if (super.onTouchEvent(ev)) {
return true;
}
if ((mViewPager == null) || (mViewPager.getAdapter().getCount() == 0)) {
return false;
}
final int action = ev.getAction() & MotionEventCompat.ACTION_MASK;
switch (action) {
case MotionEvent.ACTION_DOWN:
mActivePointerId = MotionEventCompat.getPointerId(ev, 0);
mLastMotionX = ev.getX();
break;
case MotionEvent.ACTION_MOVE: {
final int activePointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId);
final float x = MotionEventCompat.getX(ev, activePointerIndex);
final float deltaX = x - mLastMotionX;
if (!mIsDragging) {
if (Math.abs(deltaX) > mTouchSlop) {
mIsDragging = true;
}
}
if (mIsDragging) {
mLastMotionX = x;
if (mViewPager.isFakeDragging() || mViewPager.beginFakeDrag()) {
mViewPager.fakeDragBy(deltaX);
}
}
break;
}
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
if (!mIsDragging) {
final int count = mViewPager.getAdapter().getCount();
final int width = getWidth();
final float halfWidth = width / 2f;
final float sixthWidth = width / 6f;
if ((mCurrentPage > 0) && (ev.getX() < halfWidth - sixthWidth)) {
if (action != MotionEvent.ACTION_CANCEL) {
mViewPager.setCurrentItem(mCurrentPage - 1);
}
return true;
} else if ((mCurrentPage < count - 1) && (ev.getX() > halfWidth + sixthWidth)) {
if (action != MotionEvent.ACTION_CANCEL) {
mViewPager.setCurrentItem(mCurrentPage + 1);
}
return true;
}
}
mIsDragging = false;
mActivePointerId = INVALID_POINTER;
if (mViewPager.isFakeDragging()) mViewPager.endFakeDrag();
break;
case MotionEventCompat.ACTION_POINTER_DOWN: {
final int index = MotionEventCompat.getActionIndex(ev);
mLastMotionX = MotionEventCompat.getX(ev, index);
mActivePointerId = MotionEventCompat.getPointerId(ev, index);
break;
}
case MotionEventCompat.ACTION_POINTER_UP:
final int pointerIndex = MotionEventCompat.getActionIndex(ev);
final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex);
if (pointerId == mActivePointerId) {
final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex);
}
mLastMotionX = MotionEventCompat.getX(ev, MotionEventCompat.findPointerIndex(ev, mActivePointerId));
break;
}
return true;
}
@Override
public void setViewPager(VerticalViewPager view) {
if (mViewPager == view) {
return;
}
if (mViewPager != null) {
mViewPager.setOnPageChangeListener(null);
}
if (view.getAdapter() == null) {
throw new IllegalStateException("ViewPager does not have adapter instance.");
}
mViewPager = view;
mViewPager.setOnPageChangeListener(this);
invalidate();
}
@Override
public void setViewPager(VerticalViewPager view, int initialPosition) {
setViewPager(view);
setCurrentItem(initialPosition);
}
@Override
public void setCurrentItem(int item) {
if (mViewPager == null) {
throw new IllegalStateException("ViewPager has not been bound.");
}
mViewPager.setCurrentItem(item);
mCurrentPage = item;
invalidate();
}
@Override
public void notifyDataSetChanged() {
invalidate();
}
@Override
public void onPageScrollStateChanged(int state) {
mScrollState = state;
if (mListener != null) {
mListener.onPageScrollStateChanged(state);
}
}
@Override
public void onPageScrolled(int position, double positionOffset, int positionOffsetPixels) {
mCurrentPage = position;
mPageOffset = positionOffset;
invalidate();
if (mListener != null) {
mListener.onPageScrolled(position, positionOffset, positionOffsetPixels);
}
}
@Override
public void onPageSelected(int position) {
if (mSnap || mScrollState == ViewPager.SCROLL_STATE_IDLE) {
mCurrentPage = position;
mSnapPage = position;
invalidate();
}
if (mListener != null) {
mListener.onPageSelected(position);
}
}
@Override
public void setOnPageChangeListener(VerticalViewPager.OnPageChangeListener listener) {
mListener = listener;
}
/*
* (non-Javadoc)
*
* @see android.view.View#onMeasure(int, int)
*/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (mOrientation == HORIZONTAL) {
setMeasuredDimension(measureLong(widthMeasureSpec), measureShort(heightMeasureSpec));
} else {
setMeasuredDimension(measureShort(widthMeasureSpec), measureLong(heightMeasureSpec));
}
}
/**
* Determines the width of this view
*
* @param measureSpec
* A measureSpec packed into an int
* @return The width of the view, honoring constraints from measureSpec
*/
private int measureLong(int measureSpec) {
int result;
int specMode = MeasureSpec.getMode(measureSpec);
int specSize = MeasureSpec.getSize(measureSpec);
if ((specMode == MeasureSpec.EXACTLY) || (mViewPager == null)) {
//We were told how big to be
result = specSize;
} else {
//Calculate the width according the views count
final int count = mViewPager.getAdapter().getCount();
result = (int)(getPaddingLeft() + getPaddingRight()
+ (count * 2 * mRadius) + (count - 1) * mRadius + 1);
//Respect AT_MOST value if that was what is called for by measureSpec
if (specMode == MeasureSpec.AT_MOST) {
result = Math.min(result, specSize);
}
}
return result;
}
/**
* Determines the height of this view
*
* @param measureSpec
* A measureSpec packed into an int
* @return The height of the view, honoring constraints from measureSpec
*/
private int measureShort(int measureSpec) {
int result;
int specMode = MeasureSpec.getMode(measureSpec);
int specSize = MeasureSpec.getSize(measureSpec);
if (specMode == MeasureSpec.EXACTLY) {
//We were told how big to be
result = specSize;
} else {
//Measure the height
result = (int)(2 * mRadius + getPaddingTop() + getPaddingBottom() + 1);
//Respect AT_MOST value if that was what is called for by measureSpec
if (specMode == MeasureSpec.AT_MOST) {
result = Math.min(result, specSize);
}
}
return result;
}
@Override
public void onRestoreInstanceState(Parcelable state) {
SavedState savedState = (SavedState)state;
super.onRestoreInstanceState(savedState.getSuperState());
mCurrentPage = savedState.currentPage;
mSnapPage = savedState.currentPage;
requestLayout();
}
@Override
public Parcelable onSaveInstanceState() {
Parcelable superState = super.onSaveInstanceState();
SavedState savedState = new SavedState(superState);
savedState.currentPage = mCurrentPage;
return savedState;
}
static class SavedState extends BaseSavedState {
int currentPage;
public SavedState(Parcelable superState) {
super(superState);
}
private SavedState(Parcel in) {
super(in);
currentPage = in.readInt();
}
@Override
public void writeToParcel(Parcel dest, int flags) {
super.writeToParcel(dest, flags);
dest.writeInt(currentPage);
}
@SuppressWarnings("UnusedDeclaration")
public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() {
@Override
public SavedState createFromParcel(Parcel in) {
return new SavedState(in);
}
@Override
public SavedState[] newArray(int size) {
return new SavedState[size];
}
};
}
}
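// --- Usage sketch (not part of the library) ---
// A minimal example of wiring this indicator to a VerticalViewPager from an Activity.
// The view ids (R.id.pager, R.id.indicator) are hypothetical placeholders; the pager is
// assumed to already have its adapter set, since setViewPager() rejects a pager without one.
class CirclePageIndicatorUsageSketch {
    static void bind(android.app.Activity activity) {
        VerticalViewPager pager = (VerticalViewPager) activity.findViewById(R.id.pager);
        CirclePageIndicator indicator = (CirclePageIndicator) activity.findViewById(R.id.indicator);
        indicator.setViewPager(pager); // throws IllegalStateException if the pager has no adapter
        indicator.setSnap(true);       // jump between pages instead of tracking the scroll offset
    }
}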
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.auth;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.fail;
import java.util.EnumSet;
import org.apache.pulsar.broker.authorization.AuthorizationService;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.AuthAction;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.TenantInfo;
import org.apache.pulsar.common.policies.data.SubscriptionAuthMode;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
@Test
public class AuthorizationTest extends MockedPulsarServiceBaseTest {
public AuthorizationTest() {
super();
}
@BeforeClass
@Override
protected void setup() throws Exception {
conf.setClusterName("c1");
conf.setAuthorizationEnabled(true);
conf.setAuthorizationAllowWildcardsMatching(true);
conf.setSuperUserRoles(Sets.newHashSet("pulsar.super_user"));
internalSetup();
}
@AfterClass
@Override
protected void cleanup() throws Exception {
internalCleanup();
}
@Test
void simple() throws Exception {
AuthorizationService auth = pulsar.getBrokerService().getAuthorizationService();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null), false);
admin.clusters().createCluster("c1", new ClusterData());
admin.tenants().createTenant("p1", new TenantInfo(Sets.newHashSet("role1"), Sets.newHashSet("c1")));
waitForChange();
admin.namespaces().createNamespace("p1/c1/ns1");
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null), false);
admin.namespaces().grantPermissionOnNamespace("p1/c1/ns1", "my-role", EnumSet.of(AuthAction.produce));
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null), true);
admin.topics().grantPermission("persistent://p1/c1/ns1/ds2", "other-role",
EnumSet.of(AuthAction.consume));
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "other-role", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds2"), "other-role", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds2"), "other-role", null, null), true);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds2"), "no-access-role", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "no-access-role", null), false);
admin.namespaces().grantPermissionOnNamespace("p1/c1/ns1", "my-role", EnumSet.allOf(AuthAction.class));
waitForChange();
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null), true);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "my-role", null, null), true);
// test for wildcard
// namespace prefix match
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.2", null), false);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.2", null), false);
admin.namespaces().grantPermissionOnNamespace("p1/c1/ns1", "my.role.*", EnumSet.of(AuthAction.produce));
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.2", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), true);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.2", null), false);
// namespace suffix match
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.my", null), false);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
admin.namespaces().grantPermissionOnNamespace("p1/c1/ns1", "*.role.my", EnumSet.of(AuthAction.consume));
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.my", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null, null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
// revoke for next test
admin.namespaces().revokePermissionsOnNamespace("p1/c1/ns1", "my.role.*");
admin.namespaces().revokePermissionsOnNamespace("p1/c1/ns1", "*.role.my");
waitForChange();
// topic prefix match
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.2", null), false);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.2", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "my.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "my.role.2", null), false);
admin.topics().grantPermission("persistent://p1/c1/ns1/ds1", "my.*",
EnumSet.of(AuthAction.produce));
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.2", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null), true);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "my.role.1", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "other.role.2", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "my.role.1", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "my.role.2", null), false);
// topic suffix match
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.my", null), false);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null, null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "1.role.my", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "2.role.my", null), false);
admin.topics().grantPermission("persistent://p1/c1/ns1/ds1", "*.my",
EnumSet.of(AuthAction.consume));
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.my", null), true);
assertEquals(auth.canProduce(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null), false);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "1.role.my", null, null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "2.role.other", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "1.role.my", null), false);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds2"), "2.role.my", null), false);
admin.topics().revokePermissions("persistent://p1/c1/ns1/ds1", "my.*");
admin.topics().revokePermissions("persistent://p1/c1/ns1/ds1", "*.my");
// tests for subscription auth mode
admin.namespaces().grantPermissionOnNamespace("p1/c1/ns1", "*", EnumSet.of(AuthAction.consume));
admin.namespaces().setSubscriptionAuthMode("p1/c1/ns1", SubscriptionAuthMode.Prefix);
waitForChange();
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "role1", null), true);
assertEquals(auth.canLookup(TopicName.get("persistent://p1/c1/ns1/ds1"), "role2", null), true);
try {
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "role1", null, "sub1"), false);
fail();
} catch (Exception e) {}
try {
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "role2", null, "sub2"), false);
fail();
} catch (Exception e) {}
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "role1", null, "role1-sub1"), true);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "role2", null, "role2-sub2"), true);
assertEquals(auth.canConsume(TopicName.get("persistent://p1/c1/ns1/ds1"), "pulsar.super_user", null, "role3-sub1"), true);
admin.namespaces().deleteNamespace("p1/c1/ns1");
admin.tenants().deleteTenant("p1");
admin.clusters().deleteCluster("c1");
}
private static void waitForChange() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
}
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.fixedbitset.FixedBitSetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedFilter;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.List;
public class TestSearchContext extends SearchContext {
final CacheRecycler cacheRecycler;
final PageCacheRecycler pageCacheRecycler;
final BigArrays bigArrays;
final IndexService indexService;
final FilterCache filterCache;
final IndexFieldDataService indexFieldDataService;
final FixedBitSetFilterCache fixedBitSetFilterCache;
final ThreadPool threadPool;
ContextIndexSearcher searcher;
int size;
private int terminateAfter = DEFAULT_TERMINATE_AFTER;
private String[] types;
public TestSearchContext(ThreadPool threadPool, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService indexService) {
this.cacheRecycler = cacheRecycler;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays.withCircuitBreaking();
this.indexService = indexService;
this.filterCache = indexService.cache().filter();
this.indexFieldDataService = indexService.fieldData();
this.fixedBitSetFilterCache = indexService.fixedBitSetFilterCache();
this.threadPool = threadPool;
}
public TestSearchContext() {
this.cacheRecycler = null;
this.pageCacheRecycler = null;
this.bigArrays = null;
this.indexService = null;
this.filterCache = null;
this.indexFieldDataService = null;
this.threadPool = null;
this.fixedBitSetFilterCache = null;
}
public void setTypes(String... types) {
this.types = types;
}
@Override
public void preProcess() {
}
@Override
public Filter searchFilter(String[] types) {
return null;
}
@Override
public long id() {
return 0;
}
@Override
public String source() {
return null;
}
@Override
public ShardSearchRequest request() {
return null;
}
@Override
public SearchType searchType() {
return null;
}
@Override
public SearchContext searchType(SearchType searchType) {
return null;
}
@Override
public SearchShardTarget shardTarget() {
return null;
}
@Override
public int numberOfShards() {
return 0;
}
@Override
public boolean hasTypes() {
return false;
}
@Override
public String[] types() {
return new String[0];
}
@Override
public float queryBoost() {
return 0;
}
@Override
public SearchContext queryBoost(float queryBoost) {
return null;
}
@Override
protected long nowInMillisImpl() {
return 0;
}
@Override
public Scroll scroll() {
return null;
}
@Override
public SearchContext scroll(Scroll scroll) {
return null;
}
@Override
public SearchContextFacets facets() {
return null;
}
@Override
public SearchContext facets(SearchContextFacets facets) {
return null;
}
@Override
public SearchContextAggregations aggregations() {
return null;
}
@Override
public SearchContext aggregations(SearchContextAggregations aggregations) {
return null;
}
@Override
public SearchContextHighlight highlight() {
return null;
}
@Override
public void highlight(SearchContextHighlight highlight) {
}
@Override
public SuggestionSearchContext suggest() {
return null;
}
@Override
public void suggest(SuggestionSearchContext suggest) {
}
@Override
public List<RescoreSearchContext> rescore() {
return null;
}
@Override
public void addRescore(RescoreSearchContext rescore) {
}
@Override
public boolean hasFieldDataFields() {
return false;
}
@Override
public FieldDataFieldsContext fieldDataFields() {
return null;
}
@Override
public boolean hasScriptFields() {
return false;
}
@Override
public ScriptFieldsContext scriptFields() {
return null;
}
@Override
public boolean hasPartialFields() {
return false;
}
@Override
public PartialFieldsContext partialFields() {
return null;
}
@Override
public boolean sourceRequested() {
return false;
}
@Override
public boolean hasFetchSourceContext() {
return false;
}
@Override
public FetchSourceContext fetchSourceContext() {
return null;
}
@Override
public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
return null;
}
@Override
public ContextIndexSearcher searcher() {
return searcher;
}
public void setSearcher(ContextIndexSearcher searcher) {
this.searcher = searcher;
}
@Override
public IndexShard indexShard() {
return null;
}
@Override
public MapperService mapperService() {
if (indexService != null) {
return indexService.mapperService();
}
return null;
}
@Override
public AnalysisService analysisService() {
return indexService.analysisService();
}
@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
@Override
public SimilarityService similarityService() {
return null;
}
@Override
public ScriptService scriptService() {
return null;
}
@Override
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
@Override
public PageCacheRecycler pageCacheRecycler() {
return pageCacheRecycler;
}
@Override
public BigArrays bigArrays() {
return bigArrays;
}
@Override
public FilterCache filterCache() {
return filterCache;
}
@Override
public FixedBitSetFilterCache fixedBitSetFilterCache() {
return fixedBitSetFilterCache;
}
@Override
public DocSetCache docSetCache() {
return null;
}
@Override
public IndexFieldDataService fieldData() {
return indexFieldDataService;
}
@Override
public long timeoutInMillis() {
return 0;
}
@Override
public void timeoutInMillis(long timeoutInMillis) {
}
@Override
public int terminateAfter() {
return terminateAfter;
}
@Override
public void terminateAfter(int terminateAfter) {
this.terminateAfter = terminateAfter;
}
@Override
public SearchContext minimumScore(float minimumScore) {
return null;
}
@Override
public Float minimumScore() {
return null;
}
@Override
public SearchContext sort(Sort sort) {
return null;
}
@Override
public Sort sort() {
return null;
}
@Override
public SearchContext trackScores(boolean trackScores) {
return null;
}
@Override
public boolean trackScores() {
return false;
}
@Override
public SearchContext parsedPostFilter(ParsedFilter postFilter) {
return null;
}
@Override
public ParsedFilter parsedPostFilter() {
return null;
}
@Override
public Filter aliasFilter() {
return null;
}
@Override
public SearchContext parsedQuery(ParsedQuery query) {
return null;
}
@Override
public ParsedQuery parsedQuery() {
return null;
}
@Override
public Query query() {
return null;
}
@Override
public boolean queryRewritten() {
return false;
}
@Override
public SearchContext updateRewriteQuery(Query rewriteQuery) {
return null;
}
@Override
public int from() {
return 0;
}
@Override
public SearchContext from(int from) {
return null;
}
@Override
public int size() {
return size;
}
public void setSize(int size) {
this.size = size;
}
@Override
public SearchContext size(int size) {
return null;
}
@Override
public boolean hasFieldNames() {
return false;
}
@Override
public List<String> fieldNames() {
return null;
}
@Override
public void emptyFieldNames() {
}
@Override
public boolean explain() {
return false;
}
@Override
public void explain(boolean explain) {
}
@Override
public List<String> groupStats() {
return null;
}
@Override
public void groupStats(List<String> groupStats) {
}
@Override
public boolean version() {
return false;
}
@Override
public void version(boolean version) {
}
@Override
public int[] docIdsToLoad() {
return new int[0];
}
@Override
public int docIdsToLoadFrom() {
return 0;
}
@Override
public int docIdsToLoadSize() {
return 0;
}
@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
return null;
}
@Override
public void accessed(long accessTime) {
}
@Override
public long lastAccessTime() {
return 0;
}
@Override
public long keepAlive() {
return 0;
}
@Override
public void keepAlive(long keepAlive) {
}
@Override
public void lastEmittedDoc(ScoreDoc doc) {
}
@Override
public ScoreDoc lastEmittedDoc() {
return null;
}
@Override
public SearchLookup lookup() {
return null;
}
@Override
public DfsSearchResult dfsResult() {
return null;
}
@Override
public QuerySearchResult queryResult() {
return null;
}
@Override
public FetchSearchResult fetchResult() {
return null;
}
@Override
public ScanContext scanContext() {
return null;
}
@Override
public MapperService.SmartNameFieldMappers smartFieldMappers(String name) {
return null;
}
@Override
public FieldMappers smartNameFieldMappers(String name) {
return null;
}
@Override
public FieldMapper<?> smartNameFieldMapper(String name) {
if (mapperService() != null) {
return mapperService().smartNameFieldMapper(name, types());
}
return null;
}
@Override
public FieldMapper<?> smartNameFieldMapperFromAnyType(String name) {
if (mapperService() != null) {
return mapperService().smartNameFieldMapper(name);
}
return null;
}
@Override
public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
if (mapperService() != null) {
return mapperService().smartNameObjectMapper(name, types);
}
return null;
}
@Override
public void doClose() throws ElasticsearchException {
}
@Override
public boolean useSlowScroll() {
return false;
}
@Override
public SearchContext useSlowScroll(boolean useSlowScroll) {
return null;
}
@Override
public Counter timeEstimateCounter() {
throw new UnsupportedOperationException();
}
@Override
public void innerHits(InnerHitsContext innerHitsContext) {
throw new UnsupportedOperationException();
}
@Override
public InnerHitsContext innerHits() {
throw new UnsupportedOperationException();
}
}
|
|
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.datamgt.model;
import com.liferay.portal.kernel.bean.AutoEscape;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.CacheModel;
import com.liferay.portal.service.ServiceContext;
import com.liferay.portlet.expando.model.ExpandoBridge;
import java.io.Serializable;
import java.util.Date;
/**
* The base model interface for the DictCollection service. Represents a row in the "oep_datamgt_dictcollection" database table, with each column mapped to a property of this class.
*
* <p>
* This interface and its corresponding implementation {@link org.oep.datamgt.model.impl.DictCollectionModelImpl} exist only as a container for the default property accessors generated by ServiceBuilder. Helper methods and all application logic should be put in {@link org.oep.datamgt.model.impl.DictCollectionImpl}.
* </p>
*
* @author NQMINH
* @see DictCollection
* @see org.oep.datamgt.model.impl.DictCollectionImpl
* @see org.oep.datamgt.model.impl.DictCollectionModelImpl
* @generated
*/
public interface DictCollectionModel extends BaseModel<DictCollection> {
/*
* NOTE FOR DEVELOPERS:
*
* Never modify or reference this interface directly. All methods that expect a dictionary collection model instance should use the {@link DictCollection} interface instead.
*/
/**
* Returns the primary key of this dictionary collection.
*
* @return the primary key of this dictionary collection
*/
public long getPrimaryKey();
/**
* Sets the primary key of this dictionary collection.
*
* @param primaryKey the primary key of this dictionary collection
*/
public void setPrimaryKey(long primaryKey);
/**
* Returns the dict collection ID of this dictionary collection.
*
* @return the dict collection ID of this dictionary collection
*/
public long getDictCollectionId();
/**
* Sets the dict collection ID of this dictionary collection.
*
* @param dictCollectionId the dict collection ID of this dictionary collection
*/
public void setDictCollectionId(long dictCollectionId);
/**
* Returns the company ID of this dictionary collection.
*
* @return the company ID of this dictionary collection
*/
public long getCompanyId();
/**
* Sets the company ID of this dictionary collection.
*
* @param companyId the company ID of this dictionary collection
*/
public void setCompanyId(long companyId);
/**
* Returns the group ID of this dictionary collection.
*
* @return the group ID of this dictionary collection
*/
public long getGroupId();
/**
* Sets the group ID of this dictionary collection.
*
* @param groupId the group ID of this dictionary collection
*/
public void setGroupId(long groupId);
/**
* Returns the user ID of this dictionary collection.
*
* @return the user ID of this dictionary collection
*/
public long getUserId();
/**
* Sets the user ID of this dictionary collection.
*
* @param userId the user ID of this dictionary collection
*/
public void setUserId(long userId);
/**
* Returns the user uuid of this dictionary collection.
*
* @return the user uuid of this dictionary collection
* @throws SystemException if a system exception occurred
*/
public String getUserUuid() throws SystemException;
/**
* Sets the user uuid of this dictionary collection.
*
* @param userUuid the user uuid of this dictionary collection
*/
public void setUserUuid(String userUuid);
/**
* Returns the create date of this dictionary collection.
*
* @return the create date of this dictionary collection
*/
public Date getCreateDate();
/**
* Sets the create date of this dictionary collection.
*
* @param createDate the create date of this dictionary collection
*/
public void setCreateDate(Date createDate);
/**
* Returns the modified date of this dictionary collection.
*
* @return the modified date of this dictionary collection
*/
public Date getModifiedDate();
/**
* Sets the modified date of this dictionary collection.
*
* @param modifiedDate the modified date of this dictionary collection
*/
public void setModifiedDate(Date modifiedDate);
/**
* Returns the name of this dictionary collection.
*
* @return the name of this dictionary collection
*/
@AutoEscape
public String getName();
/**
* Sets the name of this dictionary collection.
*
* @param name the name of this dictionary collection
*/
public void setName(String name);
/**
* Returns the version of this dictionary collection.
*
* @return the version of this dictionary collection
*/
@AutoEscape
public String getVersion();
/**
* Sets the version of this dictionary collection.
*
* @param version the version of this dictionary collection
*/
public void setVersion(String version);
/**
* Returns the title of this dictionary collection.
*
* @return the title of this dictionary collection
*/
@AutoEscape
public String getTitle();
/**
* Sets the title of this dictionary collection.
*
* @param title the title of this dictionary collection
*/
public void setTitle(String title);
/**
* Returns the validated from of this dictionary collection.
*
* @return the validated from of this dictionary collection
*/
public Date getValidatedFrom();
/**
* Sets the validated from of this dictionary collection.
*
* @param validatedFrom the validated from of this dictionary collection
*/
public void setValidatedFrom(Date validatedFrom);
/**
* Returns the validated to of this dictionary collection.
*
* @return the validated to of this dictionary collection
*/
public Date getValidatedTo();
/**
* Sets the validated to of this dictionary collection.
*
* @param validatedTo the validated to of this dictionary collection
*/
public void setValidatedTo(Date validatedTo);
/**
* Returns the status of this dictionary collection.
*
* @return the status of this dictionary collection
*/
public int getStatus();
/**
* Sets the status of this dictionary collection.
*
* @param status the status of this dictionary collection
*/
public void setStatus(int status);
@Override
public boolean isNew();
@Override
public void setNew(boolean n);
@Override
public boolean isCachedModel();
@Override
public void setCachedModel(boolean cachedModel);
@Override
public boolean isEscapedModel();
@Override
public Serializable getPrimaryKeyObj();
@Override
public void setPrimaryKeyObj(Serializable primaryKeyObj);
@Override
public ExpandoBridge getExpandoBridge();
@Override
public void setExpandoBridgeAttributes(BaseModel<?> baseModel);
@Override
public void setExpandoBridgeAttributes(ExpandoBridge expandoBridge);
@Override
public void setExpandoBridgeAttributes(ServiceContext serviceContext);
@Override
public Object clone();
@Override
public int compareTo(DictCollection dictCollection);
@Override
public int hashCode();
@Override
public CacheModel<DictCollection> toCacheModel();
@Override
public DictCollection toEscapedModel();
@Override
public DictCollection toUnescapedModel();
@Override
public String toString();
@Override
public String toXmlString();
}
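// --- Usage sketch (not part of the generated interface) ---
// Illustrates populating a dictionary collection through the accessors declared above.
// Per the developer note, application code works with the DictCollection interface rather
// than this model interface directly; how the instance is obtained (service layer, factory)
// is out of scope and assumed here, and the sample values are placeholders.
class DictCollectionModelUsageSketch {
    static void fill(DictCollection collection) {
        collection.setName("ADMINISTRATIVE_AREA");   // hypothetical collection code
        collection.setTitle("Administrative areas"); // display title
        collection.setVersion("1.0");
        collection.setValidatedFrom(new Date());     // valid from now
        collection.setStatus(0);                     // status semantics are service-defined
    }
}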
|
|
package de.kisi.android.db;
import java.sql.SQLException;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import com.j256.ormlite.android.apptools.OrmLiteSqliteOpenHelper;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.dao.RuntimeExceptionDao;
import com.j256.ormlite.support.ConnectionSource;
import com.j256.ormlite.table.TableUtils;
import de.kisi.android.model.Locator;
import de.kisi.android.model.Lock;
import de.kisi.android.model.Place;
import de.kisi.android.model.User;
/**
* Database helper class used to manage the creation and upgrading of your database. This class also usually provides
* the DAOs used by the other classes.
*/
public class DatabaseHelper extends OrmLiteSqliteOpenHelper {
// name of the database file for your application -- change to something appropriate for your app
private static final String DATABASE_NAME = "Kisi.db";
// any time you make changes to your database objects, you may have to increase the database version
private static final int DATABASE_VERSION = 4;
// the DAO object we use to access the Lock table;
private Dao<Lock, Integer> lockDao = null;
private RuntimeExceptionDao<Lock, Integer> lockRuntimeDao = null;
private Dao<Place, Integer> placeDao = null;
private RuntimeExceptionDao<Place, Integer> placeRuntimeDao = null;
private Dao<User, Integer> userDao = null;
private RuntimeExceptionDao<User, Integer> userRuntimeDao = null;
private Dao<Locator, Integer> locatorDao = null;
private RuntimeExceptionDao<Locator, Integer> locatorRuntimeDao = null;
public DatabaseHelper(Context context) {
super(context, DATABASE_NAME, null, DATABASE_VERSION, de.kisi.android.R.raw.ormlite_config );
}
/**
* This is called when the database is first created. Usually you should call createTable statements here to create
* the tables that will store your data.
*/
@Override
public void onCreate(SQLiteDatabase db, ConnectionSource connectionSource) {
try {
Log.i(DatabaseHelper.class.getName(), "onCreate");
TableUtils.createTable(connectionSource, Place.class);
TableUtils.createTable(connectionSource, Lock.class);
TableUtils.createTable(connectionSource, User.class);
TableUtils.createTable(connectionSource, Locator.class);
} catch (SQLException e) {
Log.e(DatabaseHelper.class.getName(), "Can't create database", e);
throw new RuntimeException(e);
}
}
/**
* This is called when your application is upgraded and it has a higher version number. This allows you to adjust
* the various data to match the new version number.
* Upgrade steps for schema versions 2 through 4 are handled below; add further migrations here for future versions.
*/
@Override
public void onUpgrade(SQLiteDatabase db, ConnectionSource connectionSource, int oldVersion, int newVersion) {
try {
// Added in version 2 the user table
if (oldVersion < 2) {
TableUtils.createTable(connectionSource, User.class);
}
// Added in version 3 the locator table
if (oldVersion < 3) {
TableUtils.createTable(connectionSource, Locator.class);
}
// Added in version 4 the column suggestUnlock in the place table
if(oldVersion < 4) {
getPlaceDao();
placeDao.executeRaw("ALTER TABLE 'place' ADD COLUMN suggestUnlock BOOLEAN;");
TableUtils.dropTable(connectionSource, Lock.class, true);
TableUtils.createTable(connectionSource, Lock.class);
}
} catch (SQLException e) {
Log.e(DatabaseHelper.class.getName(), "Can't upgrade database", e);
}
}
/**
* Returns the Database Access Object (DAO) for our Lock class. It will create it or just give the cached
* value.
*/
public Dao<Lock, Integer> getLockDao() throws SQLException {
if (lockDao == null) {
lockDao = getDao(Lock.class);
}
return lockDao;
}
/**
* Returns the RuntimeExceptionDao (Database Access Object) version of a Dao for our Lock class. It will
* create it or just give the cached value. RuntimeExceptionDao only throws RuntimeExceptions.
*/
public RuntimeExceptionDao<Lock, Integer> getLockDataDao() {
if (lockRuntimeDao == null) {
lockRuntimeDao = getRuntimeExceptionDao(Lock.class);
}
return lockRuntimeDao;
}
/**
* Returns the Database Access Object (DAO) for our Place class. It will create it or just give the cached
* value.
*/
public Dao<Place, Integer> getPlaceDao() throws SQLException {
if (placeDao == null) {
placeDao = getDao(Place.class);
}
return placeDao;
}
/**
* Returns the RuntimeExceptionDao (Database Access Object) version of a Dao for our Place class. It will
* create it or just give the cached value. RuntimeExceptionDao only throws RuntimeExceptions.
*/
public RuntimeExceptionDao<Place, Integer> getPlaceDataDao() {
if (placeRuntimeDao == null) {
placeRuntimeDao = getRuntimeExceptionDao(Place.class);
}
return placeRuntimeDao;
}
/**
* Returns the Database Access Object (DAO) for our User class. It will create it or just give the cached
* value.
*/
public Dao<User, Integer> getUserDao() throws SQLException {
if (userDao == null) {
userDao = getDao(User.class);
}
return userDao;
}
/**
* Returns the RuntimeExceptionDao (Database Access Object) version of a Dao for our User class. It will
* create it or just give the cached value. RuntimeExceptionDao only throws RuntimeExceptions.
*/
public RuntimeExceptionDao<User, Integer> getUserDataDao() {
if (userRuntimeDao == null) {
userRuntimeDao = getRuntimeExceptionDao(User.class);
}
return userRuntimeDao;
}
/**
* Returns the Database Access Object (DAO) for our Locator class. It will create it or just give the cached
* value.
*/
public Dao<Locator, Integer> getLocatorDao() throws SQLException {
if (locatorDao == null) {
locatorDao = getDao(Locator.class);
}
return locatorDao;
}
/**
* Returns the RuntimeExceptionDao (Database Access Object) version of a Dao for our Locator class. It will
* create it or just give the cached value. RuntimeExceptionDao only throws RuntimeExceptions.
*/
public RuntimeExceptionDao<Locator, Integer> getLocatorDataDao() {
if (locatorRuntimeDao == null) {
locatorRuntimeDao = getRuntimeExceptionDao(Locator.class);
}
return locatorRuntimeDao;
}
public void clear() {
try {
TableUtils.clearTable(connectionSource, Place.class);
TableUtils.clearTable(connectionSource, Lock.class);
TableUtils.clearTable(connectionSource, User.class);
TableUtils.clearTable(connectionSource, Locator.class);
} catch (SQLException e) {
e.printStackTrace();
}
}
public void clearPlaceLockLocator() {
try {
TableUtils.clearTable(connectionSource, Place.class);
TableUtils.clearTable(connectionSource, Lock.class);
TableUtils.clearTable(connectionSource, Locator.class);
} catch (SQLException e) {
e.printStackTrace();
}
}
/**
* Close the database connections and clear any cached DAOs.
*/
@Override
public void close() {
super.close();
lockRuntimeDao = null;
placeRuntimeDao = null;
locatorRuntimeDao = null;
}
}
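// --- Usage sketch (not part of the helper) ---
// Shows one common way to obtain the helper via ORMLite's OpenHelperManager and read all
// stored Place rows through the RuntimeExceptionDao (no checked SQLException at the call
// site). The surrounding Context and the timing of the release call are the caller's
// responsibility; this is a minimal illustration, not the app's actual access pattern.
class DatabaseHelperUsageSketch {
    static java.util.List<Place> loadPlaces(Context context) {
        DatabaseHelper helper = com.j256.ormlite.android.apptools.OpenHelperManager
                .getHelper(context, DatabaseHelper.class);
        try {
            return helper.getPlaceDataDao().queryForAll();
        } finally {
            // Balance every getHelper() with a releaseHelper() so ORMLite can close the database.
            com.j256.ormlite.android.apptools.OpenHelperManager.releaseHelper();
        }
    }
}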
|
|
///**
// *
// * PixelFlow | Copyright (C) 2017 Thomas Diewald (www.thomasdiewald.com)
// *
// * src - www.github.com/diwi/PixelFlow
// *
// * A Processing/Java library for high performance GPU-Computing.
// * MIT License: https://opensource.org/licenses/MIT
// *
// */
//
//package Skylight.Skylight_BulletPhysics_Breakable;
//
//import java.io.File;
//import java.util.ArrayList;
//import java.util.List;
//import java.util.Locale;
//
//
//import javax.vecmath.Matrix4f;
//import javax.vecmath.Vector3f;
//
//import com.bulletphysics.collision.shapes.CollisionShape;
//import com.bulletphysics.linearmath.Transform;
//import com.bulletphysics.util.ObjectArrayList;
//import com.jogamp.opengl.GL2;
//
//import peasy.*;
//import bRigid.*;
//import wblut.geom.WB_Coord;
//import wblut.geom.WB_Point;
//import wblut.geom.WB_Polygon;
//import wblut.geom.WB_Voronoi;
//import wblut.geom.WB_VoronoiCell2D;
//import com.thomasdiewald.pixelflow.java.DwPixelFlow;
//import com.thomasdiewald.pixelflow.java.antialiasing.SMAA.SMAA;
//import com.thomasdiewald.pixelflow.java.imageprocessing.filter.DepthOfField;
//import com.thomasdiewald.pixelflow.java.imageprocessing.filter.DwFilter;
//import com.thomasdiewald.pixelflow.java.render.skylight.DwSceneDisplay;
//import com.thomasdiewald.pixelflow.java.render.skylight.DwScreenSpaceGeometryBuffer;
//import com.thomasdiewald.pixelflow.java.render.skylight.DwSkyLight;
//import com.thomasdiewald.pixelflow.java.sampling.DwSampling;
//import com.thomasdiewald.pixelflow.java.utils.DwBoundingSphere;
//import com.thomasdiewald.pixelflow.java.utils.DwFrameCapture;
//import com.thomasdiewald.pixelflow.java.utils.DwUtils;
//
//import processing.core.PApplet;
//import processing.core.PFont;
//import processing.core.PMatrix3D;
//import processing.core.PShape;
//import processing.opengl.PGL;
//import processing.opengl.PGraphics3D;
//
//
//
//public class Skylight_BulletPhysics_Breakable1 extends PApplet {
//
// //
// // author: Thomas Diewald
// //
// //
// // This Example shows how to combine the PixelFlow Skylight-Renderer and
// // Bullet-Physics. Rigid Bodies are created from a Voronoi-Tesselation.
// //
// // Features:
// //
// // - CellFracture from Voronoi-Tesselation - HE_Mesh
// // - Rigid Body Simulation - bRigid, Bullet Physics
// // - Skylight Renderer, Sun + AO
// // - DoF
// // - Bloom
// // - SMAA
// // - shooting
// // - ...
// //
// // required Libraries to run this example (PDE contribution manager):
// //
// // - PeasyCam
// // library for camera control, by Jonathan Feinberg
// // http://mrfeinberg.com/peasycam/
// //
// // - bRigid (jBullet-Physics for Processing),
// // library for rigid body simulation, by Daniel Koehler
// // http://www.lab-eds.org/bRigid
// //
// // - HE_Mesh
// // library for creating and manipulating polygonal meshes, by Frederik Vanhoutte
// // https://github.com/wblut/HE_Mesh (manual installation)
// //
// // - PixelFlow
// // library for skylight, post-fx, lots of GLSL, etc..., by Thomas Diewald
// // https://github.com/diwi/PixelFlow
// //
// //
//
//
//
// int viewport_w = 1280;
// int viewport_h = 720;
// int viewport_x = 230;
// int viewport_y = 0;
//
// // Camera
// PeasyCam cam;
//
// // Bullet Physics
// MyBPhysics physics;
//
// // Bullet bodies, group-shape
// PShape group_bulletbodies;
//
// // PixelFlow Context
// DwPixelFlow context;
//
// // PixelFlow Filter, for post fx
// DwFilter filter;
//
// // some render-targets
// PGraphics3D pg_render;
// PGraphics3D pg_aa;
//
// // SkyLight Renderer
// DwSkyLight skylight;
// PMatrix3D mat_scene_view;
// PMatrix3D mat_scene_bounds;
//
// // AntiAliasing - SMAA
// SMAA smaa;
//
// // Depth of Field - DoF
// DepthOfField dof;
// DwScreenSpaceGeometryBuffer geombuffer;
// PGraphics3D pg_tmp;
//
// PFont font12;
//
//
// DwFrameCapture capture;
//
// // switches
// public boolean UPDATE_PHYSICS = true;
// public boolean APPLY_DOF = true;
// public boolean APPLY_BLOOM = true;
// public boolean DISPLAY_WIREFRAME = false;
//
// public void settings() {
// size(viewport_w, viewport_h, P3D);
// smooth(0);
// }
//
// public void setup() {
//
// surface.setLocation(viewport_x, viewport_y);
//
// float SCENE_SCALE = 1000;
//
// // for screenshot
// capture = new DwFrameCapture(this, "examples/");
//
// font12 = createFont("../data/SourceCodePro-Regular.ttf", 12);
//
// cam = new PeasyCam(this, 0, 0, 0, SCENE_SCALE);
// perspective(60 * DEG_TO_RAD, width/(float)height, 2, SCENE_SCALE * 250);
//
// group_bulletbodies = createShape(GROUP);
//
// physics = new MyBPhysics(); // no bounding box
// physics.world.setGravity(new Vector3f(0, 0, -100));
//
// pg_render = (PGraphics3D) createGraphics(width, height, P3D);
// pg_render.smooth(0);
// pg_render.beginDraw();
// pg_render.endDraw();
//
//
// // compute scene bounding-sphere
// DwBoundingSphere scene_bs = new DwBoundingSphere();
// scene_bs.set(0, 0, 200, 450);
// PMatrix3D mat_bs = scene_bs.getUnitSphereMatrix();
//
// // matrix, to place (centering, scaling) the scene in the viewport
// mat_scene_view = new PMatrix3D();
// mat_scene_view.scale(SCENE_SCALE);
// mat_scene_view.apply(mat_bs);
//
// // matrix, to place the scene in the skylight renderer
// mat_scene_bounds = mat_scene_view.get();
// mat_scene_bounds.invert();
// mat_scene_bounds.preApply(mat_bs);
//
// // callback for rendering the scene
// DwSceneDisplay scene_display = new DwSceneDisplay(){
// @Override
// public void display(PGraphics3D canvas) {
// displayScene(canvas);
// }
// };
//
// // library context
// context = new DwPixelFlow(this);
// context.print();
// context.printGL();
//
// // postprocessing filters
// filter = DwFilter.get(context);
//
// // init skylight renderer
// skylight = new DwSkyLight(context, scene_display, mat_scene_bounds);
//
// // parameters for sky-light
// skylight.sky.param.iterations = 50;
// skylight.sky.param.solar_azimuth = 0;
// skylight.sky.param.solar_zenith = 0;
// skylight.sky.param.sample_focus = 1; // full sphere sampling
// skylight.sky.param.intensity = 1.0f;
// skylight.sky.param.rgb = new float[]{1,1,1};
// skylight.sky.param.shadowmap_size = 512; // quality vs. performance
//
// // parameters for sun-light
// skylight.sun.param.iterations = 50;
// skylight.sun.param.solar_azimuth = 35;
// skylight.sun.param.solar_zenith = 65;
// skylight.sun.param.sample_focus = 0.1f;
// skylight.sun.param.intensity = 1.0f;
// skylight.sun.param.rgb = new float[]{1,1,1};
// skylight.sun.param.shadowmap_size = 512;
//
// // postprocessing AA
// smaa = new SMAA(context);
// pg_aa = (PGraphics3D) createGraphics(width, height, P3D);
// pg_aa.smooth(0);
// pg_aa.textureSampling(5);
//
//
// dof = new DepthOfField(context);
// geombuffer = new DwScreenSpaceGeometryBuffer(context, scene_display);
//
// pg_tmp = (PGraphics3D) createGraphics(width, height, P3D);
// pg_tmp.smooth(0);
// DwUtils.changeTextureFormat(pg_tmp, GL2.GL_RGBA16F, GL2.GL_RGBA, GL2.GL_FLOAT);
//
// // fresh start
// reset();
//
// createFractureShape();
//
// frameRate(60);
// }
//
//
// public static class MyBPhysics extends BPhysics{
//
//
// public void updateBehaviors1(){
// if (behaviors != null) {
// for (BObject o : rigidBodies) {
// for (BInterface b : behaviors) {
// b.apply(this, o);
// }
// }
// }
// }
//
// private void updateObjects1() {
// for (BObject o : rigidBodies) {
// if (o.behaviors != null) {
// for (BInterface b : o.behaviors) {
// b.apply(this, o);
// }
// }
// }
// }
//
// public void update(float frameRate){
// for (int i = 0; i < stepSimulation; i++) {
// updateBehaviors1();
// updateObjects1();
// // float stepTime = getDeltaTimeMicroseconds();
// // world.stepSimulation(stepTime / 1000000f);
// world.stepSimulation(1.0f / 400, 100, 1.0f / 200.0f);
// }
//
// }
//
// @Override
// public void update() {
// super.update();
// }
// }
//
//
// boolean slow = true;
//
// public void draw() {
//
// // handle bullet physics update, etc...
// if(UPDATE_PHYSICS){
//
// physics.update();
//// physics.update(30);
//
// removeLostBodies();
//
// for (BObject body : physics.rigidBodies) {
// updateShapes(body);
// }
// }
//
//
// // when the camera moves, the renderer restarts
// updateCamActiveStatus();
// if(CAM_ACTIVE || UPDATE_PHYSICS){
// skylight.reset();
// }
//
// // update renderer
// skylight.update();
//
//
// // apply AntiAliasing
// smaa.apply(skylight.renderer.pg_render, pg_aa);
//
// // apply bloom
// if(APPLY_BLOOM){
// filter.bloom.param.mult = 0.15f; //map(mouseX, 0, width, 0, 1);
// filter.bloom.param.radius = 0.5f; // map(mouseY, 0, height, 0, 1);
// filter.bloom.apply(pg_aa, null, pg_aa);
// }
//
// // apply DoF
// if(APPLY_DOF){
// int mult_blur = 5;
//
// geombuffer.update(skylight.renderer.pg_render);
//
// filter.gaussblur.apply(geombuffer.pg_geom, geombuffer.pg_geom, pg_tmp, mult_blur);
//
// dof.param.focus_pos = new float[]{0.5f, 0.5f};
//// dof.param.focus_pos[0] = map(mouseX, 0, width , 0, 1);
//// dof.param.focus_pos[1] = 1-map(mouseY, 0, height, 0, 1);
// dof.param.mult_blur = mult_blur;
// dof.apply(pg_aa, pg_render, geombuffer);
// filter.copy.apply(pg_render, pg_aa);
// }
//
// // display result
// cam.beginHUD();
// {
// background(255);
// noLights();
// image(pg_aa, 0, 0);
//
// displayCross();
//
// displayHUD();
// }
// cam.endHUD();
//
// // info
// String txt_fps = String.format(getClass().getName()+ " [fps %6.2f] [bodies %d]", frameRate, physics.rigidBodies.size());
// surface.setTitle(txt_fps);
// }
//
//
//
// public void displayCross(){
// pushMatrix();
// float cursor_s = 10;
// float fpx = ( dof.param.focus_pos[0]) * width;
// float fpy = (1.0f - dof.param.focus_pos[1]) * height;
// blendMode(EXCLUSION);
// translate(fpx, fpy);
// strokeWeight(1);
// stroke(255,200);
// line(-cursor_s, 0, +cursor_s, 0);
// line(0, -cursor_s, 0, +cursor_s);
// blendMode(BLEND);
// popMatrix();
// }
//
//
//
//
// public void displayHUD(){
//
// String txt_fps = String.format(Locale.ENGLISH, "fps: %6.2f", frameRate);
// String txt_num_bodies = String.format(Locale.ENGLISH, "rigid bodies: %d", physics.rigidBodies.size());
// String txt_samples_sky = String.format(Locale.ENGLISH, "sky/sun: %d/%d (samples)", skylight.sky.param.iterations, skylight.sun.param.iterations);
//
//// String txt_model = String.format(Locale.ENGLISH, "[1-9] model: %d", BUILDING);
// String txt_reset = String.format(Locale.ENGLISH, "[r] reset");
// String txt_update_physics = String.format(Locale.ENGLISH, "[t] physics: %b", UPDATE_PHYSICS);
// String txt_apply_bloom = String.format(Locale.ENGLISH, "[q] bloom: %b", APPLY_BLOOM);
// String txt_apply_dof = String.format(Locale.ENGLISH, "[w] DoF: %b", APPLY_DOF);
// String txt_wireframe = String.format(Locale.ENGLISH, "[e] wireframe: %b", DISPLAY_WIREFRAME);
// String txt_shoot = String.format(Locale.ENGLISH, "[ ] shoot");
//
// int tx, ty, sy;
// tx = 10;
// ty = 10;
// sy = 13;
//
// fill(0, 100);
// noStroke();
// stroke(0, 200);
// rectMode(CORNER);
// rect(5, 5, 200, 170);
//
// textFont(font12);
//// textMode(SCREEN);
// fill(220);
// text(txt_fps , tx, ty+=sy);
// text(txt_num_bodies , tx, ty+=sy);
// text(txt_samples_sky , tx, ty+=sy);
// ty+=sy;
//// text(txt_model , tx, ty+=sy);
// text(txt_reset , tx, ty+=sy);
// text(txt_update_physics , tx, ty+=sy);
// text(txt_apply_bloom , tx, ty+=sy);
// text(txt_apply_dof , tx, ty+=sy);
// text(txt_wireframe , tx, ty+=sy);
// text(txt_shoot , tx, ty+=sy);
//
// }
//
//
//
// // reset scene
// public void reset(){
// // remove bodies
// for(int i = physics.rigidBodies.size() - 1; i >= 0; i--){
// BObject body = physics.rigidBodies.get(i);
// physics.removeBody(body);
// }
//
// // just in case, I am actually not sure if PShape really needs this to
// // avoid memory leaks.
// for(int i = group_bulletbodies.getChildCount() - 1; i >= 0; i--){
// group_bulletbodies.removeChild(i);
// }
//
// addGround();
// }
//
//
// // bodies that have fallen outside of the scene can be removed
// public void removeLostBodies(){
// for(int i = physics.rigidBodies.size() - 1; i >= 0; i--){
// BObject body = physics.rigidBodies.get(i);
// Vector3f pos = body.getPosition();
//
// if(pos.z < -1000){
// int idx = group_bulletbodies.getChildIndex(body.displayShape);
// if(idx >= 0){
// group_bulletbodies.removeChild(idx);
// }
// physics.removeBody(body);
// }
// }
// }
//
//
// // toggle shading/wireframe display
// public void toggleDisplayWireFrame(){
// DISPLAY_WIREFRAME = !DISPLAY_WIREFRAME;
// for (BObject body : physics.rigidBodies) {
// PShape shp = body.displayShape;
// String name = shp.getName();
// if(name != null && name.contains("[wire]")){
// shp.setFill(!DISPLAY_WIREFRAME);
// shp.setStroke(DISPLAY_WIREFRAME);
// }
// }
// skylight.reset();
// }
//
//
// // check if camera is moving
// float[] cam_pos = new float[3];
// boolean CAM_ACTIVE = false;
// public void updateCamActiveStatus(){
// float[] cam_pos_curr = cam.getPosition();
// CAM_ACTIVE = false;
// CAM_ACTIVE |= cam_pos_curr[0] != cam_pos[0];
// CAM_ACTIVE |= cam_pos_curr[1] != cam_pos[1];
// CAM_ACTIVE |= cam_pos_curr[2] != cam_pos[2];
// cam_pos = cam_pos_curr;
// }
//
//
//
// public void keyReleased(){
// if(key == 't') UPDATE_PHYSICS = !UPDATE_PHYSICS;
// if(key == 'q') APPLY_BLOOM = !APPLY_BLOOM;
// if(key == 'w') APPLY_DOF = !APPLY_DOF;
// if(key == 'e') toggleDisplayWireFrame();
// if(key == 'r') createFractureShape();
// if(key == ' ') addShootingBody();
// if(key == 's') saveScreenshot();
// }
//
//
//
//
//
//
// // shoot body into the scene
// int shooter_count = 0;
// PMatrix3D mat_mvp = new PMatrix3D();
// PMatrix3D mat_mvp_inv = new PMatrix3D();
//
// public void addShootingBody(){
//
//
// float vel = 1000;
// float mass = 100000;
// float dimr = 30;
//
//
// PGraphics3D pg = (PGraphics3D) skylight.renderer.pg_render;
// mat_mvp.set(pg.modelview);
// mat_mvp.apply(mat_scene_view);
// mat_mvp_inv.set(mat_mvp);
// mat_mvp_inv.invert();
//
// float[] cam_start = {0, 0, -0, 1};
// float[] cam_aim = {0, 0, -400, 1};
// float[] world_start = new float[4];
// float[] world_aim = new float[4];
// mat_mvp_inv.mult(cam_start, world_start);
// mat_mvp_inv.mult(cam_aim, world_aim);
//
// Vector3f pos = new Vector3f(world_start[0], world_start[1], world_start[2]);
// Vector3f aim = new Vector3f(world_aim[0], world_aim[1], world_aim[2]);
// Vector3f dir = new Vector3f(aim);
// dir.sub(pos);
// dir.normalize();
// dir.scale(vel);
//
// BObject obj;
//
//// if((shooter_count % 2) == 0){
// obj = new BSphere(this, mass, 0, 0, 0, dimr*0.5f);
//// } else {
//// obj = new BBox(this, mass, dimr, dimr, dimr);
//// }
// BObject body = new BObject(this, mass, obj, pos, true);
//
// body.setPosition(pos);
// body.setVelocity(dir);
// body.setRotation(new Vector3f(random(-1, 1),random(-1, 1),random(-1, 1)), random(PI));
//
// body.rigidBody.setRestitution(0.9f);
// body.rigidBody.setFriction(1);
//// body.rigidBody.setHitFraction(1);
// body.rigidBody.setDamping(0.1f, 0.1f);
//
// body.displayShape.setStroke(false);
// body.displayShape.setFill(true);
// body.displayShape.setFill(color(255,200,0));
// body.displayShape.setStrokeWeight(1);
// body.displayShape.setStroke(color(0));
// if(obj instanceof BBox){
// fixBoxNormals(body.displayShape);
// }
//
// physics.addBody(body);
// group_bulletbodies.addChild(body.displayShape);
//
// body.displayShape.setName("[shooter_"+shooter_count+"] [wire]");
// shooter_count++;
// }
//
//
// // bRigid-bug: face 1 and 3, vertex order -> inverse normal
// private void fixBoxNormals(PShape box){
// PShape face;
// face = box.getChild(1);
// for(int i = 0; i < 4; i++){
// face.setNormal(i, -1, 0, 0);
// }
// face = box.getChild(3);
// for(int i = 0; i < 4; i++){
// face.setNormal(i, +1, 0, 0);
// }
// }
//
//
//
//
// public PShape createCellShape(List<WB_Point> points, float dimz, Vector3f center_of_mass){
//
// Vector3f com = center_of_mass;
// int num_points = points.size();
// float dimz_half = dimz*0.5f;
//
// PShape cell_top = createShape();
// cell_top.beginShape(POLYGON);
// cell_top.normal(0, 0, -1);
// for(WB_Point vtx : points){
// cell_top.vertex(vtx.xf()-com.x, vtx.yf()-com.y, +dimz_half-com.z);
// }
// cell_top.endShape(CLOSE);
//
// PShape cell_bot = createShape();
// cell_bot.beginShape(POLYGON);
// cell_bot.normal(0, 0, -1);
// for(WB_Point vtx : points){
// cell_bot.vertex(vtx.xf()-com.x, vtx.yf()-com.y, -dimz_half-com.z);
// }
// cell_bot.endShape(CLOSE);
//
// PShape cell_side = createShape();
// cell_side.beginShape(QUADS);
//
// for(int i = 0; i < num_points; i++){
// WB_Point v0 = points.get((i+0)%num_points);
// WB_Point v1 = points.get((i+1)%num_points);
// float v0x = v0.xf();
// float v0y = v0.yf();
// float v1x = v1.xf();
// float v1y = v1.yf();
//
// float dx = v1x - v0x;
// float dy = v1y - v0y;
//
// float nx = +dy;
// float ny = -dx;
// float nz = 0;
// float nn = sqrt(nx*nx + ny*ny);
// nx /= nn;
// ny /= nn;
//
// cell_side.normal(nx, ny, nz);
// cell_side.vertex(v0x-com.x, v0y-com.y, +dimz_half-com.z);
// cell_side.vertex(v0x-com.x, v0y-com.y, -dimz_half-com.z);
// cell_side.vertex(v1x-com.x, v1y-com.y, -dimz_half-com.z);
// cell_side.vertex(v1x-com.x, v1y-com.y, +dimz_half-com.z);
//
// }
//
// cell_side.endShape();
//
//
// PShape cell = createShape(GROUP);
// cell.addChild(cell_top);
// cell.addChild(cell_bot);
// cell.addChild(cell_side);
//
// float r = voronoi_col[0];
// float g = voronoi_col[1];
// float b = voronoi_col[2];
//
// cell.setFill(color(r,g,b));
// cell.setFill(true);
// cell.setStrokeWeight(1f);
// cell.setStroke(color(r,g,b,96));
// cell.setStroke(false);
//
// cell.setName("[wire]");
//
// return cell;
// }
//
//
// float[] voronoi_col = new float[3];
//
// static class MyBConvexHull extends BConvexHull{
//
// public MyBConvexHull(PApplet p, float mass, ObjectArrayList<Vector3f> vertices, Vector3f position, boolean inertia) {
// super(p, mass, vertices, position, inertia);
// }
//
//
// @Override
// public PShape drawToPShape(CollisionShape shape) {
// return displayShape;
// }
//
// }
//
//
// public void createFractureShape(){
// reset();
//
// float sx = 80;
//
//// voronoi_col = new float[]{180,140,255};
//// createFractureShape(sx * -2);
////
//// voronoi_col = new float[]{140,180,255};
//// createFractureShape(sx * -1);
////
//// voronoi_col = new float[]{140,255,180};
//// createFractureShape(0);
////
//// voronoi_col = new float[]{255,180,140};
//// createFractureShape(sx * +1);
////
//// voronoi_col = new float[]{255,255,140};
//// createFractureShape(sx * +2);
//
//
//
//
// voronoi_col = new float[]{140,180,255};
// createFractureShape(sx * -1);
//
// voronoi_col = new float[]{255,180,140};
// createFractureShape(sx * +0);
//
// voronoi_col = new float[]{255,255,140};
// createFractureShape(sx * +1);
//
//
//
// }
//
//
// public void createFractureShape(float translate_y){
// long timer = System.currentTimeMillis();
//
//
//
// float translate_z = 0;
// float pos_z = 20f;
//
// int num_voronoi_cells = 300;
//
// float dimx = 400;
// float dimy = 200;
// float dimz = 5;
// float dimx_half = dimx * 0.5f;
// float dimy_half = dimy * 0.5f;
// float dimz_half = dimz * 0.5f;
// float off = 10;
//
//
// PMatrix3D mat_wall = new PMatrix3D();
// mat_wall.translate(0, 0, pos_z);
// mat_wall.translate(0, translate_y, translate_z);
// mat_wall.rotateX(PI/2f);
// mat_wall.translate(0, dimy * 0.5f, 0);
// mat_wall.translate(0, translate_z, 0);
//
// // create centroids for voronoi-cells
// List<WB_Point> points = new ArrayList<WB_Point>(num_voronoi_cells);
//
// for(int i = 0; i < num_voronoi_cells; i++){
//// float[] halton = DwSampling.sampleDisk_Halton(i, 1f);
////
//// float rx = halton[0];
//// float ry = halton[1];
////
//// float px = rx * (dimx_half - off);
//// float py = ry * (dimy_half - off);
//
// int sample_idx = i;
// float r = 4 + 0.05f * (float) Math.pow(sample_idx, 1.5f);
//
//// float r = 4 + 10.05f * (float) Math.pow(sample_idx, 0.5f);
//
//
//// System.out.println(r);
// float angle = sample_idx * (float) DwSampling.GOLDEN_ANGLE_R*1f;
// float px = r * cos(angle);
// float py = r * sin(angle);
//
// float rrr = r*0.2f;
// px += random(-rrr, rrr);
// py += random(-rrr, rrr);
//
// float idxn = sample_idx / (float)num_voronoi_cells;
//
// if(random(1) < 0.8f * idxn) continue;
//
// if(px > -(dimx_half-off) && px < +(dimx_half-off) &&
// py > -(dimy_half-off) && py < +(dimy_half-off) )
// {
// points.add(new WB_Point(px, py, 0));
// }
// }
//
//
//
//
//
//
//
//
//// DwMeshCleaner cleaner = new DwMeshCleaner();
//
// // create voronoi
// ArrayList<WB_Point> pts = new ArrayList<WB_Point>();
// pts.add(new WB_Point(-dimx_half, -dimy_half));
// pts.add(new WB_Point(+dimx_half, -dimy_half));
// pts.add(new WB_Point(+dimx_half, +dimy_half));
// pts.add(new WB_Point(-dimx_half, +dimy_half));
//
// WB_Polygon boundary = new WB_Polygon(pts);
// List<WB_VoronoiCell2D> cells = WB_Voronoi.getClippedVoronoi2D(points, boundary, 0);
//
// ArrayList<BObject> bodies = new ArrayList<BObject>();
//
// //float mass_sum = 0;
//
// for (int i = 0; i < cells.size(); i++) {
//
// WB_VoronoiCell2D cell = cells.get(i);
// WB_Polygon cell_polygon = cell.getPolygon();
// List<WB_Point> cell_points = cell_polygon.getPoints();
//
// int num_verts = cell_points.size();
//
// // compute center of mass
// float[][] pnts = new float[num_verts][3];
// for(int j = 0; j < num_verts; j++){
// WB_Coord vtx = cell_points.get(j);
// pnts[j][0] = vtx.xf();
// pnts[j][1] = vtx.yf();
// }
//
// // this one gives better results than the voronoi center
//// DwBoundingDisk cell_bs = new DwBoundingDisk();
//// cell_bs.compute(pnts, pnts.length);
//// Vector3f center_of_mass = new Vector3f(cell_bs.pos[0], cell_bs.pos[1], 0f);
//
//// Vector3f center_of_mass = new Vector3f();
//// center_of_mass.x = cell_polygon.getCenter().xf();
//// center_of_mass.y = cell_polygon.getCenter().yf();
//// center_of_mass.z = cell_polygon.getCenter().zf();
//
// Vector3f center_of_mass = new Vector3f();
// center_of_mass.x = (cell.getGenerator().xf() + cell_polygon.getCenter().xf() ) * 0.5f;
// center_of_mass.y = (cell.getGenerator().yf() + cell_polygon.getCenter().yf() ) * 0.5f;
// center_of_mass.z = 0;
//
// // create rigid body coords, center is at 0,0,0
// ObjectArrayList<Vector3f> vertices = new ObjectArrayList<Vector3f>(pnts.length * 2);
// for(int j = 0; j < pnts.length; j++){
// float x = pnts[j][0] - center_of_mass.x;
// float y = pnts[j][1] - center_of_mass.y;
// vertices.add(new Vector3f(x, y, -dimz_half));
// vertices.add(new Vector3f(x, y, +dimz_half));
// }
//
// // create rigid body
// float mass = (float) (cell.getArea() * dimz);
// //mass_sum += mass;
//
// BConvexHull body = new MyBConvexHull(this, mass, vertices, new Vector3f(center_of_mass), true);
////
// // setup initial body transform-matrix
// PMatrix3D mat_p5 = new PMatrix3D(mat_wall);
// mat_p5.translate(center_of_mass.x, center_of_mass.y, center_of_mass.z);
// Transform transform = asBulletTransform(mat_p5);
//
// // rigid-body properties
// body.rigidBody.setWorldTransform(transform);
//// body.rigidBody.setRestitution(.01f);
// body.rigidBody.setFriction(0.94f);
//// body.rigidBody.setDamping(0.2f, 0.2f);
//
//
// // create PShape
// PShape shp_cell = createCellShape(cell_points, dimz, center_of_mass);
////
//// float arean = 20* (float) (cell.getArea() / (dimx * dimy));
////
//// float r = 64;
//// float g = 180;
//// float b = 255 * arean;
//// shp_cell.setFill(color(r,g,b));
//
// // link everything together
// body.displayShape = shp_cell;
// group_bulletbodies.addChild(shp_cell);
//
// bodies.add(body);
// physics.addBody(body);
//
// }
//
//
//
// //float pillar_dim = dimz;
// //float pillar_mass = 0;
//
//
//
//
//// Vector3f pos = new Vector3f(dimx/2 + pillar_dim*0.5f, translate_y, pos_z + dimy/2);
//// BObject obj = new BBox(this, 0, pillar_dim, pillar_dim, dimy);
//// BObject body = new BObject(this, pillar_mass, obj, pos, true);
//// body.setPosition(pos);
//// physics.addBody(body);
////
//// body.displayShape = pillarShape(new Vector3f(pillar_dim, pillar_dim, dimy));
//// group_bulletbodies.addChild(body.displayShape);
////
//// pos = new Vector3f(-dimx/2 - pillar_dim*0.5f, translate_y, pos_z + dimy/2);
//// obj = new BBox(this, 0, pillar_dim, pillar_dim, dimy);
//// body = new BObject(this, pillar_mass, obj, pos, true);
//// body.setPosition(pos);
//// physics.addBody(body);
////
//// body.displayShape = pillarShape(new Vector3f(pillar_dim, pillar_dim, dimy));
//// group_bulletbodies.addChild(body.displayShape);
//
//
//
//
//// {
//// PMatrix3D mat_pillar = mat_wall.get();
//// mat_pillar.translate(dimx * 0.5f + dimz * 0.5f, 0, 0);
//// Transform transform = asBulletTransform(mat_pillar);
////
//// BObject body = new BBox(this, 0, dimz, dimy, dimz);
////
//// body.rigidBody.setWorldTransform(transform);
//// body.rigidBody.getMotionState().setWorldTransform(transform);
////
//// body.displayShape = pillarShape(new Vector3f(dimz, dimy, dimz));
//// group_bulletbodies.addChild(body.displayShape);
//// physics.addBody(body);
//// }
////
////
//// {
//// float dim_x = dimz*5;
////
//// PMatrix3D mat_pillar = mat_wall.get();
//// mat_pillar.translate(dimx * 0.5f - dimz * 0.5f, 0, dimz);
//// Transform transform = asBulletTransform(mat_pillar);
////
//// BObject body = new BBox(this, 0, dim_x, dimy, dimz);
////
//// body.rigidBody.setWorldTransform(transform);
//// body.rigidBody.getMotionState().setWorldTransform(transform);
//// body.rigidBody.setFriction(0.99f);
////
//// body.displayShape = pillarShape(new Vector3f(dim_x, dimy, dimz));
//// group_bulletbodies.addChild(body.displayShape);
//// physics.addBody(body);
//// }
////
//// {
//// float dim_x = dimz*5;
//// PMatrix3D mat_pillar = mat_wall.get();
//// mat_pillar.translate(dimx * 0.5f - dimz * 0.5f, 0, -dimz);
//// Transform transform = asBulletTransform(mat_pillar);
////
//// BObject body = new BBox(this, 0, dim_x, dimy, dimz);
////
//// body.rigidBody.setWorldTransform(transform);
//// body.rigidBody.getMotionState().setWorldTransform(transform);
////
//// body.rigidBody.setFriction(0.99f);
////
//// body.displayShape = pillarShape(new Vector3f(dim_x, dimy, dimz));
//// group_bulletbodies.addChild(body.displayShape);
//// physics.addBody(body);
//// }
//
// createFitting(mat_wall,new Vector3f(dimx, dimy, dimz), true);
// createFitting(mat_wall,new Vector3f(dimx, dimy, dimz), false);
//
//// System.out.println(bodies.size());
//// BCompound voronoi = new BCompound(this, 0, bodies, true);
//// BObject voronoi_obj = new BObject(this, 5, voronoi, new Vector3f(0,0, 500), true);
//// physics.addBody(voronoi_obj);
//
// timer = System.currentTimeMillis() - timer;
// System.out.println("createFractureShape "+timer+" ms");
//
// }
//
//
// public void createFitting(PMatrix3D mat_wall, Vector3f wall_dim, boolean right){
// float dimx = wall_dim.x;
// float dimy = wall_dim.y;
// float dimz = wall_dim.z;
//
//
//
// float dimx2 = dimz*3;
//
// float tx = dimx * 0.5f + dimz * 0.5f;
// float tx2 = tx - dimx2*0.5f + dimz*0.5f;
//
//
// float side = right ? 1f : -1f;
//
// tx *= side;
// tx2 *= side;
//
// {
// PMatrix3D mat_pillar = mat_wall.get();
// mat_pillar.translate(tx, 0, 0);
// Transform transform = asBulletTransform(mat_pillar);
//
// BObject body = new BBox(this, 0, dimz, dimy, dimz);
//
// body.rigidBody.setWorldTransform(transform);
// body.rigidBody.getMotionState().setWorldTransform(transform);
//
// body.displayShape = pillarShape(new Vector3f(dimz, dimy, dimz));
// group_bulletbodies.addChild(body.displayShape);
// physics.addBody(body);
// }
//
//
// {
//
//
// PMatrix3D mat_pillar = mat_wall.get();
// mat_pillar.translate(tx2, 0, dimz);
// Transform transform = asBulletTransform(mat_pillar);
//
// BObject body = new BBox(this, 0, dimx2, dimy, dimz);
//
// body.rigidBody.setWorldTransform(transform);
// body.rigidBody.getMotionState().setWorldTransform(transform);
// body.rigidBody.setFriction(0.99f);
//
// body.displayShape = pillarShape(new Vector3f(dimx2, dimy, dimz));
// group_bulletbodies.addChild(body.displayShape);
// physics.addBody(body);
// }
//
// {
// PMatrix3D mat_pillar = mat_wall.get();
// mat_pillar.translate(tx2, 0, -dimz);
// Transform transform = asBulletTransform(mat_pillar);
//
// BObject body = new BBox(this, 0, dimx2, dimy, dimz);
//
// body.rigidBody.setWorldTransform(transform);
// body.rigidBody.getMotionState().setWorldTransform(transform);
//
// body.rigidBody.setFriction(0.99f);
//
// body.displayShape = pillarShape(new Vector3f(dimx2, dimy, dimz));
// group_bulletbodies.addChild(body.displayShape);
// physics.addBody(body);
// }
// }
//
//
//
//
// public PShape pillarShape(Vector3f dim){
// PShape shp = createShape(BOX, dim.x, dim.y, dim.z);
// shp.setStroke(false);
// shp.setFill(true);
// shp.setFill(color(16));
// shp.setStrokeWeight(1);
// shp.setStroke(color(0));
// return shp;
// }
//
//
// public Transform asBulletTransform(PMatrix3D mat_p5){
// Matrix4f mat = new Matrix4f();
// mat.setRow(0, mat_p5.m00, mat_p5.m01, mat_p5.m02, mat_p5.m03);
// mat.setRow(1, mat_p5.m10, mat_p5.m11, mat_p5.m12, mat_p5.m13);
// mat.setRow(2, mat_p5.m20, mat_p5.m21, mat_p5.m22, mat_p5.m23);
// mat.setRow(3, mat_p5.m30, mat_p5.m31, mat_p5.m32, mat_p5.m33);
// return new Transform(mat);
// }
//
//
//
// // add ground bodies
// public void addGround(){
// {
// Vector3f pos = new Vector3f(0,0,10);
// BObject body = new BBox(this, 0, 650, 650, 20);
//// BObject body = new BBox(this, 0, pos.x, pos.y, pos.z, 650, 650, 20);
//// BObject body = new BObject(this, 0, obj, new Vector3f(), true);
//
//// body.setPosition(pos);
//// Transform transform = new Transform();
//// body.rigidBody.getMotionState().getWorldTransform(transform);
//// body.rigidBody.setWorldTransform(transform);
//
//// Transform transform = new Transform();
//// transform.set
//
//// body.rigidBody.getWorldTransform(transform);
//// transform.basis.transform(arg0);
//
//// body.rigidBody.setWorldTransform(transform);
//// body.rigidBody.getMotionState().setWorldTransform(transform);
//
//
//// PMatrix3D mat = new PMatrix3D();
//// mat.translate(pos.x, pos.y, pos.z);
//// Transform transform = asBulletTransform(mat);
//// body.rigidBody.setWorldTransform(transform);
//// body.rigidBody.getMotionState().setWorldTransform(transform);
//
// Transform transform = new Transform();
// body.rigidBody.getWorldTransform(transform);
//
// transform.origin.set(pos);
// body.rigidBody.setWorldTransform(transform);
// body.rigidBody.getMotionState().setWorldTransform(transform);
//
//
//
// body.displayShape = createShape(BOX, 650, 650, 20);
// body.displayShape.setStroke(false);
// body.displayShape.setFill(true);
// body.displayShape.setFill(color(200, 96, 16));
// body.displayShape.setFill(color(255));
// body.displayShape.setStrokeWeight(1);
// body.displayShape.setStroke(color(0));
//// if(body instanceof BBox){
//// fixBoxNormals(body.displayShape);
//// }
// physics.addBody(body);
// group_bulletbodies.addChild(body.displayShape);
// body.displayShape.setName("ground_box");
// }
// }
//
//
//
//
//
// // render scene
// public void displayScene(PGraphics3D pg){
// if(pg == skylight.renderer.pg_render){
// pg.background(16);
// }
//
// if(pg == geombuffer.pg_geom){
// pg.background(255, 255);
// pg.pgl.clearColor(1, 1, 1, 6000);
// pg.pgl.clear(PGL.COLOR_BUFFER_BIT);
// }
//
// pg.pushMatrix();
// pg.applyMatrix(mat_scene_view);
// pg.shape(group_bulletbodies);
// pg.popMatrix();
// }
//
//
// // update PShape matrices
// Transform transform = new Transform();
// Matrix4f out = new Matrix4f();
//
// public void updateShapes(BObject body){
// if (body.displayShape != null) {
// body.displayShape.resetMatrix();
// if (body.getMass() < 0) {
// transform = body.rigidBody.getMotionState().getWorldTransform(transform);
// out = transform.getMatrix(out);
// body.transform.set(transform);
// body.displayShape.applyMatrix(out.m00, out.m01, out.m02, out.m03, out.m10, out.m11, out.m12, out.m13, out.m20, out.m21, out.m22, out.m23, out.m30, out.m31, out.m32, out.m33);
//
// } else {
//// transform = body.rigidBody.getWorldTransform(transform);
//// body.transform.set(transform);
//// body.displayShape.translate(transform.origin.x, transform.origin.y, transform.origin.z);
//
// transform = body.rigidBody.getMotionState().getWorldTransform(transform);
// out = transform.getMatrix(out);
// body.transform.set(transform);
// body.displayShape.applyMatrix(out.m00, out.m01, out.m02, out.m03, out.m10, out.m11, out.m12, out.m13, out.m20, out.m21, out.m22, out.m23, out.m30, out.m31, out.m32, out.m33);
//
// }
// }
// }
//
//
// public void saveScreenshot(){
// File file = capture.createFilename();
//// pg_aa.save(file.getAbsolutePath());
// save(file.getAbsolutePath());
// System.out.println(file);
// }
//
//
// public static void main(String args[]) {
// PApplet.main(new String[] { Skylight_BulletPhysics_Breakable1.class.getName() });
// }
//}
|
|
/**
* Holico : Proposition d'implementation du HomeBus Holico
*
* Module name: com.francetelecom.rd.holico-tools.node-simulator-android
* Version: 0.4-SNAPSHOT
*
* Copyright (C) 2013 Orange
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Orange nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* http://opensource.org/licenses/BSD-3-Clause
*/
package com.francetelecom.rd.app.nodessimulator.devices;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.support.v4.app.FragmentActivity;
import com.francetelecom.rd.hlc.HlcConnector;
import com.francetelecom.rd.hlc.HomeBusException;
import com.francetelecom.rd.hlc.InvalidResourcePathException;
import com.francetelecom.rd.hlc.InvalidResourceTypeException;
import com.francetelecom.rd.hlc.Node;
import com.francetelecom.rd.hlc.NodeServiceCallback;
import com.francetelecom.rd.hlc.Resource;
import com.francetelecom.rd.hlc.impl.SdsAdapterListener;
public class NodeManager extends SdsAdapterListener implements NodeManagerCallback {
Node myNode;
// ====================================================================
public enum NodeType { SHUTTER, SPRINKLER, ALARM, UNKNOWN, DATAMODEL };
// ====================================================================
public boolean isPublished = false;
public String id;
public String name;
public String manufacturer;
public String version;
public NodeType type;
private final FragmentActivity activity;
public final String intentFilter;
public HlcConnector connector;
public List<String> services = new ArrayList<String>();
public Map<String, Object> resources = new HashMap<String, Object>();
// ====================================================================
public NodeManager(String id, String name, String manufacturer, String version, NodeType type, String intentFilter, FragmentActivity activity) {
this.id = id;
this.name = name;
this.type = type;
this.manufacturer = manufacturer;
this.version = version;
this.activity = activity;
this.intentFilter = intentFilter;
try {
myNode = DeviceManager.getInstance().getHomeBusFactory().createNode(this.id, "device_id", this.name);
if(this.manufacturer != null && !this.manufacturer.isEmpty())
{
myNode.setManufacturer(this.manufacturer);
}
if(this.version != null && !this.version.isEmpty())
{
myNode.setVersion(this.version);
}
} catch (Exception e) {
e.printStackTrace();
}
}
public void publish()
{
try {
if(myNode.publishOnHomeBus()) {
this.connector = myNode.getHlcConnector();
onNodePublished();
}
} catch (HomeBusException e) {
e.printStackTrace();
} catch (InvalidResourceTypeException e) {
e.printStackTrace();
} catch (InvalidResourcePathException e) {
e.printStackTrace();
}
}
public void unpublish()
{
/*try {
myNode.removesFromHlc();
} catch (HomeBusException e) {
e.printStackTrace();
}*/
}
@SuppressLint("UseValueOf")
public void addResource(String name, String path, int type)
{
try {
myNode.addResourcePublication(name, path, type);
switch(type){
case Resource.TYPE_VALUE_BOOL:
resources.put(path, new Boolean(false));
break;
case Resource.TYPE_VALUE_INT:
resources.put(path, new Integer(-1));
break;
case Resource.TYPE_VALUE_STRING:
resources.put(path, "");
break;
}
} catch (HomeBusException e) {
e.printStackTrace();
} catch (InvalidResourceTypeException e) {
e.printStackTrace();
} catch (InvalidResourcePathException e) {
e.printStackTrace();
}
}
public void setResource(String path, Object value)
{
try {
if(myNode.isPublishedOnHomeBus()) {
resources.put(path, value);
myNode.publishOnResource(path, value);
}
} catch (HomeBusException e) {
e.printStackTrace();
} catch (InvalidResourceTypeException e) {
e.printStackTrace();
} catch (InvalidResourcePathException e) {
e.printStackTrace();
}
}
public void addService(String id, String name, boolean isPrivate, NodeServiceCallback callback)
{
try {
services.add(name);
myNode.addNodeService(id, name, isPrivate, callback);
} catch (HomeBusException e) {
e.printStackTrace();
}
}
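// Illustrative usage sketch (not part of the original source): a device wrapper
// would typically create the manager, declare its resources and services, and
// only then publish on the bus, e.g.
//
//   NodeManager node = new NodeManager(id, "Shutter", "Acme", "1.0",
//           NodeType.SHUTTER, "com.example.SHUTTER_UPDATE", activity);
//   node.addResource("position", "Home.Shutter.Position", Resource.TYPE_VALUE_INT);
//   node.addService(serviceId, "open", false, callback);
//   node.publish();
//   node.setResource("Home.Shutter.Position", Integer.valueOf(100));
//
// Here id, serviceId, callback and activity stand for values the host app already
// has; the manufacturer, resource path and intent filter are made-up examples.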
// ====================================================================
private void subscribeForPublicationUpdates(){
Iterator<String> it = this.resources.keySet().iterator();
while(it.hasNext()) {
String resourcePath = (String)it.next();
//DeviceManager.getInstance().getHomeBusFactory().getSdsAdaptor().addSdsAdapterListener("HomeLifeContext." + resourcePath, this);
}
}
// ====================================================================
@Override
public String toString()
{
return this.name;
}
// ====================================================================
@Override
public void onNodePublished() {
// Intended to be overridden by subclasses of this class
subscribeForPublicationUpdates();
}
// ====================================================================
@Override
public void onResourceArrived(Resource resource) {
refreshResource(resource);
}
@Override
public void onResourceChanged(Resource resource) {
refreshResource(resource);
}
@Override
public void onResourceLeft(Resource resource) {
refreshResource(resource);
}
private void refreshResource(Resource resource) {
RunnableRefresher refresher = new RunnableRefresher();
refresher.setResource(resource);
this.activity.runOnUiThread(refresher);
}
// ====================================================================
public class RunnableRefresher implements Runnable {
private Resource resource;
public void setResource(Resource _resource) {
this.resource = _resource;
}
public void run() {
try {
Intent intent = new Intent(intentFilter);
intent.putExtra("path", this.resource.getPath());
switch(this.resource.getResourceType()){
case Resource.TYPE_VALUE_INT:
intent.putExtra("value", (Integer)this.resource.getValue());
break;
case Resource.TYPE_VALUE_BOOL:
intent.putExtra("value", (Boolean)this.resource.getValue());
break;
case Resource.TYPE_VALUE_STRING:
intent.putExtra("value", (String)this.resource.getValue());
break;
}
activity.getBaseContext().getApplicationContext().sendBroadcast(intent);
} catch (InvalidResourceTypeException e) {
e.printStackTrace();
}
}
}
}
|
|
/**
* Copyright (c) 2012-2013, Daniele Codecasa <[email protected]>,
* Models and Algorithms for Data & Text Mining (MAD) laboratory of
* Milano-Bicocca University, and all the CTBNCToolkit contributors
* that will follow.
* All rights reserved.
*
* @author Daniele Codecasa and all the CTBNCToolkit contributors that will follow.
* @copyright 2012-2013 Daniele Codecasa, MAD laboratory, and all the CTBNCToolkit contributors that will follow
*/
package CTBNCToolkit;
import java.util.*;
/**
* @author Daniele Codecasa <[email protected]>
*
* This class extends the discrete node, adding the CIM
* table.
* This class doesn't allow state names that contain
* the character '@' (it is used as a special character).
*/
public class CTDiscreteNode extends DiscreteNode {
private boolean staticNode;
private int nParentsEntries = 1;
private boolean validatedCIMs = false;
private List<double[][]> CIMs = null;
/**
* Constructor of a node that evolves in continuous time.
*
* @param nodeName node name
* @param states set of states of the node
* @exception IllegalStateException if a state contains character '@'
*/
public CTDiscreteNode(String nodeName, Set<String> states) throws IllegalStateException {
this(nodeName, states, false);
}
/**
* Constructor
*
* @param nodeName node name
* @param states set of states of the node
* @param staticNode true if the node is static, false if can evolve continuously in time
* @exception IllegalStateException if a state contains character '@'
*/
public CTDiscreteNode(String nodeName, Set<String> states, boolean staticNode) throws IllegalStateException {
super(nodeName, states);
Iterator<String> iter = states.iterator();
while(iter.hasNext())
if(iter.next().contains("@"))
throw new IllegalStateException("Error: states names can not contain character '@'");
this.staticNode = staticNode;
this.CIMs = CTDiscreteNode.generateCIMsorCPTs(this.nParentsEntries, states.size(), this.staticNode);
this.validatedCIMs = false;
}
/**
* Returns the entry value used to select the right CIM,
* given that all the parents are instanced.
*
* @return the parents entry value
* @throws IllegalArgumentException if one parent is not instanced
*/
public int getCurrentParentsEntry() throws IllegalArgumentException {
int entry = 0;
int parentMultiplier = 1;
for(int i = 0; i < this.getParentsNumber(); ++i) {
Integer stateIndex = this.getParent(i).getCurrentStateIndex();
if( stateIndex == null)
throw new IllegalArgumentException("Error: parent number " + i + " (" + this.getParent(i).getName() + ") is not instanced");
entry += stateIndex * parentMultiplier;
parentMultiplier *= this.getParent(i).getStatesNumber();
}
return entry;
}
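// Worked example for getCurrentParentsEntry() (illustrative, not from the original
// source): with two parents P0 (3 states) and P1 (2 states), the entry is a
// mixed-radix index with P0 as the least significant digit, so parent states
// (P0=1, P1=1) give entry = 1 * 1 + 1 * 3 = 4, and getNumberParentsEntries() == 6.
// setParentsEntry(int) below performs the inverse decoding.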
/**
* Sets the parents' values given the parents
* entry.
*
* @param pE parents entry (depends on the parents ordering)
* @throws IllegalArgumentException
*/
public void setParentsEntry(int pE) throws IllegalArgumentException {
if( this.getParentsNumber() == 0)
return;
int parentMultiplier = this.getNumberParentsEntries();
int i = this.getParentsNumber() - 1;
do {
CTDiscreteNode parent = this.getParent(i);
parentMultiplier /= parent.getStatesNumber();
int newState = pE / parentMultiplier;
parent.setEvidence( newState);
pE %= parentMultiplier;
--i;
}while(i >= 0);
}
/**
* Return the number of parents entries.
*
* @return number of parents entries.
*/
public int getNumberParentsEntries() {
return this.nParentsEntries;
}
/**
* Obtain the CIM value for a particular entry.
*
* @param parentEntry parent entry that identify the parents values
* @param s0 state 0 (for static node always 0)
* @param s1 state 1 (for static node the index of the state for which get the probability)
* @return q value
* @throws RuntimeException if the indexes are out of bounds or there are no states
*/
public double getCIMValue(int parentEntry, int s0, int s1) throws RuntimeException {
return this.CIMs.get(parentEntry)[s0][s1];
}
/**
* Return the CIM given the parent entry.
*
* @param parentEntry parent entry that identify the parents values
* @return the CIM given the parents value
* @throws RuntimeException if the indexes are out of bounds or there are no states
*/
public double[][] getCIM(int parentEntry) throws RuntimeException {
return this.CIMs.get(parentEntry);
}
/**
* Insert a value in the CIM for a particular
* parent entry.
*
* @param parentEntry parent entry that identify the parents values
* @param s0 state 0
* @param s1 state 1
* @param value value to insert
* @throws RuntimeException if the indexes are out of bounds or there are no states
*/
public void setCIMValue(int parentEntry, int s0, int s1, double value) throws RuntimeException {
this.CIMs.get(parentEntry)[s0][s1] = value;
this.validatedCIMs = false;
}
/**
* Insert a complete CIM for a particular parent
* entry.
*
* @param parentEntry parent entry that identify the parents values
* @param CIM CIM to insert
* @throws RuntimeException if the indexes are out of bounds or there are no states
*/
public void setCIM(int parentEntry, double[][] CIM) throws RuntimeException {
if(CIM != null) {
if( (!this.isStaticNode()) && CIM.length != CIM[0].length)
throw new IllegalArgumentException("The CIM must be a squared matrix in continuous time nodes");
else if( this.isStaticNode() && CIM.length != 1)
throw new IllegalArgumentException("The CIM must be a vector in static nodes");
if(CIM[0].length != this.getStatesNumber())
throw new IllegalArgumentException("The CIM dimensions must correspond with number of states of the node");
}
this.CIMs.set(parentEntry, CIM);
this.validatedCIMs = false;
}
/**
* Informs whether the node is a static node or a
* continuous time node.
* (In a CTBNC the class node is a static node.)
*
* @return true if it is a static node, false otherwise.
*/
public boolean isStaticNode() {
return this.staticNode;
}
/**
* True if the CIMs were validated,
* false otherwise.
*
* @return if all the CIMs were validated.
*/
public boolean validatedCIMs() {
return this.validatedCIMs;
}
/**
* Check the validity of all the CIMs
* and return the validation result.
*
* @return -1 if all the CIMs are validated, otherwise the index of the first not valid parent entry CIM
*/
public int checkCIMs() {
if( this.validatedCIMs)
return -1;
if( this.CIMs == null)
return 0;
for( int pE = 0; pE < this.CIMs.size(); ++pE) {
double[][] cim = this.CIMs.get(pE);
if(cim == null)
return pE;
for(int s0 = 0; s0 < cim.length; ++s0) {
double sum = 0.0;
for( int s1 = 0; s1 < cim[s0].length; ++s1) {
if(this.staticNode) {
if (cim[s0][s1] < 0 || cim[s0][s1] > 1)
return pE;
} else if((s0 == s1 && cim[s0][s1] > 0) || (s0 != s1 && cim[s0][s1] < 0))
return pE;
sum += cim[s0][s1];
}
if(((!this.staticNode) && (!Node.equalsDoubles(sum, 0.0, 0.0000001))) || (this.staticNode && (!Node.equalsDoubles(sum, 1.0, 0.0000001))))
return pE;
}
}
this.validatedCIMs = true;
return -1;
}
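// Validity example for checkCIMs() (illustrative): for a continuous-time node with
// two states the matrix [[-2.0, 2.0], [3.0, -3.0]] is a valid CIM (non-positive
// diagonal, non-negative off-diagonal entries, rows summing to 0), while a static
// node expects a single CPT row such as [0.3, 0.7] with entries in [0, 1] summing to 1.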
/**
* Sample the next transition time given
* the current state and the current parents
* states.
*
* @return the sampled time
* @throws RuntimeException if the system state doesn't allow to generate a transition time
*/
public double sampleTransitionTime() throws RuntimeException {
if( this.isStaticNode())
throw new IllegalStateException("Error: static nodes can not change state in time");
if( !this.validatedCIMs())
throw new IllegalStateException("Error: CIMs are not validated");
if( !this.isInstanced())
throw new IllegalStateException("Error: transition time can be calculated only for instanced nodes");
double lambda = -this.getCIMValue(this.getCurrentParentsEntry(), this.getCurrentStateIndex(), this.getCurrentStateIndex());
if( lambda == 0) // if lambda is equal to zero the state is an absorbing state and can not change
return Double.POSITIVE_INFINITY;
return CTDiscreteNode.expSample(lambda);
}
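// Note on sampleTransitionTime() (illustrative): the holding time in the current
// state is exponential with rate lambda = -q(s,s); e.g. q(s,s) = -2.0 gives an
// expected sojourn time of 1/2.0 = 0.5 time units before the next jump.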
/**
* Sample a state given the parents.
* If the node is static, sample a state
* using the prior. If the node is continuous,
* sample the next jumping state.
* Note: this samples a new state but does not set it.
*
* @return index of the sampled state
* @throws RuntimeException if the system state doesn't allow to generate a new state
*/
public int sampleState() throws RuntimeException {
if( !this.validatedCIMs())
throw new IllegalStateException("Error: CIMs are not validated");
if( this.isStaticNode()) // if the node is static
return DiscreteModel.sample(this.getCIM(this.getCurrentParentsEntry())[0]);
else if( !this.isInstanced()) // if the node is continuous but not instanced
throw new IllegalStateException("Error: the next state can be calculated only for instanced continuous nodes");
// If the node is continuous and it is instanced
int stateIndex = this.getCurrentStateIndex();
double[] pDistr = (this.getCIM(this.getCurrentParentsEntry())[stateIndex]).clone();
if( pDistr.length < 2)
throw new IllegalStateException("Error: in continuous node to jump to the next state you need at least 2 states");
if( pDistr[stateIndex] == 0)
throw new IllegalStateException("Error: the node it is in an absorbing state. New states can not be sampled");
for(int i = 0; i < pDistr.length; ++i) // generate the probability distribution
if( i != stateIndex)
pDistr[i] /= -pDistr[stateIndex];
pDistr[stateIndex] = 0.0;
return DiscreteModel.sample(pDistr);
}
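// Worked example for sampleState() (illustrative): if the current CIM row of a
// continuous-time node in state 0 is [-3.0, 1.0, 2.0], the jump distribution
// becomes [0.0, 1.0/3.0, 2.0/3.0], i.e. each off-diagonal rate divided by -q(0,0).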
@Override
public boolean addParent(INode parent) throws RuntimeException {
CTDiscreteNode ctParent = (CTDiscreteNode) parent;
if( this.staticNode)
throw new RuntimeException("Error: static nodes (class node) can not have parent");
if( super.addParent(ctParent)) {
this.nParentsEntries *= ctParent.getStatesNumber();
this.CIMs = CTDiscreteNode.generateCIMsorCPTs( this.nParentsEntries, this.getStatesNumber(), this.staticNode);
this.validatedCIMs = false;
return true;
}
return false;
}
@Override
public boolean removeParent(INode parent) throws RuntimeException {
CTDiscreteNode ctParent = (CTDiscreteNode) parent;
if( super.removeParent(ctParent)) {
this.nParentsEntries /= ctParent.getStatesNumber();
this.CIMs = CTDiscreteNode.generateCIMsorCPTs( this.nParentsEntries, this.getStatesNumber(), this.staticNode);
this.validatedCIMs = false;
return true;
}
return false;
}
// @Override
// protected int addState(String stateName) throws IllegalStateException {
//
// if(stateName.contains("@"))
// throw new IllegalStateException("Error: states names can not contain character '@'");
//
// int index = super.addState(stateName);
// if( index != -1) {
// this.CIMs = CTDiscreteNode.generateCIMsorCPTs( this.nParentsEntries, this.getStatesNumber(), this.staticNode);
// this.validatedCIMs = false;
// }
//
// return index;
// }
@Override
public CTDiscreteNode getChild(int i) throws IllegalArgumentException {
return (CTDiscreteNode) super.getChild(i);
}
@Override
public CTDiscreteNode getChild(String childName) throws IllegalArgumentException {
return (CTDiscreteNode) super.getChild(childName);
}
@Override
public CTDiscreteNode getParent(int i) throws IllegalArgumentException {
return (CTDiscreteNode) super.getParent(i);
}
@Override
public CTDiscreteNode getParent(String parentName) throws IllegalArgumentException {
return (CTDiscreteNode) super.getParent(parentName);
}
/**
* Clones the node without copying the parental
* relations; only the name and the states are copied.
* The CIMs are not copied either.
*
* @return the cloned node
*/
@Override
public CTDiscreteNode clone() {
TreeSet<String> states = new TreeSet<String>();
for( int i = 0; i < this.getStatesNumber(); ++i)
states.add(this.getStateName(i));
CTDiscreteNode clonedNode = new CTDiscreteNode(this.getName(), states, this.staticNode);
if( this.getCurrentState() != null)
clonedNode.setEvidence(this.getCurrentState());
return clonedNode;
}
/**
* Generate the CIMs or the CPTs for the node.
*
* @param nParentsEntries number of entries for the parents
* @param nStates number of node states
* @param CPT true if the method has to generate CPTs, false if it has to generate CIMs
* @return the generated CIMs (null if there are no states)
*/
static private List<double[][]> generateCIMsorCPTs(int nParentsEntries, int nStates, boolean CPT) {
if(nStates < 1)
return null;
List<double[][]> newCIMs = new Vector<double[][]>(nParentsEntries);
for( int i = 0; i < nParentsEntries; ++i)
if(!CPT)
newCIMs.add(new double[nStates][nStates]);
else
newCIMs.add(new double[1][nStates]);
return newCIMs;
}
/**
* Sample a value with exponential distribution
* with parameter lambda.
*
* @param lambda lambda parameter of exponential distribution
* @return sampled time
*/
static public double expSample(double lambda) {
return -Math.log(1 - Math.random()) / lambda ;
}
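// Note on expSample() (illustrative): this is inverse-transform sampling; if U is
// uniform on (0,1), then -ln(1 - U) / lambda is exponentially distributed with
// rate lambda.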
}
|
|
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.ldap.userdetails;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import javax.naming.Context;
import javax.naming.NameNotFoundException;
import javax.naming.NamingEnumeration;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.DirContext;
import javax.naming.directory.ModificationItem;
import javax.naming.directory.SearchControls;
import javax.naming.ldap.ExtendedRequest;
import javax.naming.ldap.ExtendedResponse;
import javax.naming.ldap.LdapContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.core.log.LogMessage;
import org.springframework.ldap.core.AttributesMapper;
import org.springframework.ldap.core.AttributesMapperCallbackHandler;
import org.springframework.ldap.core.ContextExecutor;
import org.springframework.ldap.core.ContextSource;
import org.springframework.ldap.core.DirContextAdapter;
import org.springframework.ldap.core.DistinguishedName;
import org.springframework.ldap.core.LdapTemplate;
import org.springframework.ldap.core.SearchExecutor;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.ldap.DefaultLdapUsernameToDnMapper;
import org.springframework.security.ldap.LdapUsernameToDnMapper;
import org.springframework.security.ldap.LdapUtils;
import org.springframework.security.provisioning.UserDetailsManager;
import org.springframework.util.Assert;
/**
* An Ldap implementation of UserDetailsManager.
* <p>
* It is designed around a standard setup where users and groups/roles are stored under
* separate contexts, defined by the "userDnBase" and "groupSearchBase" properties
* respectively.
* <p>
* In this case, LDAP is being used purely to retrieve information and this class can be
* used in place of any other UserDetailsService for authentication. Authentication isn't
* performed directly against the directory, unlike with the LDAP authentication provider
* setup.
*
* @author Luke Taylor
* @author Josh Cummings
* @since 2.0
*/
public class LdapUserDetailsManager implements UserDetailsManager {
private final Log logger = LogFactory.getLog(LdapUserDetailsManager.class);
/**
* The strategy for mapping usernames to LDAP distinguished names. This will be used
* when building DNs for creating new users etc.
*/
LdapUsernameToDnMapper usernameMapper = new DefaultLdapUsernameToDnMapper("cn=users", "uid");
/** The DN under which groups are stored */
private DistinguishedName groupSearchBase = new DistinguishedName("cn=groups");
/** Password attribute name */
private String passwordAttributeName = "userPassword";
/** The attribute which corresponds to the role name of a group. */
private String groupRoleAttributeName = "cn";
/** The attribute which contains members of a group */
private String groupMemberAttributeName = "uniquemember";
private final String rolePrefix = "ROLE_";
/** The pattern to be used for the user search. {0} is the user's DN */
private String groupSearchFilter = "(uniquemember={0})";
/**
* The strategy used to create a UserDetails object from the LDAP context, username
* and list of authorities. This should be set to match the required UserDetails
* implementation.
*/
private UserDetailsContextMapper userDetailsMapper = new InetOrgPersonContextMapper();
private final LdapTemplate template;
/** Default context mapper used to create a set of roles from a list of attributes */
private AttributesMapper roleMapper = (attributes) -> {
Attribute roleAttr = attributes.get(this.groupRoleAttributeName);
NamingEnumeration<?> ne = roleAttr.getAll();
Object group = ne.next();
String role = group.toString();
return new SimpleGrantedAuthority(this.rolePrefix + role.toUpperCase());
};
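// Illustrative mapping (not from the original source): with the defaults above, a
// group entry whose "cn" attribute is "managers" is mapped to the granted
// authority "ROLE_MANAGERS".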
private String[] attributesToRetrieve;
private boolean usePasswordModifyExtensionOperation = false;
public LdapUserDetailsManager(ContextSource contextSource) {
this.template = new LdapTemplate(contextSource);
}
@Override
public UserDetails loadUserByUsername(String username) {
DistinguishedName dn = this.usernameMapper.buildDn(username);
List<GrantedAuthority> authorities = getUserAuthorities(dn, username);
this.logger.debug(LogMessage.format("Loading user '%s' with DN '%s'", username, dn));
DirContextAdapter userCtx = loadUserAsContext(dn, username);
return this.userDetailsMapper.mapUserFromContext(userCtx, username, authorities);
}
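// Illustrative note: with the DefaultLdapUsernameToDnMapper("cn=users", "uid")
// configured above, loadUserByUsername("joe") resolves the relative DN
// "uid=joe,cn=users" before loading the entry and its group-derived authorities;
// "joe" is a made-up example username.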
private DirContextAdapter loadUserAsContext(final DistinguishedName dn, final String username) {
return (DirContextAdapter) this.template.executeReadOnly((ContextExecutor) (ctx) -> {
try {
Attributes attrs = ctx.getAttributes(dn, this.attributesToRetrieve);
return new DirContextAdapter(attrs, LdapUtils.getFullDn(dn, ctx));
}
catch (NameNotFoundException ex) {
throw new UsernameNotFoundException("User " + username + " not found", ex);
}
});
}
/**
* Changes the password for the current user. The username is obtained from the
* security context.
*
* There are two supported strategies for modifying the user's password depending on
* the capabilities of the corresponding LDAP server.
*
* <p>
* Configured one way, this method will modify the user's password via the
* <a target="_blank" href="https://tools.ietf.org/html/rfc3062"> LDAP Password Modify
* Extended Operation </a>.
*
* See {@link LdapUserDetailsManager#setUsePasswordModifyExtensionOperation(boolean)}
* for details.
* </p>
*
* <p>
* By default, though, if the old password is supplied, the update will be made by
* rebinding as the user, thus modifying the password using the user's permissions. If
* <code>oldPassword</code> is null, the update will be attempted using a standard
* read/write context supplied by the context source.
* </p>
* @param oldPassword the old password
* @param newPassword the new value of the password.
*/
@Override
public void changePassword(final String oldPassword, final String newPassword) {
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
Assert.notNull(authentication,
"No authentication object found in security context. Can't change current user's password!");
String username = authentication.getName();
this.logger.debug(LogMessage.format("Changing password for user '%s'", username));
DistinguishedName userDn = this.usernameMapper.buildDn(username);
if (this.usePasswordModifyExtensionOperation) {
changePasswordUsingExtensionOperation(userDn, oldPassword, newPassword);
}
else {
changePasswordUsingAttributeModification(userDn, oldPassword, newPassword);
}
}
/**
* @param dn the distinguished name of the entry - may be either relative to the base
* context or a complete DN including the name of the context (either is supported).
* @param username the user whose roles are required.
* @return the granted authorities returned by the group search
*/
@SuppressWarnings("unchecked")
List<GrantedAuthority> getUserAuthorities(final DistinguishedName dn, final String username) {
SearchExecutor se = (ctx) -> {
DistinguishedName fullDn = LdapUtils.getFullDn(dn, ctx);
SearchControls ctrls = new SearchControls();
ctrls.setReturningAttributes(new String[] { this.groupRoleAttributeName });
return ctx.search(this.groupSearchBase, this.groupSearchFilter, new String[] { fullDn.toUrl(), username },
ctrls);
};
AttributesMapperCallbackHandler roleCollector = new AttributesMapperCallbackHandler(this.roleMapper);
this.template.search(se, roleCollector);
return roleCollector.getList();
}
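	/*
	 * Illustration (not part of the original class): with the default groupSearchFilter
	 * "(uniquemember={0})", a user whose full DN is "uid=bob,cn=users,dc=example,dc=org"
	 * (a hypothetical entry) results in the LDAP search
	 *
	 *   base:   cn=groups
	 *   filter: (uniquemember=uid=bob,cn=users,dc=example,dc=org)
	 *
	 * and each matching group's "cn" value is upper-cased and mapped to a "ROLE_"-prefixed
	 * authority by the roleMapper defined above.
	 */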
@Override
public void createUser(UserDetails user) {
DirContextAdapter ctx = new DirContextAdapter();
copyToContext(user, ctx);
DistinguishedName dn = this.usernameMapper.buildDn(user.getUsername());
this.logger.debug(LogMessage.format("Creating new user '%s' with DN '%s'", user.getUsername(), dn));
this.template.bind(dn, ctx, null);
// Check for any existing authorities which might be set for this
// DN and remove them
List<GrantedAuthority> authorities = getUserAuthorities(dn, user.getUsername());
if (authorities.size() > 0) {
removeAuthorities(dn, authorities);
}
addAuthorities(dn, user.getAuthorities());
}
@Override
public void updateUser(UserDetails user) {
DistinguishedName dn = this.usernameMapper.buildDn(user.getUsername());
this.logger.debug(LogMessage.format("Updating new user '%s' with DN '%s'", user.getUsername(), dn));
List<GrantedAuthority> authorities = getUserAuthorities(dn, user.getUsername());
DirContextAdapter ctx = loadUserAsContext(dn, user.getUsername());
ctx.setUpdateMode(true);
copyToContext(user, ctx);
// Remove the objectclass attribute from the list of mods (if present).
List<ModificationItem> mods = new LinkedList<>(Arrays.asList(ctx.getModificationItems()));
ListIterator<ModificationItem> modIt = mods.listIterator();
while (modIt.hasNext()) {
ModificationItem mod = modIt.next();
Attribute a = mod.getAttribute();
if ("objectclass".equalsIgnoreCase(a.getID())) {
modIt.remove();
}
}
this.template.modifyAttributes(dn, mods.toArray(new ModificationItem[0]));
// template.rebind(dn, ctx, null);
		// Remove the old authorities and replace them with the new ones
removeAuthorities(dn, authorities);
addAuthorities(dn, user.getAuthorities());
}
@Override
public void deleteUser(String username) {
DistinguishedName dn = this.usernameMapper.buildDn(username);
removeAuthorities(dn, getUserAuthorities(dn, username));
this.template.unbind(dn);
}
@Override
public boolean userExists(String username) {
DistinguishedName dn = this.usernameMapper.buildDn(username);
try {
Object obj = this.template.lookup(dn);
if (obj instanceof Context) {
LdapUtils.closeContext((Context) obj);
}
return true;
}
catch (org.springframework.ldap.NameNotFoundException ex) {
return false;
}
}
/**
* Creates a DN from a group name.
* @param group the name of the group
* @return the DN of the corresponding group, including the groupSearchBase
*/
protected DistinguishedName buildGroupDn(String group) {
DistinguishedName dn = new DistinguishedName(this.groupSearchBase);
dn.add(this.groupRoleAttributeName, group.toLowerCase());
return dn;
}
protected void copyToContext(UserDetails user, DirContextAdapter ctx) {
this.userDetailsMapper.mapUserToContext(user, ctx);
}
protected void addAuthorities(DistinguishedName userDn, Collection<? extends GrantedAuthority> authorities) {
modifyAuthorities(userDn, authorities, DirContext.ADD_ATTRIBUTE);
}
protected void removeAuthorities(DistinguishedName userDn, Collection<? extends GrantedAuthority> authorities) {
modifyAuthorities(userDn, authorities, DirContext.REMOVE_ATTRIBUTE);
}
private void modifyAuthorities(final DistinguishedName userDn,
final Collection<? extends GrantedAuthority> authorities, final int modType) {
this.template.executeReadWrite((ContextExecutor) (ctx) -> {
for (GrantedAuthority authority : authorities) {
String group = convertAuthorityToGroup(authority);
DistinguishedName fullDn = LdapUtils.getFullDn(userDn, ctx);
ModificationItem addGroup = new ModificationItem(modType,
new BasicAttribute(this.groupMemberAttributeName, fullDn.toUrl()));
ctx.modifyAttributes(buildGroupDn(group), new ModificationItem[] { addGroup });
}
return null;
});
}
private String convertAuthorityToGroup(GrantedAuthority authority) {
String group = authority.getAuthority();
if (group.startsWith(this.rolePrefix)) {
group = group.substring(this.rolePrefix.length());
}
return group;
}
public void setUsernameMapper(LdapUsernameToDnMapper usernameMapper) {
this.usernameMapper = usernameMapper;
}
public void setPasswordAttributeName(String passwordAttributeName) {
this.passwordAttributeName = passwordAttributeName;
}
public void setGroupSearchBase(String groupSearchBase) {
this.groupSearchBase = new DistinguishedName(groupSearchBase);
}
public void setGroupRoleAttributeName(String groupRoleAttributeName) {
this.groupRoleAttributeName = groupRoleAttributeName;
}
public void setAttributesToRetrieve(String[] attributesToRetrieve) {
Assert.notNull(attributesToRetrieve, "attributesToRetrieve cannot be null");
this.attributesToRetrieve = attributesToRetrieve;
}
public void setUserDetailsMapper(UserDetailsContextMapper userDetailsMapper) {
this.userDetailsMapper = userDetailsMapper;
}
/**
* Sets the name of the multi-valued attribute which holds the DNs of users who are
* members of a group.
* <p>
* Usually this will be <tt>uniquemember</tt> (the default value) or <tt>member</tt>.
* </p>
* @param groupMemberAttributeName the name of the attribute used to store group
* members.
*/
public void setGroupMemberAttributeName(String groupMemberAttributeName) {
Assert.hasText(groupMemberAttributeName, "groupMemberAttributeName should have text");
this.groupMemberAttributeName = groupMemberAttributeName;
this.groupSearchFilter = "(" + groupMemberAttributeName + "={0})";
}
public void setRoleMapper(AttributesMapper roleMapper) {
this.roleMapper = roleMapper;
}
/**
* Sets the method by which a user's password gets modified.
*
* If set to {@code true}, then {@link LdapUserDetailsManager#changePassword} will
* modify the user's password by way of the
* <a target="_blank" href="https://tools.ietf.org/html/rfc3062">Password Modify
* Extension Operation</a>.
*
* If set to {@code false}, then {@link LdapUserDetailsManager#changePassword} will
* modify the user's password by directly modifying attributes on the corresponding
* entry.
*
* Before using this setting, ensure that the corresponding LDAP server supports this
* extended operation.
*
* By default, {@code usePasswordModifyExtensionOperation} is false.
* @param usePasswordModifyExtensionOperation
* @since 4.2.9
*/
public void setUsePasswordModifyExtensionOperation(boolean usePasswordModifyExtensionOperation) {
this.usePasswordModifyExtensionOperation = usePasswordModifyExtensionOperation;
}
private void changePasswordUsingAttributeModification(DistinguishedName userDn, String oldPassword,
String newPassword) {
ModificationItem[] passwordChange = new ModificationItem[] { new ModificationItem(DirContext.REPLACE_ATTRIBUTE,
new BasicAttribute(this.passwordAttributeName, newPassword)) };
if (oldPassword == null) {
this.template.modifyAttributes(userDn, passwordChange);
return;
}
this.template.executeReadWrite((dirCtx) -> {
LdapContext ctx = (LdapContext) dirCtx;
ctx.removeFromEnvironment("com.sun.jndi.ldap.connect.pool");
ctx.addToEnvironment(Context.SECURITY_PRINCIPAL, LdapUtils.getFullDn(userDn, ctx).toString());
ctx.addToEnvironment(Context.SECURITY_CREDENTIALS, oldPassword);
// TODO: reconnect doesn't appear to actually change the credentials
try {
ctx.reconnect(null);
}
catch (javax.naming.AuthenticationException ex) {
throw new BadCredentialsException("Authentication for password change failed.");
}
ctx.modifyAttributes(userDn, passwordChange);
return null;
});
}
private void changePasswordUsingExtensionOperation(DistinguishedName userDn, String oldPassword,
String newPassword) {
this.template.executeReadWrite((dirCtx) -> {
LdapContext ctx = (LdapContext) dirCtx;
String userIdentity = LdapUtils.getFullDn(userDn, ctx).encode();
PasswordModifyRequest request = new PasswordModifyRequest(userIdentity, oldPassword, newPassword);
try {
return ctx.extendedOperation(request);
}
catch (javax.naming.AuthenticationException ex) {
throw new BadCredentialsException("Authentication for password change failed.");
}
});
}
/**
* An implementation of the
* <a target="_blank" href="https://tools.ietf.org/html/rfc3062"> LDAP Password Modify
* Extended Operation </a> client request.
*
* Can be directed at any LDAP server that supports the Password Modify Extended
* Operation.
*
* @author Josh Cummings
* @since 4.2.9
*/
private static class PasswordModifyRequest implements ExtendedRequest {
private static final byte SEQUENCE_TYPE = 48;
private static final String PASSWORD_MODIFY_OID = "1.3.6.1.4.1.4203.1.11.1";
private static final byte USER_IDENTITY_OCTET_TYPE = -128;
private static final byte OLD_PASSWORD_OCTET_TYPE = -127;
private static final byte NEW_PASSWORD_OCTET_TYPE = -126;
private final ByteArrayOutputStream value = new ByteArrayOutputStream();
PasswordModifyRequest(String userIdentity, String oldPassword, String newPassword) {
ByteArrayOutputStream elements = new ByteArrayOutputStream();
if (userIdentity != null) {
berEncode(USER_IDENTITY_OCTET_TYPE, userIdentity.getBytes(), elements);
}
if (oldPassword != null) {
berEncode(OLD_PASSWORD_OCTET_TYPE, oldPassword.getBytes(), elements);
}
if (newPassword != null) {
berEncode(NEW_PASSWORD_OCTET_TYPE, newPassword.getBytes(), elements);
}
berEncode(SEQUENCE_TYPE, elements.toByteArray(), this.value);
}
@Override
public String getID() {
return PASSWORD_MODIFY_OID;
}
@Override
public byte[] getEncodedValue() {
return this.value.toByteArray();
}
@Override
public ExtendedResponse createExtendedResponse(String id, byte[] berValue, int offset, int length) {
return null;
}
/**
* Only minimal support for <a target="_blank" href=
* "https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf"> BER
* encoding </a>; just what is necessary for the Password Modify request.
*
*/
private void berEncode(byte type, byte[] src, ByteArrayOutputStream dest) {
int length = src.length;
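			// Definite-length BER: lengths below 128 use the short form (a single length octet);
			// longer values use the long form, where 0x81-0x84 announce how many length octets
			// follow, written big-endian. For example, a length of 300 encodes as 0x82 0x01 0x2C.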
dest.write(type);
if (length < 128) {
dest.write(length);
}
else if ((length & 0x0000_00FF) == length) {
dest.write((byte) 0x81);
dest.write((byte) (length & 0xFF));
}
else if ((length & 0x0000_FFFF) == length) {
dest.write((byte) 0x82);
dest.write((byte) ((length >> 8) & 0xFF));
dest.write((byte) (length & 0xFF));
}
else if ((length & 0x00FF_FFFF) == length) {
dest.write((byte) 0x83);
dest.write((byte) ((length >> 16) & 0xFF));
dest.write((byte) ((length >> 8) & 0xFF));
dest.write((byte) (length & 0xFF));
}
else {
dest.write((byte) 0x84);
dest.write((byte) ((length >> 24) & 0xFF));
dest.write((byte) ((length >> 16) & 0xFF));
dest.write((byte) ((length >> 8) & 0xFF));
dest.write((byte) (length & 0xFF));
}
try {
dest.write(src);
}
catch (IOException ex) {
throw new IllegalArgumentException("Failed to BER encode provided value of type: " + type);
}
}
}
}
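/*
 * A minimal configuration sketch (not part of the class above): it wires the manager together
 * using the setters defined in this file and opts in to the RFC 3062 password modify extended
 * operation. The base DNs ("ou=people", "ou=groups"), the "member" attribute and the helper
 * class name are illustrative assumptions; the ContextSource is expected to come from the
 * surrounding LDAP configuration.
 */
class LdapUserDetailsManagerConfigSketch {

	static LdapUserDetailsManager newManager(ContextSource contextSource) {
		LdapUserDetailsManager manager = new LdapUserDetailsManager(contextSource);
		// Map usernames to DNs of the form uid=<username>,ou=people,<base>
		manager.setUsernameMapper(new DefaultLdapUsernameToDnMapper("ou=people", "uid"));
		// Groups live under ou=groups and list their members in the "member" attribute
		manager.setGroupSearchBase("ou=groups");
		manager.setGroupMemberAttributeName("member");
		// Use the Password Modify extended operation instead of rebinding as the user;
		// requires server-side support for RFC 3062
		manager.setUsePasswordModifyExtensionOperation(true);
		return manager;
	}

}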
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.kvstore;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import org.apache.spark.annotation.Private;
/**
* Implementation of KVStore that keeps data deserialized in memory. This store does not index
* data; instead, whenever iterating over an indexed field, the stored data is copied and sorted
* according to the index. This saves memory but makes iteration more expensive.
*/
@Private
public class InMemoryStore implements KVStore {
private Object metadata;
private ConcurrentMap<Class<?>, InstanceList> data = new ConcurrentHashMap<>();
@Override
public <T> T getMetadata(Class<T> klass) {
return klass.cast(metadata);
}
@Override
public void setMetadata(Object value) {
this.metadata = value;
}
@Override
public long count(Class<?> type) {
InstanceList list = data.get(type);
return list != null ? list.size() : 0;
}
@Override
public long count(Class<?> type, String index, Object indexedValue) throws Exception {
InstanceList list = data.get(type);
int count = 0;
Object comparable = asKey(indexedValue);
KVTypeInfo.Accessor accessor = list.getIndexAccessor(index);
for (Object o : view(type)) {
if (Objects.equal(comparable, asKey(accessor.get(o)))) {
count++;
}
}
return count;
}
@Override
public <T> T read(Class<T> klass, Object naturalKey) {
InstanceList list = data.get(klass);
Object value = list != null ? list.get(naturalKey) : null;
if (value == null) {
throw new NoSuchElementException();
}
return klass.cast(value);
}
@Override
public void write(Object value) throws Exception {
InstanceList list = data.computeIfAbsent(value.getClass(), key -> {
try {
return new InstanceList(key);
} catch (Exception e) {
throw Throwables.propagate(e);
}
});
list.put(value);
}
@Override
public void delete(Class<?> type, Object naturalKey) {
InstanceList list = data.get(type);
if (list != null) {
list.delete(naturalKey);
}
}
@Override
  public <T> KVStoreView<T> view(Class<T> type) {
InstanceList list = data.get(type);
return list != null ? list.view(type)
: new InMemoryView<>(type, Collections.<T>emptyList(), null);
}
@Override
public void close() {
metadata = null;
data.clear();
}
@SuppressWarnings("unchecked")
private static Comparable<Object> asKey(Object in) {
if (in.getClass().isArray()) {
in = ArrayWrappers.forArray(in);
}
return (Comparable<Object>) in;
}
private static class InstanceList {
private final KVTypeInfo ti;
private final KVTypeInfo.Accessor naturalKey;
private final ConcurrentMap<Comparable<Object>, Object> data;
private int size;
private InstanceList(Class<?> type) throws Exception {
this.ti = new KVTypeInfo(type);
this.naturalKey = ti.getAccessor(KVIndex.NATURAL_INDEX_NAME);
this.data = new ConcurrentHashMap<>();
this.size = 0;
}
KVTypeInfo.Accessor getIndexAccessor(String indexName) {
return ti.getAccessor(indexName);
}
public Object get(Object key) {
return data.get(asKey(key));
}
public void put(Object value) throws Exception {
Preconditions.checkArgument(ti.type().equals(value.getClass()),
"Unexpected type: %s", value.getClass());
if (data.put(asKey(naturalKey.get(value)), value) == null) {
size++;
}
}
public void delete(Object key) {
if (data.remove(asKey(key)) != null) {
size--;
}
}
public int size() {
return size;
}
@SuppressWarnings("unchecked")
public <T> InMemoryView<T> view(Class<T> type) {
Preconditions.checkArgument(ti.type().equals(type), "Unexpected type: %s", type);
Collection<T> all = (Collection<T>) data.values();
return new InMemoryView(type, all, ti);
}
}
private static class InMemoryView<T> extends KVStoreView<T> {
private final Collection<T> elements;
private final KVTypeInfo ti;
private final KVTypeInfo.Accessor natural;
InMemoryView(Class<T> type, Collection<T> elements, KVTypeInfo ti) {
super(type);
this.elements = elements;
this.ti = ti;
this.natural = ti != null ? ti.getAccessor(KVIndex.NATURAL_INDEX_NAME) : null;
}
@Override
public Iterator<T> iterator() {
if (elements.isEmpty()) {
return new InMemoryIterator<>(elements.iterator());
}
try {
KVTypeInfo.Accessor getter = index != null ? ti.getAccessor(index) : null;
int modifier = ascending ? 1 : -1;
final List<T> sorted = copyElements();
Collections.sort(sorted, (e1, e2) -> modifier * compare(e1, e2, getter));
Stream<T> stream = sorted.stream();
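        // "first" and "last" are inclusive bounds on the indexed value (their direction flips
        // when iterating in descending order); "skip" and "max" are applied after those bounds.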
if (first != null) {
stream = stream.filter(e -> modifier * compare(e, getter, first) >= 0);
}
if (last != null) {
stream = stream.filter(e -> modifier * compare(e, getter, last) <= 0);
}
if (skip > 0) {
stream = stream.skip(skip);
}
if (max < sorted.size()) {
stream = stream.limit((int) max);
}
return new InMemoryIterator<>(stream.iterator());
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
/**
* Create a copy of the input elements, filtering the values for child indices if needed.
*/
private List<T> copyElements() {
if (parent != null) {
KVTypeInfo.Accessor parentGetter = ti.getParentAccessor(index);
Preconditions.checkArgument(parentGetter != null, "Parent filter for non-child index.");
return elements.stream()
.filter(e -> compare(e, parentGetter, parent) == 0)
.collect(Collectors.toList());
} else {
return new ArrayList<>(elements);
}
}
private int compare(T e1, T e2, KVTypeInfo.Accessor getter) {
try {
int diff = compare(e1, getter, getter.get(e2));
if (diff == 0 && getter != natural) {
diff = compare(e1, natural, natural.get(e2));
}
return diff;
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
private int compare(T e1, KVTypeInfo.Accessor getter, Object v2) {
try {
return asKey(getter.get(e1)).compareTo(asKey(v2));
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
}
private static class InMemoryIterator<T> implements KVStoreIterator<T> {
private final Iterator<T> iter;
InMemoryIterator(Iterator<T> iter) {
this.iter = iter;
}
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public T next() {
return iter.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public List<T> next(int max) {
List<T> list = new ArrayList<>(max);
while (hasNext() && list.size() < max) {
list.add(next());
}
return list;
}
@Override
public boolean skip(long n) {
long skipped = 0;
while (skipped < n) {
if (hasNext()) {
next();
skipped++;
} else {
return false;
}
}
return hasNext();
}
@Override
public void close() {
// no op.
}
}
}
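/*
 * A small usage sketch (not part of InMemoryStore): a bean type whose natural key is marked
 * with @KVIndex is written, read back, counted and deleted. The Entry type, its "id" field and
 * the demo() method are illustrative assumptions; @KVIndex marking the natural key follows the
 * annotation consumed by KVTypeInfo above.
 */
class InMemoryStoreUsageSketch {

  public static class Entry {
    // With no explicit name, @KVIndex marks this field as the natural index (the store key).
    @KVIndex
    public String id;
  }

  static void demo() throws Exception {
    InMemoryStore store = new InMemoryStore();

    Entry entry = new Entry();
    entry.id = "a";
    store.write(entry);                        // lazily creates the per-type InstanceList

    Entry back = store.read(Entry.class, "a"); // looked up by natural key
    long total = store.count(Entry.class);     // 1

    store.delete(Entry.class, "a");
    store.close();
  }

}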
|
|
/*
* Copyright (c) 2008, Damian Carrillo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
* * Neither the name of the copyright holder's organization nor the names of its contributors
* may be used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package co.cdev.agave.web;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import co.cdev.agave.PartImpl;
import co.cdev.agave.Part;
/**
* @author <a href="mailto:[email protected]">Damian Carrillo</a>
*/
public abstract class AbstractMultipartParser<T> implements MultipartParser<T> {
protected static class CoupledLine {
public StringBuilder characters = new StringBuilder();
public ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
public void append(int i) {
characters.append((char)i);
byteStream.write(i);
}
}
private static final Pattern BOUNDARY_PATTERN = Pattern.compile("multipart/form-data;\\s*boundary=(.*)");
    private static final String CONTENT_DISPOSITION = "Content-Disposition:\\s*form-data;\\s*name=\"(.*)\"";
    private static final Pattern PART_PATTERN = Pattern.compile(CONTENT_DISPOSITION + ";\\s*filename=\"(.+)\"");
    private static final Pattern PARAMETER_PATTERN = Pattern.compile(CONTENT_DISPOSITION);
private static final Pattern CONTENT_TYPE_PATTERN = Pattern.compile("Content-Type:\\s*(.+)");
private static final Pattern OTHER_HEADER_PATTERN = Pattern.compile("(\\S+):\\s*(.+?)");
private Map<String, Collection<String>> parameters;
protected Map<String, Part<T>> parts;
protected String boundary;
protected String eos;
protected InputStream in;
@Override
public Map<String, Collection<String>> getParameters() {
return parameters;
}
@Override
public Map<String, Part<T>> getParts() {
return parts;
}
public String getBoundary() {
return boundary;
}
@Override
public void parseInput(HttpServletRequest request) throws Exception {
parameters = new HashMap<String, Collection<String>>();
parts = new HashMap<String, Part<T>>();
Matcher boundaryMatcher = BOUNDARY_PATTERN.matcher(request.getContentType());
if (boundaryMatcher.matches() && boundaryMatcher.groupCount() >= 1) {
boundary = "--" + boundaryMatcher.group(1);
}
eos = boundary + "--";
in = new BufferedInputStream(request.getInputStream());
readLine(in);
try {
while (true) {
Part<T> part = new PartImpl<T>();
readHeaders(part);
if (part.getFilename() != null) {
if (readPart(part)) {
break;
}
} else {
if (readParameter(part)) {
break;
}
}
}
} finally {
in.close();
in = null;
}
}
private void readHeaders(Part<T> part) throws IOException {
String line = null;
while ((line = readLine(in)) != null) {
line = line.trim();
if ("".equals(line)) {
break;
}
Matcher matcher = PART_PATTERN.matcher(line);
if (matcher.matches() && matcher.groupCount() >= 2) {
part.setName(matcher.group(1));
part.setFilename(matcher.group(2));
continue;
}
matcher = PARAMETER_PATTERN.matcher(line);
if (matcher.matches() && matcher.groupCount() >= 1) {
part.setName(matcher.group(1));
continue;
}
matcher = CONTENT_TYPE_PATTERN.matcher(line);
if (matcher.matches() && matcher.groupCount() >= 1) {
part.setContentType(matcher.group(1));
continue;
}
matcher = OTHER_HEADER_PATTERN.matcher(line);
if (matcher.matches() && matcher.groupCount() >= 2) {
part.addHeader(matcher.group(1), matcher.group(2));
continue;
}
}
}
private String readLine(InputStream in) throws IOException {
StringBuilder text = new StringBuilder();
int i = -1;
while ((i = in.read()) > 0) {
text.append((char) i);
if ((char) i == '\n') {
break;
}
}
return text.toString();
}
/**
* Reads a line from the input stream and represents it as a {@link CoupledLine}.
* This is done because it is not known whether the line is the terminating character
* sequence, and in lieu of backtracking, it's just stored as character and
     * binary data. The amount of data buffered for a single line is small, so the extra
     * memory overhead should be negligible.
*
* @param in the data from the multipart post that corresponds to an individual part
* @return a line represented as character and binary data
* @throws IOException
*/
protected CoupledLine readCoupledLine(InputStream in) throws IOException {
CoupledLine line = new CoupledLine();
int i = -1;
while ((i = in.read()) != -1) {
line.append(i);
if ((char) i == '\n') {
break;
}
}
return line;
}
private boolean readParameter(Part<T> part) throws IOException {
boolean end = false;
StringBuilder parameterValue = new StringBuilder();
String line = null;
while ((line = readLine(in)) != null) {
line = line.trim();
if (eos.equals(line)) {
end = true;
break;
}
if (boundary.equals(line)) {
break;
}
parameterValue.append(line);
}
if (!parameters.containsKey(part.getName())) {
parameters.put(part.getName(), new ArrayList<String>());
}
parameters.get(part.getName()).add(parameterValue.toString());
return end;
}
protected abstract boolean readPart(Part<T> part) throws Exception;
/**
* Just making sure that this is closed... (see the finally block of the parseInput method as well)
*/
@Override
protected void finalize() throws Throwable {
super.finalize();
if (in != null) {
in.close();
}
}
}
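/*
 * A standalone sketch (not part of the parser above) of what BOUNDARY_PATTERN extracts: given
 * a typical multipart Content-Type header, group(1) is the raw boundary token, which the parser
 * then prefixes with "--" to match the delimiter lines of the request body. The sample header
 * value and the class name are illustrative.
 */
class BoundaryPatternSketch {

    public static void main(String[] args) {
        Pattern boundaryPattern = Pattern.compile("multipart/form-data;\\s*boundary=(.*)");
        Matcher matcher = boundaryPattern.matcher(
                "multipart/form-data; boundary=----WebKitFormBoundaryAbc123");
        if (matcher.matches()) {
            String boundary = "--" + matcher.group(1);
            String eos = boundary + "--";          // terminating delimiter after the last part
            System.out.println(boundary + " / " + eos);
        }
    }
}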
|
|
/**
*
*/
package org.vaadin.maps.client.ui;
import org.vaadin.gwtgraphics.client.AbstractDrawingContainer;
import org.vaadin.gwtgraphics.client.Drawing;
import org.vaadin.gwtgraphics.client.Group;
import org.vaadin.gwtgraphics.client.shape.Text;
import org.vaadin.maps.client.drawing.Utils;
import org.vaadin.maps.client.drawing.Utils.PointShape;
import org.vaadin.maps.client.geometry.Coordinate;
import org.vaadin.maps.client.geometry.Geometry;
import org.vaadin.maps.shared.ui.Style;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Widget;
/**
* @author Kamil Morong
*
*/
public class VVectorFeature extends AbstractDrawingContainer implements CanShift {
public static final String CLASSNAME = "v-vectorfeature";
private Geometry geometry = null;
private Drawing drawing = null;
private String text = null;
private Text textShape = null;
private Style style = Style.DEFAULT;
private Style hoverStyle = null;
private Coordinate centroid = null;
private Coordinate textOffset = new Coordinate();
private boolean hidden = false;
private PointShape pointShape = PointShape.Circle;
private double pointShapeScale = 1.0;
private HandlerRegistration mouseOverHandler = null;
private HandlerRegistration mouseOutHandler = null;
private int shiftX = 0;
private int shiftY = 0;
public VVectorFeature() {
super();
setStyleName(CLASSNAME);
}
public Geometry getGeometry() {
return geometry;
}
public void setGeometry(Geometry geometry) {
if (geometry != null) {
Utils.moveGeometry(geometry, -shiftX, -shiftY);
if (!geometry.equals(this.geometry)) {
// create new vector object and insert it into feature root
// element
drawGeometry(geometry);
}
} else {
clear();
}
this.geometry = geometry;
}
public String getText() {
return text;
}
public void setText(String text) {
if (text != null) {
if (!text.equals(this.text)) {
drawText(text);
}
} else {
clearText();
}
}
public Style getStyle() {
return style;
}
public void setStyle(Style style) {
this.style = style;
setPointShape();
updateDrawingStyle();
updateTextStyle();
}
private void setPointShape() {
if (style != null) {
pointShape = Utils.pointShapeFromString(style.pointShape);
if (null == pointShape) {
pointShape = PointShape.Circle;
}
pointShapeScale = style.pointShapeScale;
} else {
pointShape = PointShape.Circle;
pointShapeScale = 1.0;
}
}
public Style getHoverStyle() {
return hoverStyle;
}
public void setHoverStyle(Style style) {
this.hoverStyle = style;
updateHoverStyle();
}
public boolean isHidden() {
return hidden;
}
public void setHidden(boolean hidden) {
if (this.hidden != hidden) {
this.hidden = hidden;
VVectorFeatureContainer container = null;
Widget parent = getParent();
if (parent instanceof VVectorFeatureContainer) {
container = (VVectorFeatureContainer) parent;
} else {
Widget grandParent = parent.getParent();
if (grandParent instanceof VVectorFeatureContainer) {
container = (VVectorFeatureContainer) grandParent;
}
}
if (container != null) {
container.remove(this);
container.add(this);
}
}
}
private void updateDrawingStyle() {
if (drawing != null && style != null) {
Utils.updateDrawingStyle(drawing, style);
}
}
private void updateHoverStyle() {
if (drawing != null) {
if (hoverStyle != null) {
mouseOverHandler = drawing.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
Utils.updateDrawingStyle(drawing, hoverStyle);
}
});
mouseOutHandler = drawing.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
Utils.updateDrawingStyle(drawing, style);
}
});
} else {
if (mouseOverHandler != null) {
mouseOverHandler.removeHandler();
mouseOverHandler = null;
}
if (mouseOutHandler != null) {
mouseOutHandler.removeHandler();
mouseOutHandler = null;
}
}
}
}
private void updateTextStyle() {
if (textShape != null && style != null) {
Utils.updateDrawingStyle(textShape, style);
}
}
@Override
public void clear() {
super.clear();
drawing = null;
}
public void clearText() {
if (textShape != null) {
remove(textShape);
}
textShape = null;
text = null;
}
private void drawGeometry(Geometry geometry) {
clear();
if (geometry != null) {
drawing = Utils.drawGeometry(geometry, pointShape, pointShapeScale, shiftX, shiftY);
updateDrawingStyle();
updateHoverStyle();
add(drawing);
}
}
public void setCentroid(Double x, Double y) {
if (x != null && y != null) {
centroid = new Coordinate(x, y);
} else {
centroid = null;
}
updateTextPosition();
}
public void setTextOffset(double x, double y) {
textOffset.setXY(x, y);
updateTextPosition();
}
private void updateTextPosition() {
if (textShape != null && centroid != null) {
textShape.setX(Math.round((float) (centroid.getX() + textOffset.getX())));
textShape.setY(Math.round((float) (centroid.getY() + textOffset.getY())));
}
}
private void drawText(String text) {
if (null == textShape) {
textShape = new Text(0, 0, text);
updateTextPosition();
updateTextStyle();
add(textShape);
} else {
textShape.setText(text);
}
this.text = text;
}
public Drawing getDrawing() {
return drawing;
}
/**
	 * Returns the Group class as the drawing type; the constructor creates an
	 * instance of it as the root element.
*/
@Override
public Class<? extends Drawing> getType() {
return Group.class;
}
@Override
public int getShiftX() {
return shiftX;
}
@Override
public int getShiftY() {
return shiftY;
}
@Override
public void setShift(int x, int y) {
shiftX = x;
shiftY = y;
drawGeometry(geometry);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.net.tftp;
import java.net.DatagramPacket;
import java.net.InetAddress;
/***
 * A final class derived from TFTPPacket defining the TFTP Data
* packet type.
* <p>
* Details regarding the TFTP protocol and the format of TFTP packets can
* be found in RFC 783. But the point of these classes is to keep you
* from having to worry about the internals. Additionally, only very
* few people should have to care about any of the TFTPPacket classes
* or derived classes. Almost all users should only be concerned with the
* {@link org.apache.commons.net.tftp.TFTPClient} class
* {@link org.apache.commons.net.tftp.TFTPClient#receiveFile receiveFile()}
* and
* {@link org.apache.commons.net.tftp.TFTPClient#sendFile sendFile()}
* methods.
*
*
* @see TFTPPacket
* @see TFTPPacketException
* @see TFTP
***/
public final class TFTPDataPacket extends TFTPPacket
{
/*** The maximum number of bytes in a TFTP data packet (512) ***/
public static final int MAX_DATA_LENGTH = 512;
/*** The minimum number of bytes in a TFTP data packet (0) ***/
public static final int MIN_DATA_LENGTH = 0;
/*** The block number of the packet. ***/
int _blockNumber;
/*** The length of the data. ***/
int _length;
/*** The offset into the _data array at which the data begins. ***/
int _offset;
/*** The data stored in the packet. ***/
byte[] _data;
/***
* Creates a data packet to be sent to a host at a given port
* with a given block number. The actual data to be sent is passed as
* an array, an offset, and a length. The offset is the offset into
* the byte array where the data starts. The length is the length of
* the data. If the length is greater than MAX_DATA_LENGTH, it is
* truncated.
*
* @param destination The host to which the packet is going to be sent.
* @param port The port to which the packet is going to be sent.
* @param blockNumber The block number of the data.
* @param data The byte array containing the data.
* @param offset The offset into the array where the data starts.
* @param length The length of the data.
***/
public TFTPDataPacket(InetAddress destination, int port, int blockNumber,
byte[] data, int offset, int length)
{
super(TFTPPacket.DATA, destination, port);
_blockNumber = blockNumber;
_data = data;
_offset = offset;
if (length > MAX_DATA_LENGTH) {
_length = MAX_DATA_LENGTH;
} else {
_length = length;
}
}
public TFTPDataPacket(InetAddress destination, int port, int blockNumber,
byte[] data)
{
this(destination, port, blockNumber, data, 0, data.length);
}
/***
     * Creates a data packet based on a received
* datagram. Assumes the datagram is at least length 4, else an
* ArrayIndexOutOfBoundsException may be thrown.
*
* @param datagram The datagram containing the received data.
* @throws TFTPPacketException If the datagram isn't a valid TFTP
* data packet.
***/
TFTPDataPacket(DatagramPacket datagram) throws TFTPPacketException
{
super(TFTPPacket.DATA, datagram.getAddress(), datagram.getPort());
_data = datagram.getData();
_offset = 4;
if (getType() != _data[1]) {
throw new TFTPPacketException("TFTP operator code does not match type.");
}
_blockNumber = (((_data[2] & 0xff) << 8) | (_data[3] & 0xff));
_length = datagram.getLength() - 4;
if (_length > MAX_DATA_LENGTH) {
_length = MAX_DATA_LENGTH;
}
}
/***
* This is a method only available within the package for
     * implementing efficient datagram transport by eliminating buffering.
* It takes a datagram as an argument, and a byte buffer in which
* to store the raw datagram data. Inside the method, the data
* is set as the datagram's data and the datagram returned.
*
* @param datagram The datagram to create.
* @param data The buffer to store the packet and to use in the datagram.
* @return The datagram argument.
***/
@Override
DatagramPacket _newDatagram(DatagramPacket datagram, byte[] data)
{
data[0] = 0;
data[1] = (byte)_type;
data[2] = (byte)((_blockNumber & 0xffff) >> 8);
data[3] = (byte)(_blockNumber & 0xff);
        // Double-check that the destination buffer is not the same array as _data before copying
if (data != _data) {
System.arraycopy(_data, _offset, data, 4, _length);
}
datagram.setAddress(_address);
datagram.setPort(_port);
datagram.setData(data);
datagram.setLength(_length + 4);
return datagram;
}
/***
* Creates a UDP datagram containing all the TFTP
* data packet data in the proper format.
     * This is a method exposed to the programmer in case they
     * want to implement their own TFTP client instead of using
* the {@link org.apache.commons.net.tftp.TFTPClient}
* class.
* Under normal circumstances, you should not have a need to call this
* method.
*
* @return A UDP datagram containing the TFTP data packet.
***/
@Override
public DatagramPacket newDatagram()
{
byte[] data;
data = new byte[_length + 4];
data[0] = 0;
data[1] = (byte)_type;
data[2] = (byte)((_blockNumber & 0xffff) >> 8);
data[3] = (byte)(_blockNumber & 0xff);
System.arraycopy(_data, _offset, data, 4, _length);
return new DatagramPacket(data, _length + 4, _address, _port);
}
/***
* Returns the block number of the data packet.
*
* @return The block number of the data packet.
***/
public int getBlockNumber()
{
return _blockNumber;
}
/*** Sets the block number of the data packet.
* @param blockNumber the number to set
***/
public void setBlockNumber(int blockNumber)
{
_blockNumber = blockNumber;
}
/***
* Sets the data for the data packet.
*
* @param data The byte array containing the data.
* @param offset The offset into the array where the data starts.
* @param length The length of the data.
***/
public void setData(byte[] data, int offset, int length)
{
_data = data;
_offset = offset;
if (length > MAX_DATA_LENGTH) {
_length = MAX_DATA_LENGTH;
} else {
_length = length;
}
}
/***
* Returns the length of the data part of the data packet.
*
* @return The length of the data part of the data packet.
***/
public int getDataLength()
{
return _length;
}
/***
* Returns the offset into the byte array where the packet data actually
* starts.
*
* @return The offset into the byte array where the packet data actually
* starts.
***/
public int getDataOffset()
{
return _offset;
}
/***
* Returns the byte array containing the packet data.
*
* @return The byte array containing the packet data.
***/
public byte[] getData()
{
return _data;
}
}
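/*
 * A minimal sketch (not part of commons-net) of the wire layout that newDatagram() produces:
 * a zero byte, the DATA opcode, a two-byte big-endian block number, then the payload. The
 * loopback address, port 69, block number 7 and the class name are illustrative values.
 */
class TFTPDataPacketLayoutSketch
{
    public static void main(String[] args)
    {
        byte[] payload = "hello".getBytes();
        TFTPDataPacket packet = new TFTPDataPacket(
            InetAddress.getLoopbackAddress(), 69, 7, payload);
        DatagramPacket datagram = packet.newDatagram();
        byte[] wire = datagram.getData();
        // wire[0] == 0, wire[1] == opcode (DATA), wire[2..3] == block number, wire[4..8] == "hello"
        System.out.println(datagram.getLength()); // 9: 4 header bytes plus the 5 payload bytes
    }
}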
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.test.functions.data.misc;
import org.junit.Test;
import org.apache.sysds.api.DMLScript;
import org.apache.sysds.common.Types.ExecMode;
import org.apache.sysds.common.Types.ExecType;
import org.apache.sysds.runtime.io.IOUtilFunctions;
import org.apache.sysds.runtime.io.FileFormatPropertiesMM.MMField;
import org.apache.sysds.runtime.io.FileFormatPropertiesMM.MMFormat;
import org.apache.sysds.runtime.io.FileFormatPropertiesMM.MMSymmetry;
import org.apache.sysds.runtime.matrix.data.IJV;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Iterator;
import org.apache.commons.lang.NotImplementedException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class MatrixMarketFormatTest extends AutomatedTestBase
{
private final static String TEST_NAME = "MatrixMarketFormat";
private final static String TEST_DIR = "functions/data/";
private final static String TEST_CLASS_DIR = TEST_DIR + MatrixMarketFormatTest.class.getSimpleName() + "/";
private final static int dim = 1200;
private final static double sparsity = 0.1;
@Override
public void setUp() {
TestUtils.clearAssertionInformation();
addTestConfiguration( TEST_NAME,
new TestConfiguration(TEST_CLASS_DIR, TEST_NAME, new String[] { "R", "C" }) );
}
@Test
public void testMMCooRealGeneralCP() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.REAL, MMSymmetry.GENERAL, ExecType.CP);
}
@Test
public void testMMCooRealGeneralSp() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.REAL, MMSymmetry.GENERAL, ExecType.SPARK);
}
@Test
public void testMMCooRealSymmetricCP() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.REAL, MMSymmetry.SYMMETRIC, ExecType.CP);
}
@Test
public void testMMCooRealSymmetricSp() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.REAL, MMSymmetry.SYMMETRIC, ExecType.SPARK);
}
// @Test
// public void testMMCooRealSkewSymmetricCP() {
// runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.REAL, MMSymmetry.SKEW_SYMMETRIC, ExecType.CP);
// }
//
// @Test
// public void testMMCooRealSkewSymmetricSp() {
// runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.REAL, MMSymmetry.SKEW_SYMMETRIC, ExecType.SPARK);
// }
@Test
public void testMMCooIntegerGeneralCP() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.INTEGER, MMSymmetry.GENERAL, ExecType.CP);
}
@Test
public void testMMCooIntegerGeneralSp() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.INTEGER, MMSymmetry.GENERAL, ExecType.SPARK);
}
@Test
public void testMMCooIntegerSymmetricCP() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.INTEGER, MMSymmetry.SYMMETRIC, ExecType.CP);
}
@Test
public void testMMCooIntegerSymmetricSp() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.INTEGER, MMSymmetry.SYMMETRIC, ExecType.SPARK);
}
// @Test
// public void testMMCooIntegerSkewSymmetricCP() {
// runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.INTEGER, MMSymmetry.SKEW_SYMMETRIC, ExecType.CP);
// }
//
// @Test
// public void testMMCooIntegerSkewSymmetricSp() {
// runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.INTEGER, MMSymmetry.SKEW_SYMMETRIC, ExecType.SPARK);
// }
@Test
public void testMMCooPatternGeneralCP() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.PATTERN, MMSymmetry.GENERAL, ExecType.CP);
}
@Test
public void testMMCooPatternGeneralSp() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.PATTERN, MMSymmetry.GENERAL, ExecType.SPARK);
}
@Test
public void testMMCooPatternSymmetricCP() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.PATTERN, MMSymmetry.SYMMETRIC, ExecType.CP);
}
@Test
public void testMMCooPatternSymmetricSp() {
runMatrixMarketFormatTest(MMFormat.COORDINATE, MMField.PATTERN, MMSymmetry.SYMMETRIC, ExecType.SPARK);
}
// @Test
// public void testMMArrRealGeneralCP() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.REAL, MMSymmetry.GENERAL, ExecType.CP);
// }
//
// @Test
// public void testMMArrRealGeneralSp() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.REAL, MMSymmetry.GENERAL, ExecType.SPARK);
// }
//
// @Test
// public void testMMArrRealSymmetricCP() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.REAL, MMSymmetry.SYMMETRIC, ExecType.CP);
// }
//
// @Test
// public void testMMArrRealSymmetricSp() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.REAL, MMSymmetry.SYMMETRIC, ExecType.SPARK);
// }
//
// @Test
// public void testMMArrRealSkewSymmetricCP() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.REAL, MMSymmetry.SKEW_SYMMETRIC, ExecType.CP);
// }
//
// @Test
// public void testMMArrRealSkewSymmetricSp() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.REAL, MMSymmetry.SKEW_SYMMETRIC, ExecType.SPARK);
// }
//
// @Test
// public void testMMArrIntegerGeneralCP() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.INTEGER, MMSymmetry.GENERAL, ExecType.CP);
// }
//
// @Test
// public void testMMArrIntegerGeneralSp() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.INTEGER, MMSymmetry.GENERAL, ExecType.SPARK);
// }
//
// @Test
// public void testMMArrIntegerSymmetricCP() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.INTEGER, MMSymmetry.SYMMETRIC, ExecType.CP);
// }
//
// @Test
// public void testMMArrIntegerSymmetricSp() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.INTEGER, MMSymmetry.SYMMETRIC, ExecType.SPARK);
// }
//
// @Test
// public void testMMArrIntegerSkewSymmetricCP() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.INTEGER, MMSymmetry.SKEW_SYMMETRIC, ExecType.CP);
// }
//
// @Test
// public void testMMArrIntegerSkewSymmetricSp() {
// runMatrixMarketFormatTest(MMFormat.ARRAY, MMField.INTEGER, MMSymmetry.SKEW_SYMMETRIC, ExecType.SPARK);
// }
private void runMatrixMarketFormatTest(MMFormat fmt, MMField field, MMSymmetry symmetry, ExecType et)
{
//rtplatform for MR
ExecMode platformOld = rtplatform;
switch( et ){
case SPARK: rtplatform = ExecMode.SPARK; break;
default: rtplatform = ExecMode.SINGLE_NODE; break;
}
boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
if( rtplatform == ExecMode.SPARK )
DMLScript.USE_LOCAL_SPARK_CONFIG = true;
try
{
TestConfiguration config = getTestConfiguration(TEST_NAME);
loadTestConfiguration(config);
String HOME = SCRIPT_DIR + TEST_DIR;
fullRScriptName = HOME + TEST_NAME + ".R";
fullDMLScriptName = HOME + TEST_NAME + ".dml";
programArgs = new String[]{"-args", input("X"), output("R"), output("C") };
rCmd = "Rscript" + " " + fullRScriptName + " " +
input("X") + " " + expected("R") + " " + expected("C");
generateAndWriteMMInput(input("X"), fmt, field, symmetry);
runTest(true, false, null, -1);
runRScript(true);
//compare row and column aggregates
TestUtils.compareMatrices(readDMLMatrixFromOutputDir("R"),
readRMatrixFromExpectedDir("R"), 1e-10, "Stat-DML", "Stat-R");
TestUtils.compareMatrices(readDMLMatrixFromOutputDir("C"),
readRMatrixFromExpectedDir("C"), 1e-10, "Stat-DML", "Stat-R");
}
catch (IOException e) {
throw new RuntimeException(e);
}
finally {
rtplatform = platformOld;
DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
}
}
private static void generateAndWriteMMInput(String fname, MMFormat fmt, MMField field, MMSymmetry symmetry)
throws IOException
{
int rows = dim;
int cols = (symmetry==MMSymmetry.GENERAL) ? dim/3 : dim;
MatrixBlock tmp = MatrixBlock.randOperations(
rows, cols, sparsity, -10, 10, "uniform", 7);
String header = "%%MatrixMarket matrix " + fmt.toString() + " "
+ field.toString() + " " + symmetry.toString() + "\n";
String meta = rows + " " + cols + ((fmt == MMFormat.COORDINATE) ?
" " + tmp.getNonZeros() : "") + "\n";
Path path = new Path( fname );
FileSystem fs = IOUtilFunctions.getFileSystem(path);
try( BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(path,true))) )
{
br.write(header);
br.write(meta);
if( fmt == MMFormat.ARRAY ) {
for(int j=0; j<tmp.getNumColumns(); j++) {
int bi = (symmetry == MMSymmetry.GENERAL) ? 0 :
(symmetry == MMSymmetry.SYMMETRIC) ? j : j+1;
for(int i=bi; i<tmp.getNumRows(); i++) {
double val = tmp.quickGetValue(i, j);
br.write(String.valueOf((field == MMField.INTEGER) ?
(int) val : val) + "\n" );
}
}
}
else { //COORDINATE
if( tmp.isInSparseFormat() ) {
StringBuilder sb = new StringBuilder();
Iterator<IJV> iter = tmp.getSparseBlockIterator();
while( iter.hasNext() ) {
IJV cell = iter.next();
if( (symmetry == MMSymmetry.SYMMETRIC && cell.getJ() > cell.getI())
|| (symmetry == MMSymmetry.SKEW_SYMMETRIC && cell.getJ() >= cell.getI()))
continue;
sb.append(cell.getI()+1);
sb.append(' ');
sb.append(cell.getJ()+1);
if( field != MMField.PATTERN ) {
sb.append(' ');
sb.append((field == MMField.INTEGER) ?
String.valueOf((int) cell.getV()) : String.valueOf(cell.getV()));
}
sb.append('\n');
br.write( sb.toString() ); //same as append
sb.setLength(0);
}
}
else {
//always sparse in above used setup
throw new NotImplementedException();
}
}
}
}
}
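/*
 * A standalone sketch (not part of the test) of the MatrixMarket coordinate layout that
 * generateAndWriteMMInput(...) emits: a "%%MatrixMarket" banner, a "rows cols nnz" size line,
 * and one "i j value" triple per non-zero using 1-based indices (the value column is omitted
 * for pattern fields). The 3x3 example values and the class name are illustrative.
 */
class MatrixMarketLayoutSketch {
	public static void main(String[] args) {
		StringBuilder mm = new StringBuilder();
		mm.append("%%MatrixMarket matrix coordinate real general\n");
		mm.append("3 3 2\n");      // 3 rows, 3 columns, 2 non-zeros
		mm.append("1 1 4.5\n");    // row 1, column 1, value 4.5
		mm.append("3 2 -2.0\n");   // row 3, column 2, value -2.0
		System.out.print(mm);
	}
}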
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.hbase.index;
import static org.apache.phoenix.hbase.index.util.IndexManagementUtil.rethrowIndexingException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.OperationStatus;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
import org.apache.phoenix.coprocessor.BaseScannerRegionObserver.ReplayWrite;
import org.apache.phoenix.coprocessor.DelegateRegionCoprocessorEnvironment;
import org.apache.phoenix.hbase.index.LockManager.RowLock;
import org.apache.phoenix.hbase.index.builder.FatalIndexBuildingFailureException;
import org.apache.phoenix.hbase.index.builder.IndexBuildManager;
import org.apache.phoenix.hbase.index.builder.IndexBuilder;
import org.apache.phoenix.hbase.index.metrics.MetricsIndexerSource;
import org.apache.phoenix.hbase.index.metrics.MetricsIndexerSourceFactory;
import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
import org.apache.phoenix.hbase.index.util.VersionUtil;
import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
import org.apache.phoenix.hbase.index.write.IndexFailurePolicy;
import org.apache.phoenix.hbase.index.write.IndexWriter;
import org.apache.phoenix.hbase.index.write.RecoveryIndexWriter;
import org.apache.phoenix.hbase.index.write.recovery.PerRegionIndexWriteCache;
import org.apache.phoenix.hbase.index.write.recovery.StoreFailuresInCachePolicy;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.trace.TracingUtils;
import org.apache.phoenix.trace.util.NullSpan;
import org.apache.phoenix.util.EnvironmentEdgeManager;
import org.apache.phoenix.util.ScanUtil;
import org.apache.phoenix.util.ServerUtil;
import org.apache.phoenix.util.ServerUtil.ConnectionType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
/**
 * Do all the work of managing index updates from a single coprocessor. All Puts/Deletes are passed
* to an {@link IndexBuilder} to determine the actual updates to make.
* <p>
* If the WAL is enabled, these updates are then added to the WALEdit and attempted to be written to
* the WAL after the WALEdit has been saved. If any of the index updates fail, this server is
* immediately terminated and we rely on WAL replay to attempt the index updates again (see
* {@link #preWALRestore(ObserverContext, HRegionInfo, HLogKey, WALEdit)}).
* <p>
* If the WAL is disabled, the updates are attempted immediately. No consistency guarantees are made
* if the WAL is disabled - some or none of the index updates may be successful. All updates in a
* single batch must have the same durability level - either everything gets written to the WAL or
* nothing does. Currently, we do not support mixed-durability updates within a single batch. If you
* want to have different durability levels, you only need to split the updates into two different
* batches.
* <p>
* We don't need to implement {@link #postPut(ObserverContext, Put, WALEdit, Durability)} and
* {@link #postDelete(ObserverContext, Delete, WALEdit, Durability)} hooks because
* Phoenix always does batch mutations.
* <p>
*/
public class Indexer implements RegionObserver, RegionCoprocessor {
private static final Logger LOGGER = LoggerFactory.getLogger(Indexer.class);
private static final OperationStatus IGNORE = new OperationStatus(OperationStatusCode.SUCCESS);
private static final OperationStatus NOWRITE = new OperationStatus(OperationStatusCode.SUCCESS);
protected IndexWriter writer;
protected IndexBuildManager builder;
private LockManager lockManager;
// Hack to get around not being able to save any state between
// coprocessor calls. TODO: remove after HBASE-18127 when available
private static class BatchMutateContext {
public final int clientVersion;
public Collection<Pair<Mutation, byte[]>> indexUpdates = Collections.emptyList();
public List<RowLock> rowLocks = Lists.newArrayListWithExpectedSize(QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE);
public BatchMutateContext(int clientVersion) {
this.clientVersion = clientVersion;
}
}
private ThreadLocal<BatchMutateContext> batchMutateContext =
new ThreadLocal<BatchMutateContext>();
/** Configuration key for the {@link IndexBuilder} to use */
public static final String INDEX_BUILDER_CONF_KEY = "index.builder";
/**
   * Configuration key controlling whether the indexer should check the version of HBase it is running on. Generally,
* you only want to ignore this for testing or for custom versions of HBase.
*/
public static final String CHECK_VERSION_CONF_KEY = "com.saleforce.hbase.index.checkversion";
private static final String INDEX_RECOVERY_FAILURE_POLICY_KEY = "org.apache.hadoop.hbase.index.recovery.failurepolicy";
private static final String INDEXER_INDEX_WRITE_SLOW_THRESHOLD_KEY = "phoenix.indexer.slow.post.batch.mutate.threshold";
private static final long INDEXER_INDEX_WRITE_SLOW_THRESHOLD_DEFAULT = 3_000;
private static final String INDEXER_INDEX_PREPARE_SLOW_THRESHOLD_KEY = "phoenix.indexer.slow.pre.batch.mutate.threshold";
private static final long INDEXER_INDEX_PREPARE_SLOW_THREHSOLD_DEFAULT = 3_000;
private static final String INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_KEY = "phoenix.indexer.slow.pre.wal.restore.threshold";
private static final long INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_DEFAULT = 3_000;
private static final String INDEXER_POST_OPEN_SLOW_THRESHOLD_KEY = "phoenix.indexer.slow.open.threshold";
private static final long INDEXER_POST_OPEN_SLOW_THRESHOLD_DEFAULT = 3_000;
private static final String INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_KEY = "phoenix.indexer.slow.pre.increment";
private static final long INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_DEFAULT = 3_000;
/**
* cache the failed updates to the various regions. Used for making the WAL recovery mechanisms
   * more robust in the face of recovering index regions that were on the same server as the
* primary table region
*/
private PerRegionIndexWriteCache failedIndexEdits = new PerRegionIndexWriteCache();
/**
* IndexWriter for writing the recovered index edits. Separate from the main indexer since we need
* different write/failure policies
*/
private IndexWriter recoveryWriter;
private MetricsIndexerSource metricSource;
private boolean stopped;
private boolean disabled;
private long slowIndexWriteThreshold;
private long slowIndexPrepareThreshold;
private long slowPreWALRestoreThreshold;
private long slowPostOpenThreshold;
private long slowPreIncrementThreshold;
private int rowLockWaitDuration;
public static final String RecoveryFailurePolicyKeyForTesting = INDEX_RECOVERY_FAILURE_POLICY_KEY;
public static final int INDEXING_SUPPORTED_MAJOR_VERSION = VersionUtil
.encodeMaxPatchVersion(0, 94);
public static final int INDEXING_SUPPORTED__MIN_MAJOR_VERSION = VersionUtil
.encodeVersion("0.94.0");
private static final int INDEX_WAL_COMPRESSION_MINIMUM_SUPPORTED_VERSION = VersionUtil
.encodeVersion("0.94.9");
private static final int DEFAULT_ROWLOCK_WAIT_DURATION = 30000;
@Override
public Optional<RegionObserver> getRegionObserver() {
return Optional.of(this);
}
@Override
public void start(CoprocessorEnvironment e) throws IOException {
try {
final RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e;
String serverName = env.getServerName().getServerName();
if (env.getConfiguration().getBoolean(CHECK_VERSION_CONF_KEY, true)) {
        // make sure the right HBase version <-> configuration combinations are allowed.
String errormsg = Indexer.validateVersion(env.getHBaseVersion(), env.getConfiguration());
if (errormsg != null) {
throw new FatalIndexBuildingFailureException(errormsg);
}
}
this.builder = new IndexBuildManager(env);
// Clone the config since it is shared
DelegateRegionCoprocessorEnvironment indexWriterEnv = new DelegateRegionCoprocessorEnvironment(env, ConnectionType.INDEX_WRITER_CONNECTION);
// setup the actual index writer
this.writer = new IndexWriter(indexWriterEnv, serverName + "-index-writer");
this.rowLockWaitDuration = env.getConfiguration().getInt("hbase.rowlock.wait.duration",
DEFAULT_ROWLOCK_WAIT_DURATION);
this.lockManager = new LockManager();
// Metrics impl for the Indexer -- avoiding unnecessary indirection for hadoop-1/2 compat
this.metricSource = MetricsIndexerSourceFactory.getInstance().getIndexerSource();
setSlowThresholds(e.getConfiguration());
try {
// get the specified failure policy. We only ever override it in tests, but we need to do it
// here
Class<? extends IndexFailurePolicy> policyClass =
env.getConfiguration().getClass(INDEX_RECOVERY_FAILURE_POLICY_KEY,
StoreFailuresInCachePolicy.class, IndexFailurePolicy.class);
IndexFailurePolicy policy =
policyClass.getConstructor(PerRegionIndexWriteCache.class).newInstance(failedIndexEdits);
LOGGER.debug("Setting up recovery writter with failure policy: " + policy.getClass());
recoveryWriter =
new RecoveryIndexWriter(policy, indexWriterEnv, serverName + "-recovery-writer");
} catch (Exception ex) {
throw new IOException("Could not instantiate recovery failure policy!", ex);
}
} catch (NoSuchMethodError ex) {
disabled = true;
LOGGER.error("Must be too early a version of HBase. Disabled coprocessor ", ex);
}
}
/**
* Extracts the slow call threshold values from the configuration.
*/
private void setSlowThresholds(Configuration c) {
    slowIndexPrepareThreshold = c.getLong(INDEXER_INDEX_PREPARE_SLOW_THRESHOLD_KEY,
        INDEXER_INDEX_PREPARE_SLOW_THRESHOLD_DEFAULT);
    slowIndexWriteThreshold = c.getLong(INDEXER_INDEX_WRITE_SLOW_THRESHOLD_KEY,
        INDEXER_INDEX_WRITE_SLOW_THRESHOLD_DEFAULT);
slowPreWALRestoreThreshold = c.getLong(INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_KEY,
INDEXER_PRE_WAL_RESTORE_SLOW_THRESHOLD_DEFAULT);
slowPostOpenThreshold = c.getLong(INDEXER_POST_OPEN_SLOW_THRESHOLD_KEY,
INDEXER_POST_OPEN_SLOW_THRESHOLD_DEFAULT);
slowPreIncrementThreshold = c.getLong(INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_KEY,
INDEXER_PRE_INCREMENT_SLOW_THRESHOLD_DEFAULT);
}
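  // Illustrative only: these thresholds come from the region server configuration, so an
  // operator could, for example, lower the slow index-write warning threshold in
  // hbase-site.xml (the property name is the constant defined above; 1000 ms is just an
  // example value):
  //
  //   <property>
  //     <name>phoenix.indexer.slow.post.batch.mutate.threshold</name>
  //     <value>1000</value>
  //   </property>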
private String getCallTooSlowMessage(String callName, long duration, long threshold) {
StringBuilder sb = new StringBuilder(64);
sb.append("(callTooSlow) ").append(callName).append(" duration=").append(duration);
sb.append("ms, threshold=").append(threshold).append("ms");
return sb.toString();
}
@Override
public void stop(CoprocessorEnvironment e) throws IOException {
if (this.stopped) {
return;
}
if (this.disabled) {
return;
}
this.stopped = true;
String msg = "Indexer is being stopped";
this.builder.stop(msg);
this.writer.stop(msg);
this.recoveryWriter.stop(msg);
}
/**
* We use an Increment to serialize the ON DUPLICATE KEY clause so that the HBase plumbing
* sets up the necessary locks and mvcc to allow an atomic update. The Increment is not a
   * real increment, though; it is really more of a Put. We translate the Increment into a
   * list of mutations, at most a single Put and a single Delete, that represent the changes
   * produced by executing the list of ON DUPLICATE KEY clauses for this row.
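   * <p>
   * For illustration only (not part of this file): a Phoenix statement such as
   * {@code UPSERT INTO t(k, v) VALUES ('a', 1) ON DUPLICATE KEY UPDATE v = v + 1}
   * is the kind of client operation that reaches this hook encoded as an Increment.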
*/
@Override
public Result preIncrementAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> e,
final Increment inc) throws IOException {
long start = EnvironmentEdgeManager.currentTimeMillis();
try {
List<Mutation> mutations = this.builder.executeAtomicOp(inc);
if (mutations == null) {
return null;
}
// Causes the Increment to be ignored as we're committing the mutations
// ourselves below.
e.bypass();
// ON DUPLICATE KEY IGNORE will return empty list if row already exists
// as no action is required in that case.
if (!mutations.isEmpty()) {
Region region = e.getEnvironment().getRegion();
// Otherwise, submit the mutations directly here
region.batchMutate(mutations.toArray(new Mutation[0]));
}
return Result.EMPTY_RESULT;
} catch (Throwable t) {
throw ServerUtil.createIOException(
"Unable to process ON DUPLICATE IGNORE for " +
e.getEnvironment().getRegion().getRegionInfo().getTable().getNameAsString() +
"(" + Bytes.toStringBinary(inc.getRow()) + ")", t);
} finally {
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
        if (duration >= slowPreIncrementThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage("preIncrementAfterRowLock",
duration, slowPreIncrementThreshold));
}
metricSource.incrementSlowDuplicateKeyCheckCalls();
}
metricSource.updateDuplicateKeyCheckTime(duration);
}
}
@Override
public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
if (this.disabled) {
return;
}
long start = EnvironmentEdgeManager.currentTimeMillis();
try {
preBatchMutateWithExceptions(c, miniBatchOp);
return;
} catch (Throwable t) {
rethrowIndexingException(t);
} finally {
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
if (duration >= slowIndexPrepareThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage("preBatchMutate",
duration, slowIndexPrepareThreshold));
}
metricSource.incrementNumSlowIndexPrepareCalls();
}
metricSource.updateIndexPrepareTime(duration);
}
throw new RuntimeException(
"Somehow didn't return an index update but also didn't propagate the failure to the client!");
}
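  // Helper that overwrites the eight timestamp bytes of a KeyValue in place in its backing
  // buffer, avoiding a copy of the cell.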
private static void setTimeStamp(KeyValue kv, byte[] tsBytes) {
int tsOffset = kv.getTimestampOffset();
System.arraycopy(tsBytes, 0, kv.getBuffer(), tsOffset, Bytes.SIZEOF_LONG);
}
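  // Core of the indexing hook: group the batch by row, lock the affected rows, optionally reset
  // data-table timestamps, ask the IndexBuilder for the corresponding index mutations, apply
  // same-table updates back onto this batch, and (when the batch is durable) attach the
  // cross-table index updates to the WALEdit so they can be replayed after a crash.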
public void preBatchMutateWithExceptions(ObserverContext<RegionCoprocessorEnvironment> c,
MiniBatchOperationInProgress<Mutation> miniBatchOp) throws Throwable {
// first group all the updates for a single row into a single update to be processed
Map<ImmutableBytesPtr, MultiMutation> mutationsMap =
new HashMap<ImmutableBytesPtr, MultiMutation>();
Durability defaultDurability = Durability.SYNC_WAL;
if(c.getEnvironment().getRegion() != null) {
defaultDurability = c.getEnvironment().getRegion().getTableDescriptor().getDurability();
defaultDurability = (defaultDurability == Durability.USE_DEFAULT) ?
Durability.SYNC_WAL : defaultDurability;
}
/*
* Exclusively lock all rows so we get a consistent read
* while determining the index updates
*/
BatchMutateContext context = new BatchMutateContext(this.builder.getIndexMetaData(miniBatchOp).getClientVersion());
setBatchMutateContext(c, context);
Durability durability = Durability.SKIP_WAL;
boolean copyMutations = false;
for (int i = 0; i < miniBatchOp.size(); i++) {
Mutation m = miniBatchOp.getOperation(i);
if (this.builder.isAtomicOp(m)) {
miniBatchOp.setOperationStatus(i, IGNORE);
continue;
}
if (this.builder.isEnabled(m)) {
context.rowLocks.add(lockManager.lockRow(m.getRow(), rowLockWaitDuration));
              Durability effectiveDurability = (m.getDurability() == Durability.USE_DEFAULT) ?
                  defaultDurability : m.getDurability();
              if (effectiveDurability.ordinal() > durability.ordinal()) {
                  durability = effectiveDurability;
}
              // Track whether or not we need to copy the mutations, which is only necessary
              // when we see the same row more than once in the batch
ImmutableBytesPtr row = new ImmutableBytesPtr(m.getRow());
if (mutationsMap.containsKey(row)) {
copyMutations = true;
} else {
mutationsMap.put(row, null);
}
}
}
// early exit if it turns out we don't have any edits
if (mutationsMap.isEmpty()) {
return;
}
      // If we're copying the mutations, work off the combined per-row mutations from the map;
      // otherwise collect the original mutations as-is
Collection<Mutation> originalMutations;
Collection<? extends Mutation> mutations;
if (copyMutations) {
originalMutations = null;
mutations = mutationsMap.values();
} else {
originalMutations = Lists.newArrayListWithExpectedSize(mutationsMap.size());
mutations = originalMutations;
}
Mutation firstMutation = miniBatchOp.getOperation(0);
ReplayWrite replayWrite = this.builder.getReplayWrite(firstMutation);
boolean resetTimeStamp = replayWrite == null;
long now = EnvironmentEdgeManager.currentTimeMillis();
for (int i = 0; i < miniBatchOp.size(); i++) {
Mutation m = miniBatchOp.getOperation(i);
// skip this mutation if we aren't enabling indexing
// unfortunately, we really should ask if the raw mutation (rather than the combined mutation)
// should be indexed, which means we need to expose another method on the builder. Such is the
          // way optimizations go, though.
if (miniBatchOp.getOperationStatus(i) != IGNORE && this.builder.isEnabled(m)) {
if (resetTimeStamp) {
// Unless we're replaying edits to rebuild the index, we update the time stamp
// of the data table to prevent overlapping time stamps (which prevents index
// inconsistencies as this case isn't handled correctly currently).
for (List<Cell> cells : m.getFamilyCellMap().values()) {
for (Cell cell : cells) {
CellUtil.setTimestamp(cell, now);
}
}
}
// No need to write the table mutations when we're rebuilding
// the index as they're already written and just being replayed.
if (replayWrite == ReplayWrite.INDEX_ONLY
|| replayWrite == ReplayWrite.REBUILD_INDEX_ONLY) {
miniBatchOp.setOperationStatus(i, NOWRITE);
}
// Only copy mutations if we found duplicate rows
// which only occurs when we're partially rebuilding
// the index (since we'll potentially have both a
// Put and a Delete mutation for the same row).
if (copyMutations) {
// Add the mutation to the batch set
ImmutableBytesPtr row = new ImmutableBytesPtr(m.getRow());
MultiMutation stored = mutationsMap.get(row);
// we haven't seen this row before, so add it
if (stored == null) {
stored = new MultiMutation(row);
mutationsMap.put(row, stored);
}
stored.addAll(m);
} else {
originalMutations.add(m);
}
}
}
      // dump all the index updates into a single WALEdit. They will get combined in the end
      // anyway, so don't worry about which one we get
WALEdit edit = miniBatchOp.getWalEdit(0);
if (edit == null) {
edit = new WALEdit();
miniBatchOp.setWalEdit(0, edit);
}
if (copyMutations || replayWrite != null) {
mutations = IndexManagementUtil.flattenMutationsByTimestamp(mutations);
}
// get the current span, or just use a null-span to avoid a bunch of if statements
try (TraceScope scope = Trace.startSpan("Starting to build index updates")) {
Span current = scope.getSpan();
if (current == null) {
current = NullSpan.INSTANCE;
}
long start = EnvironmentEdgeManager.currentTimeMillis();
// get the index updates for all elements in this batch
Collection<Pair<Mutation, byte[]>> indexUpdates =
this.builder.getIndexUpdate(miniBatchOp, mutations);
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
if (duration >= slowIndexPrepareThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage(
"indexPrepare", duration, slowIndexPrepareThreshold));
}
metricSource.incrementNumSlowIndexPrepareCalls();
}
metricSource.updateIndexPrepareTime(duration);
current.addTimelineAnnotation("Built index updates, doing preStep");
TracingUtils.addAnnotation(current, "index update count", indexUpdates.size());
byte[] tableName = c.getEnvironment().getRegion().getTableDescriptor().getTableName().getName();
Iterator<Pair<Mutation, byte[]>> indexUpdatesItr = indexUpdates.iterator();
List<Mutation> localUpdates = new ArrayList<Mutation>(indexUpdates.size());
while(indexUpdatesItr.hasNext()) {
Pair<Mutation, byte[]> next = indexUpdatesItr.next();
if (Bytes.compareTo(next.getSecond(), tableName) == 0) {
localUpdates.add(next.getFirst());
indexUpdatesItr.remove();
}
}
if (!localUpdates.isEmpty()) {
miniBatchOp.addOperationsFromCP(0,
localUpdates.toArray(new Mutation[localUpdates.size()]));
}
if (!indexUpdates.isEmpty()) {
context.indexUpdates = indexUpdates;
// write index updates to WAL
if (durability != Durability.SKIP_WAL) {
// we have all the WAL durability, so we just update the WAL entry and move on
for (Pair<Mutation, byte[]> entry : indexUpdates) {
edit.add(new IndexedKeyValue(entry.getSecond(), entry.getFirst()));
}
}
}
}
}
private void setBatchMutateContext(ObserverContext<RegionCoprocessorEnvironment> c, BatchMutateContext context) {
this.batchMutateContext.set(context);
}
private BatchMutateContext getBatchMutateContext(ObserverContext<RegionCoprocessorEnvironment> c) {
return this.batchMutateContext.get();
}
private void removeBatchMutateContext(ObserverContext<RegionCoprocessorEnvironment> c) {
this.batchMutateContext.remove();
}
@Override
public void postBatchMutateIndispensably(ObserverContext<RegionCoprocessorEnvironment> c,
MiniBatchOperationInProgress<Mutation> miniBatchOp, final boolean success) throws IOException {
if (this.disabled) {
return;
}
long start = EnvironmentEdgeManager.currentTimeMillis();
BatchMutateContext context = getBatchMutateContext(c);
if (context == null) {
return;
}
try {
for (RowLock rowLock : context.rowLocks) {
rowLock.release();
}
this.builder.batchCompleted(miniBatchOp);
if (success) { // if miniBatchOp was successfully written, write index updates
doPost(c, context);
}
} finally {
removeBatchMutateContext(c);
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
if (duration >= slowIndexWriteThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage("postBatchMutateIndispensably",
duration, slowIndexWriteThreshold));
}
metricSource.incrementNumSlowIndexWriteCalls();
}
metricSource.updateIndexWriteTime(duration);
}
}
private void doPost(ObserverContext<RegionCoprocessorEnvironment> c, BatchMutateContext context) throws IOException {
try {
doPostWithExceptions(c,context);
return;
} catch (Throwable e) {
rethrowIndexingException(e);
}
throw new RuntimeException(
"Somehow didn't complete the index update, but didn't return succesfully either!");
}
private void doPostWithExceptions(ObserverContext<RegionCoprocessorEnvironment> c, BatchMutateContext context)
throws IOException {
//short circuit, if we don't need to do any work
if (context == null || context.indexUpdates.isEmpty()) {
return;
}
// get the current span, or just use a null-span to avoid a bunch of if statements
try (TraceScope scope = Trace.startSpan("Completing index writes")) {
Span current = scope.getSpan();
if (current == null) {
current = NullSpan.INSTANCE;
}
long start = EnvironmentEdgeManager.currentTimeMillis();
current.addTimelineAnnotation("Actually doing index update for first time");
writer.writeAndKillYourselfOnFailure(context.indexUpdates, false, context.clientVersion);
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
if (duration >= slowIndexWriteThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage("indexWrite",
duration, slowIndexWriteThreshold));
}
metricSource.incrementNumSlowIndexWriteCalls();
}
metricSource.updateIndexWriteTime(duration);
}
}
/**
* Search the {@link WALEdit} for the first {@link IndexedKeyValue} present
* @param edit {@link WALEdit}
* @return the first {@link IndexedKeyValue} in the {@link WALEdit} or <tt>null</tt> if not
* present
*/
private IndexedKeyValue getFirstIndexedKeyValue(WALEdit edit) {
for (Cell kv : edit.getCells()) {
if (kv instanceof IndexedKeyValue) {
return (IndexedKeyValue) kv;
}
}
return null;
}
/**
* Extract the index updates from the WAL Edit
* @param edit to search for index updates
* @return the mutations to apply to the index tables
*/
private Collection<Pair<Mutation, byte[]>> extractIndexUpdate(WALEdit edit) {
// Avoid multiple internal array resizings. Initial size of 64, unless we have fewer cells in the edit
int initialSize = Math.min(edit.size(), 64);
Collection<Pair<Mutation, byte[]>> indexUpdates = new ArrayList<Pair<Mutation, byte[]>>(initialSize);
for (Cell kv : edit.getCells()) {
if (kv instanceof IndexedKeyValue) {
IndexedKeyValue ikv = (IndexedKeyValue) kv;
indexUpdates.add(new Pair<Mutation, byte[]>(ikv.getMutation(), ikv.getIndexTable()));
}
}
return indexUpdates;
}
@Override
public void postOpen(final ObserverContext<RegionCoprocessorEnvironment> c) {
Multimap<HTableInterfaceReference, Mutation> updates = failedIndexEdits.getEdits(c.getEnvironment().getRegion());
if (this.disabled) {
return;
}
long start = EnvironmentEdgeManager.currentTimeMillis();
try {
//if we have no pending edits to complete, then we are done
if (updates == null || updates.size() == 0) {
return;
}
LOGGER.info("Found some outstanding index updates that didn't succeed during"
+ " WAL replay - attempting to replay now.");
// do the usual writer stuff, killing the server again, if we can't manage to make the index
// writes succeed again
try {
writer.writeAndKillYourselfOnFailure(updates, true, ScanUtil.UNKNOWN_CLIENT_VERSION);
} catch (IOException e) {
LOGGER.error("During WAL replay of outstanding index updates, "
+ "Exception is thrown instead of killing server during index writing", e);
}
} finally {
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
if (duration >= slowPostOpenThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage("postOpen", duration, slowPostOpenThreshold));
}
metricSource.incrementNumSlowPostOpenCalls();
}
metricSource.updatePostOpenTime(duration);
}
}
@Override
public void preWALRestore(
org.apache.hadoop.hbase.coprocessor.ObserverContext<? extends RegionCoprocessorEnvironment> ctx,
org.apache.hadoop.hbase.client.RegionInfo info, org.apache.hadoop.hbase.wal.WALKey logKey, WALEdit logEdit)
throws IOException {
if (this.disabled) {
return;
}
// TODO check the regions in transition. If the server on which the region lives is this one,
    // then we should retry that write later in postOpen.
// we might be able to get even smarter here and pre-split the edits that are server-local
// into their own recovered.edits file. This then lets us do a straightforward recovery of each
// region (and more efficiently as we aren't writing quite as hectically from this one place).
long start = EnvironmentEdgeManager.currentTimeMillis();
try {
/*
       * Basically, we let the index regions recover for a little while longer before retrying in the
* hopes they come up before the primary table finishes.
*/
Collection<Pair<Mutation, byte[]>> indexUpdates = extractIndexUpdate(logEdit);
recoveryWriter.writeAndKillYourselfOnFailure(indexUpdates, true, ScanUtil.UNKNOWN_CLIENT_VERSION);
} finally {
long duration = EnvironmentEdgeManager.currentTimeMillis() - start;
if (duration >= slowPreWALRestoreThreshold) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(getCallTooSlowMessage("preWALRestore",
duration, slowPreWALRestoreThreshold));
}
metricSource.incrementNumSlowPreWALRestoreCalls();
}
metricSource.updatePreWALRestoreTime(duration);
}
}
/**
* Exposed for testing!
* @return the currently instantiated index builder
*/
public IndexBuilder getBuilderForTesting() {
return this.builder.getBuilderForTesting();
}
/**
* Validate that the version and configuration parameters are supported
* @param hbaseVersion current version of HBase on which <tt>this</tt> coprocessor is installed
* @param conf configuration to check for allowed parameters (e.g. WAL Compression only if >=
* 0.94.9)
* @return <tt>null</tt> if the version is supported, the error message to display otherwise
*/
public static String validateVersion(String hbaseVersion, Configuration conf) {
int encodedVersion = VersionUtil.encodeVersion(hbaseVersion);
// above 0.94 everything should be supported
if (encodedVersion > INDEXING_SUPPORTED_MAJOR_VERSION) {
return null;
}
// check to see if its at least 0.94
if (encodedVersion < INDEXING_SUPPORTED__MIN_MAJOR_VERSION) {
return "Indexing not supported for versions older than 0.94.X";
}
// if less than 0.94.9, we need to check if WAL Compression is enabled
if (encodedVersion < INDEX_WAL_COMPRESSION_MINIMUM_SUPPORTED_VERSION) {
if (conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false)) {
return "Indexing not supported with WAL Compression for versions of HBase older than 0.94.9 - found version:"
+ hbaseVersion;
}
}
return null;
}
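  // Summary of the check above: versions newer than 0.94.x are always accepted, versions older
  // than 0.94 are always rejected, and 0.94.0 through 0.94.8 are accepted only when WAL
  // compression (HConstants.ENABLE_WAL_COMPRESSION) is disabled.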
/**
* Enable indexing on the given table
   * @param descBuilder {@link TableDescriptorBuilder} for the table on which indexing should be enabled
* @param builder class to use when building the index for this table
* @param properties map of custom configuration options to make available to your
* {@link IndexBuilder} on the server-side
* @param priority TODO
* @throws IOException the Indexer coprocessor cannot be added
*/
public static void enableIndexing(TableDescriptorBuilder descBuilder, Class<? extends IndexBuilder> builder,
Map<String, String> properties, int priority) throws IOException {
if (properties == null) {
properties = new HashMap<String, String>();
}
properties.put(Indexer.INDEX_BUILDER_CONF_KEY, builder.getName());
descBuilder.addCoprocessor(Indexer.class.getName(), null, priority, properties);
}
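  // Illustrative usage sketch; MyIndexBuilder and the chosen priority are hypothetical and not
  // part of this file:
  //
  //   TableDescriptorBuilder desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("t"));
  //   Indexer.enableIndexing(desc, MyIndexBuilder.class, null, Coprocessor.PRIORITY_USER);
  //
  // This registers the Indexer coprocessor on the table with INDEX_BUILDER_CONF_KEY pointing at
  // the supplied IndexBuilder implementation.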
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.pcapng;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.record.metadata.TupleSchema;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterTest;
import org.apache.drill.exec.physical.rowSet.RowSet;
import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
import org.apache.drill.test.rowSet.RowSetComparison;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Paths;
public class TestPcapngHeaders extends ClusterTest {
@BeforeClass
public static void setupTestFiles() throws Exception {
startCluster(ClusterFixture.builder(dirTestWatcher).maxParallelization(1));
dirTestWatcher.copyResourceToRoot(Paths.get("store", "pcapng"));
}
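  // Each test below builds only an expected schema plus an empty expected RowSet (no rows are
  // added to the RowSetBuilder) and then uses RowSetComparison to verify the query result
  // against it before clearing both row sets.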
@Test
public void testValidHeadersForStarQuery() throws IOException {
String query = "select * from dfs.`store/pcapng/sniff.pcapng`";
RowSet actual = client.queryBuilder().sql(query).rowSet();
TupleMetadata expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("tcp_flags_ece_ecn_capable", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags_ece_congestion_experienced", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags_psh", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("type", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("tcp_flags_cwr", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("dst_ip", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("src_ip", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("tcp_flags_fin", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags_ece", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags_ack", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("src_mac_address", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("tcp_flags_syn", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags_rst", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("timestamp", Types.required(TypeProtos.MinorType.TIMESTAMP)));
expectedSchema.add(MaterializedField.create("tcp_session", Types.optional(TypeProtos.MinorType.BIGINT)));
expectedSchema.add(MaterializedField.create("packet_data", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("tcp_parsed_flags", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("tcp_flags_ns", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("src_port", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("packet_length", Types.required(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_flags_urg", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_ack", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("dst_port", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("dst_mac_address", Types.optional(TypeProtos.MinorType.VARCHAR)));
RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
}
@Test
public void testValidHeadersForProjection() throws IOException {
String query = "select sRc_ip, dst_IP, dst_mAc_address, src_Port, tcp_session, `Timestamp` from dfs.`store/pcapng/sniff.pcapng`";
RowSet actual = client.queryBuilder().sql(query).rowSet();
TupleMetadata expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("sRc_ip", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("dst_IP", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("dst_mAc_address", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("src_Port", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_session", Types.optional(TypeProtos.MinorType.BIGINT)));
expectedSchema.add(MaterializedField.create("Timestamp", Types.required(TypeProtos.MinorType.TIMESTAMP)));
RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
}
@Test
public void testValidHeadersForMissColumns() throws IOException {
String query = "select `timestamp`, `name`, `color` from dfs.`store/pcapng/sniff.pcapng`";
RowSet actual = client.queryBuilder().sql(query).rowSet();
TupleMetadata expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("timestamp", Types.required(TypeProtos.MinorType.TIMESTAMP)));
expectedSchema.add(MaterializedField.create("name", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("color", Types.optional(TypeProtos.MinorType.INT)));
RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
}
@Test
public void testMixColumns() throws IOException {
String query = "select src_ip, dst_ip, dst_mac_address, src_port, tcp_session, `timestamp` from dfs.`store/pcapng/sniff.pcapng`";
RowSet actual = client.queryBuilder().sql(query).rowSet();
TupleMetadata expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("sRc_ip", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("dst_IP", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("dst_mAc_address", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("src_Port", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_session", Types.optional(TypeProtos.MinorType.BIGINT)));
expectedSchema.add(MaterializedField.create("Timestamp", Types.required(TypeProtos.MinorType.TIMESTAMP)));
RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
String queryWithDiffOrder = "select `timestamp`, src_ip, dst_ip, src_port, tcp_session, dst_mac_address from dfs.`store/pcapng/sniff.pcapng`";
actual = client.queryBuilder().sql(queryWithDiffOrder).rowSet();
expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("timestamp", Types.required(TypeProtos.MinorType.TIMESTAMP)));
expectedSchema.add(MaterializedField.create("src_ip", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("dst_ip", Types.optional(TypeProtos.MinorType.VARCHAR)));
expectedSchema.add(MaterializedField.create("src_port", Types.optional(TypeProtos.MinorType.INT)));
expectedSchema.add(MaterializedField.create("tcp_session", Types.optional(TypeProtos.MinorType.BIGINT)));
expectedSchema.add(MaterializedField.create("dst_mac_address", Types.optional(TypeProtos.MinorType.VARCHAR)));
expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
}
@Test
public void testValidHeaderForArrayColumns() throws IOException {
// query with non-existent field
String query = "select arr[3] as arr from dfs.`store/pcapng/sniff.pcapng`";
RowSet actual = client.queryBuilder().sql(query).rowSet();
TupleMetadata expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("arr", Types.optional(TypeProtos.MinorType.INT)));
RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
    // query with an existing field which doesn't support arrays
query = "select type[45] as arr from dfs.`store/pcapng/sniff.pcapng`";
expectedSchema = new TupleSchema();
actual = client.queryBuilder().sql(query).rowSet();
expectedSchema.add(MaterializedField.create("arr", Types.optional(TypeProtos.MinorType.INT)));
expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
}
@Test
public void testValidHeaderForNestedColumns() throws IOException {
// query with non-existent field
String query = "select top['nested'] as nested from dfs.`store/pcapng/sniff.pcapng`";
RowSet actual = client.queryBuilder().sql(query).rowSet();
TupleMetadata expectedSchema = new TupleSchema();
expectedSchema.add(MaterializedField.create("nested", Types.optional(TypeProtos.MinorType.INT)));
RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
    // query with an existing field which doesn't support nesting
query = "select type['nested'] as nested from dfs.`store/pcapng/sniff.pcapng`";
expectedSchema = new TupleSchema();
actual = client.queryBuilder().sql(query).rowSet();
expectedSchema.add(MaterializedField.create("nested", Types.optional(TypeProtos.MinorType.INT)));
expected = new RowSetBuilder(client.allocator(), expectedSchema)
.build();
new RowSetComparison(expected)
.verifyAndClearAll(actual);
}
}
|
|
/**
*/
package etlMetaModel.impl;
import etlMetaModel.EtlMetaModelPackage;
import etlMetaModel.MofAssociation;
import etlMetaModel.MofClass;
import etlMetaModel.MofMultiplicityElement;
import etlMetaModel.MofProperty;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Mof Property</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link etlMetaModel.impl.MofPropertyImpl#getUpper <em>Upper</em>}</li>
* <li>{@link etlMetaModel.impl.MofPropertyImpl#getLower <em>Lower</em>}</li>
* <li>{@link etlMetaModel.impl.MofPropertyImpl#getClazz <em>Clazz</em>}</li>
* <li>{@link etlMetaModel.impl.MofPropertyImpl#getAssociation <em>Association</em>}</li>
* <li>{@link etlMetaModel.impl.MofPropertyImpl#getIsComposite <em>Is Composite</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class MofPropertyImpl extends MofTypedElementImpl implements MofProperty {
/**
* The default value of the '{@link #getUpper() <em>Upper</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getUpper()
* @generated
* @ordered
*/
protected static final Integer UPPER_EDEFAULT = null;
/**
* The cached value of the '{@link #getUpper() <em>Upper</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getUpper()
* @generated
* @ordered
*/
protected Integer upper = UPPER_EDEFAULT;
/**
* The default value of the '{@link #getLower() <em>Lower</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLower()
* @generated
* @ordered
*/
protected static final Integer LOWER_EDEFAULT = null;
/**
* The cached value of the '{@link #getLower() <em>Lower</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLower()
* @generated
* @ordered
*/
protected Integer lower = LOWER_EDEFAULT;
/**
* The cached value of the '{@link #getAssociation() <em>Association</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getAssociation()
* @generated
* @ordered
*/
protected MofAssociation association;
/**
* The default value of the '{@link #getIsComposite() <em>Is Composite</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getIsComposite()
* @generated
* @ordered
*/
protected static final Boolean IS_COMPOSITE_EDEFAULT = null;
/**
* The cached value of the '{@link #getIsComposite() <em>Is Composite</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getIsComposite()
* @generated
* @ordered
*/
protected Boolean isComposite = IS_COMPOSITE_EDEFAULT;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public MofPropertyImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return EtlMetaModelPackage.Literals.MOF_PROPERTY;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Integer getUpper() {
return upper;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setUpper(Integer newUpper) {
Integer oldUpper = upper;
upper = newUpper;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.MOF_PROPERTY__UPPER, oldUpper, upper));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Integer getLower() {
return lower;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setLower(Integer newLower) {
Integer oldLower = lower;
lower = newLower;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.MOF_PROPERTY__LOWER, oldLower, lower));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public MofClass getClazz() {
if (eContainerFeatureID() != EtlMetaModelPackage.MOF_PROPERTY__CLAZZ) return null;
return (MofClass)eInternalContainer();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetClazz(MofClass newClazz, NotificationChain msgs) {
msgs = eBasicSetContainer((InternalEObject)newClazz, EtlMetaModelPackage.MOF_PROPERTY__CLAZZ, msgs);
return msgs;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setClazz(MofClass newClazz) {
if (newClazz != eInternalContainer() || (eContainerFeatureID() != EtlMetaModelPackage.MOF_PROPERTY__CLAZZ && newClazz != null)) {
if (EcoreUtil.isAncestor(this, newClazz))
throw new IllegalArgumentException("Recursive containment not allowed for " + toString());
NotificationChain msgs = null;
if (eInternalContainer() != null)
msgs = eBasicRemoveFromContainer(msgs);
if (newClazz != null)
msgs = ((InternalEObject)newClazz).eInverseAdd(this, EtlMetaModelPackage.MOF_CLASS__OWNED_PROPERTIES, MofClass.class, msgs);
msgs = basicSetClazz(newClazz, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.MOF_PROPERTY__CLAZZ, newClazz, newClazz));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public MofAssociation getAssociation() {
if (association != null && association.eIsProxy()) {
InternalEObject oldAssociation = (InternalEObject)association;
association = (MofAssociation)eResolveProxy(oldAssociation);
if (association != oldAssociation) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION, oldAssociation, association));
}
}
return association;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public MofAssociation basicGetAssociation() {
return association;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetAssociation(MofAssociation newAssociation, NotificationChain msgs) {
MofAssociation oldAssociation = association;
association = newAssociation;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION, oldAssociation, newAssociation);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setAssociation(MofAssociation newAssociation) {
if (newAssociation != association) {
NotificationChain msgs = null;
if (association != null)
msgs = ((InternalEObject)association).eInverseRemove(this, EtlMetaModelPackage.MOF_ASSOCIATION__MEMBER_ENDS, MofAssociation.class, msgs);
if (newAssociation != null)
msgs = ((InternalEObject)newAssociation).eInverseAdd(this, EtlMetaModelPackage.MOF_ASSOCIATION__MEMBER_ENDS, MofAssociation.class, msgs);
msgs = basicSetAssociation(newAssociation, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION, newAssociation, newAssociation));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Boolean getIsComposite() {
return isComposite;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setIsComposite(Boolean newIsComposite) {
Boolean oldIsComposite = isComposite;
isComposite = newIsComposite;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.MOF_PROPERTY__IS_COMPOSITE, oldIsComposite, isComposite));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
if (eInternalContainer() != null)
msgs = eBasicRemoveFromContainer(msgs);
return basicSetClazz((MofClass)otherEnd, msgs);
case EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION:
if (association != null)
msgs = ((InternalEObject)association).eInverseRemove(this, EtlMetaModelPackage.MOF_ASSOCIATION__MEMBER_ENDS, MofAssociation.class, msgs);
return basicSetAssociation((MofAssociation)otherEnd, msgs);
}
return super.eInverseAdd(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
return basicSetClazz(null, msgs);
case EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION:
return basicSetAssociation(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eBasicRemoveFromContainerFeature(NotificationChain msgs) {
switch (eContainerFeatureID()) {
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
return eInternalContainer().eInverseRemove(this, EtlMetaModelPackage.MOF_CLASS__OWNED_PROPERTIES, MofClass.class, msgs);
}
return super.eBasicRemoveFromContainerFeature(msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case EtlMetaModelPackage.MOF_PROPERTY__UPPER:
return getUpper();
case EtlMetaModelPackage.MOF_PROPERTY__LOWER:
return getLower();
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
return getClazz();
case EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION:
if (resolve) return getAssociation();
return basicGetAssociation();
case EtlMetaModelPackage.MOF_PROPERTY__IS_COMPOSITE:
return getIsComposite();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case EtlMetaModelPackage.MOF_PROPERTY__UPPER:
setUpper((Integer)newValue);
return;
case EtlMetaModelPackage.MOF_PROPERTY__LOWER:
setLower((Integer)newValue);
return;
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
setClazz((MofClass)newValue);
return;
case EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION:
setAssociation((MofAssociation)newValue);
return;
case EtlMetaModelPackage.MOF_PROPERTY__IS_COMPOSITE:
setIsComposite((Boolean)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case EtlMetaModelPackage.MOF_PROPERTY__UPPER:
setUpper(UPPER_EDEFAULT);
return;
case EtlMetaModelPackage.MOF_PROPERTY__LOWER:
setLower(LOWER_EDEFAULT);
return;
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
setClazz((MofClass)null);
return;
case EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION:
setAssociation((MofAssociation)null);
return;
case EtlMetaModelPackage.MOF_PROPERTY__IS_COMPOSITE:
setIsComposite(IS_COMPOSITE_EDEFAULT);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case EtlMetaModelPackage.MOF_PROPERTY__UPPER:
return UPPER_EDEFAULT == null ? upper != null : !UPPER_EDEFAULT.equals(upper);
case EtlMetaModelPackage.MOF_PROPERTY__LOWER:
return LOWER_EDEFAULT == null ? lower != null : !LOWER_EDEFAULT.equals(lower);
case EtlMetaModelPackage.MOF_PROPERTY__CLAZZ:
return getClazz() != null;
case EtlMetaModelPackage.MOF_PROPERTY__ASSOCIATION:
return association != null;
case EtlMetaModelPackage.MOF_PROPERTY__IS_COMPOSITE:
return IS_COMPOSITE_EDEFAULT == null ? isComposite != null : !IS_COMPOSITE_EDEFAULT.equals(isComposite);
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) {
if (baseClass == MofMultiplicityElement.class) {
switch (derivedFeatureID) {
case EtlMetaModelPackage.MOF_PROPERTY__UPPER: return EtlMetaModelPackage.MOF_MULTIPLICITY_ELEMENT__UPPER;
case EtlMetaModelPackage.MOF_PROPERTY__LOWER: return EtlMetaModelPackage.MOF_MULTIPLICITY_ELEMENT__LOWER;
default: return -1;
}
}
return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
if (baseClass == MofMultiplicityElement.class) {
switch (baseFeatureID) {
case EtlMetaModelPackage.MOF_MULTIPLICITY_ELEMENT__UPPER: return EtlMetaModelPackage.MOF_PROPERTY__UPPER;
case EtlMetaModelPackage.MOF_MULTIPLICITY_ELEMENT__LOWER: return EtlMetaModelPackage.MOF_PROPERTY__LOWER;
default: return -1;
}
}
return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (upper: ");
result.append(upper);
result.append(", lower: ");
result.append(lower);
result.append(", isComposite: ");
result.append(isComposite);
result.append(')');
return result.toString();
}
} //MofPropertyImpl
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.local;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.IncompatibleClusterStateVersionException;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.discovery.AckClusterStatePublishResponseHandler;
import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.DiscoveryStats;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.elasticsearch.cluster.ClusterState.Builder;
/**
*
*/
public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery {
private static final LocalDiscovery[] NO_MEMBERS = new LocalDiscovery[0];
private final ClusterService clusterService;
private RoutingService routingService;
private final ClusterName clusterName;
private final DiscoverySettings discoverySettings;
private volatile boolean master = false;
private final AtomicBoolean initialStateSent = new AtomicBoolean();
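    // Static, JVM-wide registry of cluster groups keyed by cluster name; LocalDiscovery instances
    // can therefore only ever discover other instances running in the same JVM, which limits this
    // implementation to local (test/single-process) clusters.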
private static final ConcurrentMap<ClusterName, ClusterGroup> clusterGroups = ConcurrentCollections.newConcurrentMap();
private volatile ClusterState lastProcessedClusterState;
@Inject
public LocalDiscovery(Settings settings, ClusterService clusterService, ClusterSettings clusterSettings) {
super(settings);
this.clusterName = clusterService.getClusterName();
this.clusterService = clusterService;
this.discoverySettings = new DiscoverySettings(settings, clusterSettings);
}
@Override
public void setRoutingService(RoutingService routingService) {
this.routingService = routingService;
}
@Override
protected void doStart() {
}
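    // startInitialJoin: the first master-eligible member of the cluster group acts as master.
    // If this node is that member, it publishes an initial state naming itself master and
    // removing the no-master block; otherwise it asks the existing master to add this node to
    // the cluster state and to reroute shards once the new state has been processed.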
@Override
public void startInitialJoin() {
synchronized (clusterGroups) {
ClusterGroup clusterGroup = clusterGroups.get(clusterName);
if (clusterGroup == null) {
clusterGroup = new ClusterGroup();
clusterGroups.put(clusterName, clusterGroup);
}
logger.debug("Connected to cluster [{}]", clusterName);
clusterGroup.members().add(this);
LocalDiscovery firstMaster = null;
for (LocalDiscovery localDiscovery : clusterGroup.members()) {
if (localDiscovery.localNode().isMasterNode()) {
firstMaster = localDiscovery;
break;
}
}
if (firstMaster != null && firstMaster.equals(this)) {
// we are the first master (and the master)
master = true;
final LocalDiscovery master = firstMaster;
clusterService.submitStateUpdateTask("local-disco-initial_connect(master)", new ClusterStateUpdateTask() {
@Override
public boolean runOnlyOnMaster() {
return false;
}
@Override
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
for (LocalDiscovery discovery : clusterGroups.get(clusterName).members()) {
nodesBuilder.put(discovery.localNode());
}
nodesBuilder.localNodeId(master.localNode().getId()).masterNodeId(master.localNode().getId());
// remove the NO_MASTER block in this case
ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeGlobalBlock(discoverySettings.getNoMasterBlock());
return ClusterState.builder(currentState).nodes(nodesBuilder).blocks(blocks).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
});
} else if (firstMaster != null) {
// tell the master to send the fact that we are here
final LocalDiscovery master = firstMaster;
firstMaster.clusterService.submitStateUpdateTask("local-disco-receive(from node[" + localNode() + "])", new ClusterStateUpdateTask() {
@Override
public boolean runOnlyOnMaster() {
return false;
}
@Override
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
for (LocalDiscovery discovery : clusterGroups.get(clusterName).members()) {
nodesBuilder.put(discovery.localNode());
}
nodesBuilder.localNodeId(master.localNode().getId()).masterNodeId(master.localNode().getId());
return ClusterState.builder(currentState).nodes(nodesBuilder).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
// we reroute not in the same cluster state update since in certain areas we rely on
// the node to be in the cluster state (sampled from ClusterService#state) to be there, also
// shard transitions need to better be handled in such cases
master.routingService.reroute("post_node_add");
}
});
}
} // else, no master node, the next node that will start will fill things in...
}
@Override
protected void doStop() {
synchronized (clusterGroups) {
ClusterGroup clusterGroup = clusterGroups.get(clusterName);
if (clusterGroup == null) {
logger.warn("Illegal state, should not have an empty cluster group when stopping, I should be there at teh very least...");
return;
}
clusterGroup.members().remove(this);
if (clusterGroup.members().isEmpty()) {
// no more members, remove and return
clusterGroups.remove(clusterName);
return;
}
LocalDiscovery firstMaster = null;
for (LocalDiscovery localDiscovery : clusterGroup.members()) {
if (localDiscovery.localNode().isMasterNode()) {
firstMaster = localDiscovery;
break;
}
}
if (firstMaster != null) {
// if the removed node is the master, make the next one as the master
if (master) {
firstMaster.master = true;
}
final Set<String> newMembers = new HashSet<>();
for (LocalDiscovery discovery : clusterGroup.members()) {
newMembers.add(discovery.localNode().getId());
}
final LocalDiscovery master = firstMaster;
master.clusterService.submitStateUpdateTask("local-disco-update", new ClusterStateUpdateTask() {
@Override
public boolean runOnlyOnMaster() {
return false;
}
@Override
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes newNodes = currentState.nodes().removeDeadMembers(newMembers, master.localNode().getId());
DiscoveryNodes.Delta delta = newNodes.delta(currentState.nodes());
if (delta.added()) {
logger.warn("No new nodes should be created when a new discovery view is accepted");
}
// reroute here, so we eagerly remove dead nodes from the routing
ClusterState updatedState = ClusterState.builder(currentState).nodes(newNodes).build();
RoutingAllocation.Result routingResult = master.routingService.getAllocationService().reroute(
ClusterState.builder(updatedState).build(), "elected as master");
return ClusterState.builder(updatedState).routingResult(routingResult).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
});
}
}
}
@Override
protected void doClose() {
}
@Override
public DiscoveryNode localNode() {
return clusterService.localNode();
}
@Override
public String nodeDescription() {
return clusterName.value() + "/" + localNode().getId();
}
@Override
public void publish(ClusterChangedEvent clusterChangedEvent, final Discovery.AckListener ackListener) {
if (!master) {
throw new IllegalStateException("Shouldn't publish state when not master");
}
LocalDiscovery[] members = members();
if (members.length > 0) {
Set<DiscoveryNode> nodesToPublishTo = new HashSet<>(members.length);
for (LocalDiscovery localDiscovery : members) {
if (localDiscovery.master) {
continue;
}
nodesToPublishTo.add(localDiscovery.localNode());
}
publish(members, clusterChangedEvent, new AckClusterStatePublishResponseHandler(nodesToPublishTo, ackListener));
}
}
@Override
public DiscoveryStats stats() {
return new DiscoveryStats(null);
}
@Override
public DiscoverySettings getDiscoverySettings() {
return discoverySettings;
}
@Override
public int getMinimumMasterNodes() {
return -1;
}
private LocalDiscovery[] members() {
ClusterGroup clusterGroup = clusterGroups.get(clusterName);
if (clusterGroup == null) {
return NO_MEMBERS;
}
Queue<LocalDiscovery> members = clusterGroup.members();
return members.toArray(new LocalDiscovery[members.size()]);
}
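    // Publishes the given cluster state to every non-master member of the local cluster group.
    // The full serialized state and the diff against the previous state are each computed at most
    // once and reused for all recipients; a diff is applied for a node only when that node has
    // already processed an earlier state and was part of the previous cluster state, otherwise
    // (or when the diff turns out to be incompatible) the full serialized state is used instead.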
private void publish(LocalDiscovery[] members, ClusterChangedEvent clusterChangedEvent, final BlockingClusterStatePublishResponseHandler publishResponseHandler) {
try {
// we do the marshaling intentionally, to check it works well...
byte[] clusterStateBytes = null;
byte[] clusterStateDiffBytes = null;
ClusterState clusterState = clusterChangedEvent.state();
for (final LocalDiscovery discovery : members) {
if (discovery.master) {
continue;
}
ClusterState newNodeSpecificClusterState = null;
synchronized (this) {
// we do the marshaling intentionally, to check it works well...
// check if we published cluster state at least once and node was in the cluster when we published cluster state the last time
if (discovery.lastProcessedClusterState != null && clusterChangedEvent.previousState().nodes().nodeExists(discovery.localNode().getId())) {
// both conditions are true - which means we can try sending cluster state as diffs
if (clusterStateDiffBytes == null) {
Diff diff = clusterState.diff(clusterChangedEvent.previousState());
BytesStreamOutput os = new BytesStreamOutput();
diff.writeTo(os);
clusterStateDiffBytes = os.bytes().toBytes();
}
try {
newNodeSpecificClusterState = discovery.lastProcessedClusterState.readDiffFrom(StreamInput.wrap(clusterStateDiffBytes)).apply(discovery.lastProcessedClusterState);
logger.trace("sending diff cluster state version [{}] with size {} to [{}]", clusterState.version(), clusterStateDiffBytes.length, discovery.localNode().getName());
} catch (IncompatibleClusterStateVersionException ex) {
logger.warn("incompatible cluster state version [{}] - resending complete cluster state", ex, clusterState.version());
}
}
if (newNodeSpecificClusterState == null) {
if (clusterStateBytes == null) {
clusterStateBytes = Builder.toBytes(clusterState);
}
newNodeSpecificClusterState = ClusterState.Builder.fromBytes(clusterStateBytes, discovery.localNode());
}
discovery.lastProcessedClusterState = newNodeSpecificClusterState;
}
final ClusterState nodeSpecificClusterState = newNodeSpecificClusterState;
nodeSpecificClusterState.status(ClusterState.ClusterStateStatus.RECEIVED);
// ignore cluster state messages that do not include "me", not in the game yet...
if (nodeSpecificClusterState.nodes().getLocalNode() != null) {
assert nodeSpecificClusterState.nodes().getMasterNode() != null : "received a cluster state without a master";
assert !nodeSpecificClusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock()) : "received a cluster state with a master block";
discovery.clusterService.submitStateUpdateTask("local-disco-receive(from master)", new ClusterStateUpdateTask() {
@Override
public boolean runOnlyOnMaster() {
return false;
}
@Override
public ClusterState execute(ClusterState currentState) {
if (currentState.supersedes(nodeSpecificClusterState)) {
return currentState;
}
if (currentState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock())) {
// it's a fresh update from the master as we transition from not having a master to having one
logger.debug("got first state from fresh master [{}]", nodeSpecificClusterState.nodes().getMasterNodeId());
return nodeSpecificClusterState;
}
ClusterState.Builder builder = ClusterState.builder(nodeSpecificClusterState);
// if the routing table did not change, use the original one
if (nodeSpecificClusterState.routingTable().version() == currentState.routingTable().version()) {
builder.routingTable(currentState.routingTable());
}
if (nodeSpecificClusterState.metaData().version() == currentState.metaData().version()) {
builder.metaData(currentState.metaData());
}
return builder.build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
publishResponseHandler.onFailure(discovery.localNode(), t);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
publishResponseHandler.onResponse(discovery.localNode());
}
});
} else {
publishResponseHandler.onResponse(discovery.localNode());
}
}
TimeValue publishTimeout = discoverySettings.getPublishTimeout();
if (publishTimeout.millis() > 0) {
try {
boolean awaited = publishResponseHandler.awaitAllNodes(publishTimeout);
if (!awaited) {
DiscoveryNode[] pendingNodes = publishResponseHandler.pendingNodes();
// everyone may have just responded
if (pendingNodes.length > 0) {
logger.warn("timed out waiting for all nodes to process published state [{}] (timeout [{}], pending nodes: {})", clusterState.version(), publishTimeout, pendingNodes);
}
}
} catch (InterruptedException e) {
// ignore & restore interrupt
Thread.currentThread().interrupt();
}
}
} catch (Exception e) {
// failure to marshal or un-marshal
throw new IllegalStateException("Cluster state failed to serialize", e);
}
}
private class ClusterGroup {
private Queue<LocalDiscovery> members = ConcurrentCollections.newQueue();
Queue<LocalDiscovery> members() {
return members;
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.test;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.zookeeper.PortAssignment;
import org.apache.zookeeper.TestableZooKeeper;
import org.apache.zookeeper.server.quorum.Election;
import org.apache.zookeeper.server.quorum.QuorumPeer;
import org.apache.zookeeper.server.quorum.QuorumPeer.LearnerType;
import org.apache.zookeeper.server.quorum.QuorumPeer.QuorumServer;
import org.junit.Assert;
import org.junit.Test;
import com.sun.management.UnixOperatingSystemMXBean;
public class QuorumBase extends ClientBase {
private static final Logger LOG = LoggerFactory.getLogger(QuorumBase.class);
File s1dir, s2dir, s3dir, s4dir, s5dir;
QuorumPeer s1, s2, s3, s4, s5;
protected int port1;
protected int port2;
protected int port3;
protected int port4;
protected int port5;
protected int portLE1;
protected int portLE2;
protected int portLE3;
protected int portLE4;
protected int portLE5;
@Test
// This just avoids complaints by junit
public void testNull() {
}
@Override
public void setUp() throws Exception {
setUp(false);
}
protected void setUp(boolean withObservers) throws Exception {
LOG.info("QuorumBase.setup " + getTestName());
setupTestEnv();
JMXEnv.setUp();
setUpAll();
port1 = PortAssignment.unique();
port2 = PortAssignment.unique();
port3 = PortAssignment.unique();
port4 = PortAssignment.unique();
port5 = PortAssignment.unique();
portLE1 = PortAssignment.unique();
portLE2 = PortAssignment.unique();
portLE3 = PortAssignment.unique();
portLE4 = PortAssignment.unique();
portLE5 = PortAssignment.unique();
hostPort = "127.0.0.1:" + port1
+ ",127.0.0.1:" + port2
+ ",127.0.0.1:" + port3
+ ",127.0.0.1:" + port4
+ ",127.0.0.1:" + port5;
LOG.info("Ports are: " + hostPort);
s1dir = ClientBase.createTmpDir();
s2dir = ClientBase.createTmpDir();
s3dir = ClientBase.createTmpDir();
s4dir = ClientBase.createTmpDir();
s5dir = ClientBase.createTmpDir();
startServers(withObservers);
OperatingSystemMXBean osMbean =
ManagementFactory.getOperatingSystemMXBean();
if (osMbean != null && osMbean instanceof UnixOperatingSystemMXBean) {
UnixOperatingSystemMXBean unixos =
(UnixOperatingSystemMXBean)osMbean;
LOG.info("Initial fdcount is: "
+ unixos.getOpenFileDescriptorCount());
}
LOG.info("Setup finished");
}
void startServers() throws Exception {
startServers(false);
}
void startServers(boolean withObservers) throws Exception {
int tickTime = 2000;
int initLimit = 3;
int syncLimit = 3;
HashMap<Long,QuorumServer> peers = new HashMap<Long,QuorumServer>();
peers.put(Long.valueOf(1), new QuorumServer(1,
new InetSocketAddress("127.0.0.1", port1 + 1000),
new InetSocketAddress("127.0.0.1", portLE1 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(2), new QuorumServer(2,
new InetSocketAddress("127.0.0.1", port2 + 1000),
new InetSocketAddress("127.0.0.1", portLE2 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(3), new QuorumServer(3,
new InetSocketAddress("127.0.0.1", port3 + 1000),
new InetSocketAddress("127.0.0.1", portLE3 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(4), new QuorumServer(4,
new InetSocketAddress("127.0.0.1", port4 + 1000),
new InetSocketAddress("127.0.0.1", portLE4 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(5), new QuorumServer(5,
new InetSocketAddress("127.0.0.1", port5 + 1000),
new InetSocketAddress("127.0.0.1", portLE5 + 1000),
LearnerType.PARTICIPANT));
if (withObservers) {
peers.get(Long.valueOf(4)).type = LearnerType.OBSERVER;
peers.get(Long.valueOf(5)).type = LearnerType.OBSERVER;
}
LOG.info("creating QuorumPeer 1 port " + port1);
s1 = new QuorumPeer(peers, s1dir, s1dir, port1, 3, 1, tickTime, initLimit, syncLimit);
Assert.assertEquals(port1, s1.getClientPort());
LOG.info("creating QuorumPeer 2 port " + port2);
s2 = new QuorumPeer(peers, s2dir, s2dir, port2, 3, 2, tickTime, initLimit, syncLimit);
Assert.assertEquals(port2, s2.getClientPort());
LOG.info("creating QuorumPeer 3 port " + port3);
s3 = new QuorumPeer(peers, s3dir, s3dir, port3, 3, 3, tickTime, initLimit, syncLimit);
Assert.assertEquals(port3, s3.getClientPort());
LOG.info("creating QuorumPeer 4 port " + port4);
s4 = new QuorumPeer(peers, s4dir, s4dir, port4, 3, 4, tickTime, initLimit, syncLimit);
Assert.assertEquals(port4, s4.getClientPort());
LOG.info("creating QuorumPeer 5 port " + port5);
s5 = new QuorumPeer(peers, s5dir, s5dir, port5, 3, 5, tickTime, initLimit, syncLimit);
Assert.assertEquals(port5, s5.getClientPort());
if (withObservers) {
s4.setLearnerType(LearnerType.OBSERVER);
s5.setLearnerType(LearnerType.OBSERVER);
}
LOG.info("QuorumPeer 1 voting view: " + s1.getVotingView());
LOG.info("QuorumPeer 2 voting view: " + s2.getVotingView());
LOG.info("QuorumPeer 3 voting view: " + s3.getVotingView());
LOG.info("QuorumPeer 4 voting view: " + s4.getVotingView());
LOG.info("QuorumPeer 5 voting view: " + s5.getVotingView());
LOG.info("start QuorumPeer 1");
s1.start();
LOG.info("start QuorumPeer 2");
s2.start();
LOG.info("start QuorumPeer 3");
s3.start();
LOG.info("start QuorumPeer 4");
s4.start();
LOG.info("start QuorumPeer 5");
s5.start();
LOG.info("started QuorumPeer 5");
LOG.info ("Checking ports " + hostPort);
for (String hp : hostPort.split(",")) {
Assert.assertTrue("waiting for server up",
ClientBase.waitForServerUp(hp,
CONNECTION_TIMEOUT));
LOG.info(hp + " is accepting client connections");
}
// interesting to see what's there...
JMXEnv.dump();
// make sure we have these 5 servers listed
Set<String> ensureNames = new LinkedHashSet<String>();
for (int i = 1; i <= 5; i++) {
ensureNames.add("InMemoryDataTree");
}
for (int i = 1; i <= 5; i++) {
ensureNames.add("name0=ReplicatedServer_id" + i
+ ",name1=replica." + i + ",name2=");
}
for (int i = 1; i <= 5; i++) {
for (int j = 1; j <= 5; j++) {
ensureNames.add("name0=ReplicatedServer_id" + i
+ ",name1=replica." + j);
}
}
for (int i = 1; i <= 5; i++) {
ensureNames.add("name0=ReplicatedServer_id" + i);
}
JMXEnv.ensureAll(ensureNames.toArray(new String[ensureNames.size()]));
}
public void setupServers() throws IOException {
setupServer(1);
setupServer(2);
setupServer(3);
setupServer(4);
setupServer(5);
}
HashMap<Long,QuorumServer> peers = null;
public void setupServer(int i) throws IOException {
int tickTime = 2000;
int initLimit = 3;
int syncLimit = 3;
if(peers == null){
peers = new HashMap<Long,QuorumServer>();
peers.put(Long.valueOf(1), new QuorumServer(1,
new InetSocketAddress("127.0.0.1", port1 + 1000),
new InetSocketAddress("127.0.0.1", portLE1 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(2), new QuorumServer(2,
new InetSocketAddress("127.0.0.1", port2 + 1000),
new InetSocketAddress("127.0.0.1", portLE2 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(3), new QuorumServer(3,
new InetSocketAddress("127.0.0.1", port3 + 1000),
new InetSocketAddress("127.0.0.1", portLE3 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(4), new QuorumServer(4,
new InetSocketAddress("127.0.0.1", port4 + 1000),
new InetSocketAddress("127.0.0.1", portLE4 + 1000),
LearnerType.PARTICIPANT));
peers.put(Long.valueOf(5), new QuorumServer(5,
new InetSocketAddress("127.0.0.1", port5 + 1000),
new InetSocketAddress("127.0.0.1", portLE5 + 1000),
LearnerType.PARTICIPANT));
}
switch(i){
case 1:
LOG.info("creating QuorumPeer 1 port " + port1);
s1 = new QuorumPeer(peers, s1dir, s1dir, port1, 3, 1, tickTime, initLimit, syncLimit);
Assert.assertEquals(port1, s1.getClientPort());
break;
case 2:
LOG.info("creating QuorumPeer 2 port " + port2);
s2 = new QuorumPeer(peers, s2dir, s2dir, port2, 3, 2, tickTime, initLimit, syncLimit);
Assert.assertEquals(port2, s2.getClientPort());
break;
case 3:
LOG.info("creating QuorumPeer 3 port " + port3);
s3 = new QuorumPeer(peers, s3dir, s3dir, port3, 3, 3, tickTime, initLimit, syncLimit);
Assert.assertEquals(port3, s3.getClientPort());
break;
case 4:
LOG.info("creating QuorumPeer 4 port " + port4);
s4 = new QuorumPeer(peers, s4dir, s4dir, port4, 3, 4, tickTime, initLimit, syncLimit);
Assert.assertEquals(port4, s4.getClientPort());
break;
case 5:
LOG.info("creating QuorumPeer 5 port " + port5);
s5 = new QuorumPeer(peers, s5dir, s5dir, port5, 3, 5, tickTime, initLimit, syncLimit);
Assert.assertEquals(port5, s5.getClientPort());
}
}
@Override
public void tearDown() throws Exception {
LOG.info("TearDown started");
OperatingSystemMXBean osMbean =
ManagementFactory.getOperatingSystemMXBean();
if (osMbean != null && osMbean instanceof UnixOperatingSystemMXBean) {
UnixOperatingSystemMXBean unixos =
(UnixOperatingSystemMXBean)osMbean;
LOG.info("fdcount after test is: "
+ unixos.getOpenFileDescriptorCount());
}
shutdownServers();
for (String hp : hostPort.split(",")) {
Assert.assertTrue("waiting for server down",
ClientBase.waitForServerDown(hp,
ClientBase.CONNECTION_TIMEOUT));
LOG.info(hp + " is no longer accepting client connections");
}
JMXEnv.tearDown();
}
public void shutdownServers() {
shutdown(s1);
shutdown(s2);
shutdown(s3);
shutdown(s4);
shutdown(s5);
}
public static void shutdown(QuorumPeer qp) {
try {
LOG.info("Shutting down quorum peer " + qp.getName());
qp.shutdown();
Election e = qp.getElectionAlg();
if (e != null) {
LOG.info("Shutting down leader election " + qp.getName());
e.shutdown();
} else {
LOG.info("No election available to shutdown " + qp.getName());
}
LOG.info("Waiting for " + qp.getName() + " to exit thread");
qp.join(30000);
if (qp.isAlive()) {
Assert.fail("QP failed to shutdown in 30 seconds: " + qp.getName());
}
} catch (InterruptedException e) {
LOG.debug("QP interrupted: " + qp.getName(), e);
}
}
protected TestableZooKeeper createClient()
throws IOException, InterruptedException
{
return createClient(hostPort);
}
protected TestableZooKeeper createClient(String hp)
throws IOException, InterruptedException
{
CountdownWatcher watcher = new CountdownWatcher();
return createClient(watcher, hp);
}
}
|
|
/*
* Copyright (C) 2012-2014 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.jboss.netty.buffer.ChannelBuffer;
import com.datastax.driver.core.exceptions.DriverInternalError;
import com.datastax.driver.core.exceptions.InvalidTypeException;
/**
* Data types supported by cassandra.
*/
public class DataType {
/**
* The CQL type name.
*/
public enum Name {
ASCII (1, String.class),
BIGINT (2, Long.class),
BLOB (3, ByteBuffer.class),
BOOLEAN (4, Boolean.class),
COUNTER (5, Long.class),
DECIMAL (6, BigDecimal.class),
DOUBLE (7, Double.class),
FLOAT (8, Float.class),
INET (16, InetAddress.class),
INT (9, Integer.class),
TEXT (10, String.class),
TIMESTAMP (11, Date.class),
UUID (12, UUID.class),
VARCHAR (13, String.class),
VARINT (14, BigInteger.class),
TIMEUUID (15, UUID.class),
LIST (32, List.class),
SET (34, Set.class),
MAP (33, Map.class),
CUSTOM (0, ByteBuffer.class);
final int protocolId;
final Class<?> javaType;
private static final Name[] nameToIds;
static {
int maxCode = -1;
for (Name name : Name.values())
maxCode = Math.max(maxCode, name.protocolId);
nameToIds = new Name[maxCode + 1];
for (Name name : Name.values()) {
if (nameToIds[name.protocolId] != null)
throw new IllegalStateException("Duplicate Id");
nameToIds[name.protocolId] = name;
}
}
private Name(int protocolId, Class<?> javaType) {
this.protocolId = protocolId;
this.javaType = javaType;
}
static Name fromProtocolId(int id) {
Name name = nameToIds[id];
if (name == null)
throw new DriverInternalError("Unknown data type protocol id: " + id);
return name;
}
/**
* Returns whether this data type name represents the name of a collection type,
* that is, a list, set or map.
*
* @return whether this data type name represents the name of a collection type.
*/
public boolean isCollection() {
switch (this) {
case LIST:
case SET:
case MAP:
return true;
default:
return false;
}
}
/**
* Returns the Java Class corresponding to this CQL type name.
*
* The correspondence between CQL types and Java ones is as follows:
* <table>
* <caption>DataType to Java class correspondence</caption>
* <tr><th>DataType (CQL)</th><th>Java Class</th></tr>
* <tr><td>ASCII </td><td>String</td></tr>
* <tr><td>BIGINT </td><td>Long</td></tr>
* <tr><td>BLOB </td><td>ByteBuffer</td></tr>
* <tr><td>BOOLEAN </td><td>Boolean</td></tr>
* <tr><td>COUNTER </td><td>Long</td></tr>
* <tr><td>CUSTOM </td><td>ByteBuffer</td></tr>
* <tr><td>DECIMAL </td><td>BigDecimal</td></tr>
* <tr><td>DOUBLE </td><td>Double</td></tr>
* <tr><td>FLOAT </td><td>Float</td></tr>
* <tr><td>INET </td><td>InetAddress</td></tr>
* <tr><td>INT </td><td>Integer</td></tr>
* <tr><td>LIST </td><td>List</td></tr>
* <tr><td>MAP </td><td>Map</td></tr>
* <tr><td>SET </td><td>Set</td></tr>
* <tr><td>TEXT </td><td>String</td></tr>
* <tr><td>TIMESTAMP </td><td>Date</td></tr>
* <tr><td>UUID </td><td>UUID</td></tr>
* <tr><td>VARCHAR </td><td>String</td></tr>
* <tr><td>VARINT </td><td>BigInteger</td></tr>
* <tr><td>TIMEUUID </td><td>UUID</td></tr>
* </table>
*
* @return the java Class corresponding to this CQL type name.
*/
public Class<?> asJavaClass() {
return javaType;
}
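// Editor's illustrative sketch, not part of the original driver source: exercises the
// Name-to-Java-class mapping documented in the table above. Nothing beyond the constants
// and the asJavaClass() accessor defined in this enum is assumed.
private static void nameMappingSketch() {
    assert BIGINT.asJavaClass() == Long.class;
    assert TIMESTAMP.asJavaClass() == Date.class;   // java.util.Date, via the java.util.* import
    assert LIST.asJavaClass() == List.class;
}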
@Override
public String toString() {
return super.toString().toLowerCase();
}
}
private final DataType.Name name;
private final List<DataType> typeArguments;
private final String customClassName;
private final TypeCodec<?> codec;
private static final Map<Name, DataType> primitiveTypeMap = new EnumMap<Name, DataType>(Name.class);
static {
for (Name name : Name.values()) {
if (!name.isCollection() && name != Name.CUSTOM)
primitiveTypeMap.put(name, new DataType(name, Collections.<DataType>emptyList(), TypeCodec.createFor(name)));
}
}
private static final Set<DataType> primitiveTypeSet = ImmutableSet.copyOf(primitiveTypeMap.values());
private DataType(DataType.Name name, List<DataType> typeArguments, TypeCodec<?> codec) {
this(name, typeArguments, null, codec);
}
private DataType(DataType.Name name, List<DataType> typeArguments, String customClassName, TypeCodec<?> codec) {
this.name = name;
this.typeArguments = typeArguments;
this.customClassName = customClassName;
this.codec = codec;
}
static DataType decode(ChannelBuffer buffer) {
Name name = Name.fromProtocolId(buffer.readUnsignedShort());
switch (name) {
case CUSTOM:
return custom(CBUtil.readString(buffer));
case LIST:
return list(decode(buffer));
case SET:
return set(decode(buffer));
case MAP:
DataType keys = decode(buffer);
DataType values = decode(buffer);
return map(keys, values);
default:
return primitiveTypeMap.get(name);
}
}
@SuppressWarnings("unchecked")
TypeCodec<Object> codec() {
return (TypeCodec<Object>)codec;
}
/**
* Returns the ASCII type.
*
* @return The ASCII type.
*/
public static DataType ascii() {
return primitiveTypeMap.get(Name.ASCII);
}
/**
* Returns the BIGINT type.
*
* @return The BIGINT type.
*/
public static DataType bigint() {
return primitiveTypeMap.get(Name.BIGINT);
}
/**
* Returns the BLOB type.
*
* @return The BLOB type.
*/
public static DataType blob() {
return primitiveTypeMap.get(Name.BLOB);
}
/**
* Returns the BOOLEAN type.
*
* @return The BOOLEAN type.
*/
public static DataType cboolean() {
return primitiveTypeMap.get(Name.BOOLEAN);
}
/**
* Returns the COUNTER type.
*
* @return The COUNTER type.
*/
public static DataType counter() {
return primitiveTypeMap.get(Name.COUNTER);
}
/**
* Returns the DECIMAL type.
*
* @return The DECIMAL type.
*/
public static DataType decimal() {
return primitiveTypeMap.get(Name.DECIMAL);
}
/**
* Returns the DOUBLE type.
*
* @return The DOUBLE type.
*/
public static DataType cdouble() {
return primitiveTypeMap.get(Name.DOUBLE);
}
/**
* Returns the FLOAT type.
*
* @return The FLOAT type.
*/
public static DataType cfloat() {
return primitiveTypeMap.get(Name.FLOAT);
}
/**
* Returns the INET type.
*
* @return The INET type.
*/
public static DataType inet() {
return primitiveTypeMap.get(Name.INET);
}
/**
* Returns the INT type.
*
* @return The INT type.
*/
public static DataType cint() {
return primitiveTypeMap.get(Name.INT);
}
/**
* Returns the TEXT type.
*
* @return The TEXT type.
*/
public static DataType text() {
return primitiveTypeMap.get(Name.TEXT);
}
/**
* Returns the TIMESTAMP type.
*
* @return The TIMESTAMP type.
*/
public static DataType timestamp() {
return primitiveTypeMap.get(Name.TIMESTAMP);
}
/**
* Returns the UUID type.
*
* @return The UUID type.
*/
public static DataType uuid() {
return primitiveTypeMap.get(Name.UUID);
}
/**
* Returns the VARCHAR type.
*
* @return The VARCHAR type.
*/
public static DataType varchar() {
return primitiveTypeMap.get(Name.VARCHAR);
}
/**
* Returns the VARINT type.
*
* @return The VARINT type.
*/
public static DataType varint() {
return primitiveTypeMap.get(Name.VARINT);
}
/**
* Returns the TIMEUUID type.
*
* @return The TIMEUUID type.
*/
public static DataType timeuuid() {
return primitiveTypeMap.get(Name.TIMEUUID);
}
/**
* Returns the type of lists of {@code elementType} elements.
*
* @param elementType the type of the list elements.
* @return the type of lists of {@code elementType} elements.
*/
public static DataType list(DataType elementType) {
// TODO: for list, sets and maps, we could cache them (may or may not be worth it, but since we
// don't allow nesting of collections, even pregenerating all the lists/sets like we do for
// primitives wouldn't be very costly)
return new DataType(Name.LIST, ImmutableList.of(elementType), TypeCodec.listOf(elementType));
}
/**
* Returns the type of sets of {@code elementType} elements.
*
* @param elementType the type of the set elements.
* @return the type of sets of {@code elementType} elements.
*/
public static DataType set(DataType elementType) {
return new DataType(Name.SET, ImmutableList.of(elementType), TypeCodec.setOf(elementType));
}
/**
* Returns the type of maps of {@code keyType} to {@code valueType} elements.
*
* @param keyType the type of the map keys.
* @param valueType the type of the map values.
* @return the type of map of {@code keyType} to {@code valueType} elements.
*/
public static DataType map(DataType keyType, DataType valueType) {
return new DataType(Name.MAP, ImmutableList.of(keyType, valueType), TypeCodec.mapOf(keyType, valueType));
}
/**
* Returns a Custom type.
* <p>
* A custom type is defined by the name of the class used on the Cassandra
* side to implement it. Note that driver support for custom types is
* limited: values of a custom type won't be interpreted by the driver in
* any way. They thus have to be set (by {@link BoundStatement#setBytesUnsafe})
* and retrieved (by {@link Row#getBytesUnsafe}) as a raw ByteBuffer.
* <p>
* Custom types are rarely useful, so their use is not encouraged.
*
* @param typeClassName the server-side fully qualified class name for the type.
* @return the custom type for {@code typeClassName}.
*/
public static DataType custom(String typeClassName) {
if (typeClassName == null)
throw new NullPointerException();
return new DataType(Name.CUSTOM, Collections.<DataType>emptyList(), typeClassName, TypeCodec.createFor(Name.CUSTOM));
}
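// Editor's illustrative sketch, not part of the original driver source: as the Javadoc above
// explains, a custom type is identified purely by its server-side class name and its values
// remain opaque to the driver. The class name below is only an example.
private static void customTypeSketch() {
    DataType custom = custom("org.apache.cassandra.db.marshal.BytesType");
    assert custom.getName() == Name.CUSTOM;
    assert "org.apache.cassandra.db.marshal.BytesType".equals(custom.getCustomTypeClassName());
    // Values of this type have to be read and written as raw bytes (e.g. via getBytesUnsafe /
    // setBytesUnsafe); the driver never interprets them.
}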
/**
* Returns the name of that type.
*
* @return the name of that type.
*/
public Name getName() {
return name;
}
/**
* Returns the type arguments of this type.
* <p>
* Note that only the collection types (LIST, MAP, SET) have type
* arguments. For the other types, this will return an empty list.
* <p>
* For the collection types:
* <ul>
* <li>For lists and sets, this method returns one argument, the type of
* the elements.</li>
* <li>For maps, this method returns two arguments, the first one is the
* type of the map keys, the second one is the type of the map
* values.</li>
* </ul>
*
* @return an immutable list containing the type arguments of this type.
*/
public List<DataType> getTypeArguments() {
return typeArguments;
}
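// Editor's illustrative sketch, not part of the original driver source: type arguments for the
// three collection kinds, as described in the Javadoc above; non-collection types have none.
private static void typeArgumentsSketch() {
    DataType listOfText = list(text());                        // list<text>  -> one argument
    DataType textToBigint = map(text(), bigint());             // map<text, bigint> -> two arguments
    assert listOfText.getTypeArguments().size() == 1;
    assert textToBigint.getTypeArguments().get(0) == text();   // key type
    assert textToBigint.getTypeArguments().get(1) == bigint(); // value type
    assert cint().getTypeArguments().isEmpty();
}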
/**
* Returns the server-side class name for a custom type.
*
* @return the server-side fully qualified class name for a custom type or
* {@code null} for any other type.
*/
public String getCustomTypeClassName() {
return customClassName;
}
/**
* Parses a string value for the type this object represent, returning its
* Cassandra binary representation.
* <p>
* Please note that currently, parsing collections is not supported and will
* throw an {@code InvalidTypeException}.
*
* @param value the value to parse.
* @return the binary representation of {@code value}.
*
* @throws InvalidTypeException if {@code value} is not a valid string
* representation for this type. Please note that values for custom types
* can never be parsed and will always result in this exception.
*/
public ByteBuffer parse(String value) {
if (name == Name.CUSTOM)
throw new InvalidTypeException(String.format("Cannot parse '%s' as value of custom type of class '%s' "
+ "(values for custom type cannot be parse and must be inputted as bytes directly)", value, customClassName));
if (name.isCollection())
throw new InvalidTypeException(String.format("Cannot parse value as %s, parsing collections is not currently supported", name));
return codec().serialize(codec.parse(value));
}
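// Editor's illustrative sketch, not part of the original driver source: parse() turns a CQL
// string literal into the Cassandra binary encoding of this type (a CQL int is encoded on
// 4 bytes).
private static void parseSketch() {
    ByteBuffer encodedInt = cint().parse("42");
    assert encodedInt.remaining() == 4;
}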
/**
* Returns whether this type is a collection one, i.e. a list, set or map type.
*
* @return whether this type is a collection one.
*/
public boolean isCollection() {
return name.isCollection();
}
/**
* Returns the Java Class corresponding to this type.
*
* This is a shortcut for {@code getName().asJavaClass()}.
*
* @return the java Class corresponding to this type.
*
* @see Name#asJavaClass
*/
public Class<?> asJavaClass() {
return getName().asJavaClass();
}
/**
* Returns a set of all the primitive types, where primitive types are
* defined as the types that don't have type arguments (that is excluding
* lists, sets, and maps).
*
* @return returns a set of all the primitive types.
*/
public static Set<DataType> allPrimitiveTypes() {
return primitiveTypeSet;
}
/**
* Serialize a value of this type to bytes.
* <p>
* The actual format of the resulting bytes will correspond to the
* Cassandra encoding for this type.
*
* @param value the value to serialize.
* @return the value serialized, or {@code null} if {@code value} is null.
*
* @throws InvalidTypeException if {@code value} is not a valid object
* for this {@code DataType}.
*/
public ByteBuffer serialize(Object value) {
Class<?> providedClass = value.getClass();
Class<?> expectedClass = asJavaClass();
if (!expectedClass.isAssignableFrom(providedClass))
throw new InvalidTypeException(String.format("Invalid value for CQL type %s, expecting %s but %s provided", toString(), expectedClass, providedClass));
try {
return codec().serialize(value);
} catch (ClassCastException e) {
// With collections, the element type has not been checked, so it can throw
throw new InvalidTypeException("Invalid type for collection element: " + e.getMessage());
}
}
/**
* Deserialize a value of this type from the provided bytes.
* <p>
* The format of {@code bytes} must correspond to the Cassandra
* encoding for this type.
*
* @param bytes bytes holding the value to deserialize.
* @return the deserialized value (of class {@code this.asJavaClass()}).
* Will return {@code null} if either {@code bytes} is {@code null} or if
* {@code bytes.remaining() == 0} and this type has no value corresponding
* to an empty byte buffer (the latter somewhat strange behavior is due to
* the fact that, for historical/technical reasons, Cassandra types always
* accept an empty byte buffer as a valid value, so we avoid throwing an
* exception in that case. It is however highly discouraged to store empty
* byte buffers for types for which it doesn't make sense, so this detail
* can generally be ignored).
*
* @throws InvalidTypeException if {@code bytes} is not a valid
* encoding of an object of this {@code DataType}.
*/
public Object deserialize(ByteBuffer bytes) {
return codec().deserialize(bytes);
}
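// Editor's illustrative sketch, not part of the original driver source: a serialize /
// deserialize round trip through the codec attached to this type, as described in the
// Javadoc above.
private static void roundTripSketch() {
    ByteBuffer bytes = text().serialize("hello");   // encode a String with the TEXT codec
    Object decoded = text().deserialize(bytes);     // decodes to text().asJavaClass(), i.e. String
    assert "hello".equals(decoded);
}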
/**
* Serialize an object based on its java class.
* <p>
* This is equivalent to {@link #serialize} but with the difference that
* the actual {@code DataType} of the resulting value is inferred from the
* java class of {@code value}. The correspondence between CQL {@code DataType}
* and java class used is the one induced by the method {@link Name#asJavaClass}.
* Note that if you know the {@code DataType} of {@code value}, you should use
* the {@link #serialize} method instead as it is going to be faster.
*
* @param value the value to serialize.
* @return the value serialized, or {@code null} if {@code value} is null.
*
* @throws IllegalArgumentException if {@code value} is not of a type
* corresponding to a CQL3 type, i.e. is not a Class that could be returned
* by {@link DataType#asJavaClass}.
*/
public static ByteBuffer serializeValue(Object value) {
if (value == null)
return null;
DataType dt = TypeCodec.getDataTypeFor(value);
if (dt == null)
throw new IllegalArgumentException(String.format("Value of type %s does not correspond to any CQL3 type", value.getClass()));
try {
return dt.serialize(value);
} catch (InvalidTypeException e) {
// In theory we shouldn't get here if getDataTypeFor does its job correctly,
// but there is no point in throwing an exception the user won't expect if we're
// wrong about that.
throw new IllegalArgumentException(e.getMessage());
}
}
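// Editor's illustrative sketch, not part of the original driver source: serializeValue infers
// the CQL type from the Java class of its argument via the Name#asJavaClass correspondence,
// so no DataType instance is needed at the call site.
private static void serializeValueSketch() {
    ByteBuffer fromLong = serializeValue(42L);        // Long maps to a bigint-style encoding
    ByteBuffer fromString = serializeValue("hello");  // String maps to a text-style encoding
    assert fromLong != null && fromString != null;
    assert serializeValue(null) == null;              // null passes through unchanged
}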
@Override
public final int hashCode() {
return Arrays.hashCode(new Object[]{ name, typeArguments, customClassName });
}
@Override
public final boolean equals(Object o) {
if(!(o instanceof DataType))
return false;
DataType d = (DataType)o;
return name == d.name && typeArguments.equals(d.typeArguments) && Objects.equal(customClassName, d.customClassName);
}
@Override
public String toString() {
switch (name) {
case LIST:
case SET:
return String.format("%s<%s>", name, typeArguments.get(0));
case MAP:
return String.format("%s<%s, %s>", name, typeArguments.get(0), typeArguments.get(1));
case CUSTOM:
return String.format("'%s'", customClassName);
default:
return name.toString();
}
}
}
|
|
package org.drip.json.parser;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*!
* Copyright (C) 2017 Lakshmi Krishnamurthy
* Copyright (C) 2016 Lakshmi Krishnamurthy
* Copyright (C) 2015 Lakshmi Krishnamurthy
*
* This file is part of DRIP, a free-software/open-source library for buy/side financial/trading model
* libraries targeting analysts and developers
* https://lakshmidrip.github.io/DRIP/
*
* DRIP is composed of four main libraries:
*
* - DRIP Fixed Income - https://lakshmidrip.github.io/DRIP-Fixed-Income/
* - DRIP Asset Allocation - https://lakshmidrip.github.io/DRIP-Asset-Allocation/
* - DRIP Numerical Optimizer - https://lakshmidrip.github.io/DRIP-Numerical-Optimizer/
* - DRIP Statistical Learning - https://lakshmidrip.github.io/DRIP-Statistical-Learning/
*
* - DRIP Fixed Income: Library for Instrument/Trading Conventions, Treasury Futures/Options,
* Funding/Forward/Overnight Curves, Multi-Curve Construction/Valuation, Collateral Valuation and XVA
* Metric Generation, Calibration and Hedge Attributions, Statistical Curve Construction, Bond RV
* Metrics, Stochastic Evolution and Option Pricing, Interest Rate Dynamics and Option Pricing, LMM
* Extensions/Calibrations/Greeks, Algorithmic Differentiation, and Asset Backed Models and Analytics.
*
* - DRIP Asset Allocation: Library for model libraries for MPT framework, Black Litterman Strategy
* Incorporator, Holdings Constraint, and Transaction Costs.
*
* - DRIP Numerical Optimizer: Library for Numerical Optimization and Spline Functionality.
*
* - DRIP Statistical Learning: Library for Statistical Evaluation and Machine Learning.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* JSONParser is an Adaptation of the JSONParser Class from the RFC4627 compliant JSON Simple
* (https://code.google.com/p/json-simple/).
*
* @author Fang Yidong
* @author Lakshmi Krishnamurthy
*/
public class JSONParser {
public static final int S_INIT=0;
public static final int S_IN_FINISHED_VALUE=1;//string,number,boolean,null,object,array
public static final int S_IN_OBJECT=2;
public static final int S_IN_ARRAY=3;
public static final int S_PASSED_PAIR_KEY=4;
public static final int S_IN_PAIR_VALUE=5;
public static final int S_END=6;
public static final int S_IN_ERROR=-1;
@SuppressWarnings ("rawtypes") private java.util.LinkedList handlerStatusStack;
private Yylex lexer = new Yylex((java.io.Reader)null);
private Yytoken token = null;
private int status = S_INIT;
@SuppressWarnings ("rawtypes") private int peekStatus(java.util.LinkedList statusStack){
if(statusStack.size()==0)
return -1;
Integer status=(Integer)statusStack.getFirst();
return status.intValue();
}
/**
* Reset the parser to the initial state without resetting the underlying reader.
*
*/
public void reset(){
token = null;
status = S_INIT;
handlerStatusStack = null;
}
/**
* Reset the parser to the initial state with a new character reader.
*
* @param in - The new character reader.
*/
public void reset(java.io.Reader in){
lexer.yyreset(in);
reset();
}
/**
* @return The position of the beginning of the current token.
*/
public int getPosition(){
return lexer.getPosition();
}
public Object parse(String s) throws ParseException{
return parse(s, (ContainerFactory)null);
}
/**
* Parse the JSON String
*
* @param s The String
* @param containerFactory The Container Factory
*
* @return The JSON Object
*
* @throws ParseException Thrown if the Inputs are Invalid
*/
public Object parse(String s, ContainerFactory containerFactory) throws ParseException{
java.io.StringReader in=new java.io.StringReader(s);
try{
return parse(in, containerFactory);
}
catch(java.io.IOException ie){
/*
* Actually it will never happen.
*/
throw new ParseException(-1, ParseException.ERROR_UNEXPECTED_EXCEPTION, ie);
}
}
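// Editor's illustrative sketch, not part of the original DRIP source: parsing a JSON string
// with the default containers (a null ContainerFactory yields org.drip.json.simple.JSONObject /
// JSONArray, which the parser populates through the java.util.Map / java.util.List interfaces).
private static void parseStringSketch() throws ParseException {
    JSONParser parser = new JSONParser();
    Object parsed = parser.parse("{\"name\":\"swap\",\"tenors\":[\"2Y\",\"5Y\",\"10Y\"]}");
    java.util.Map<?, ?> object = (java.util.Map<?, ?>) parsed;  // top-level JSON object
    System.out.println(object.get("name"));                     // prints: swap
}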
public Object parse(java.io.Reader in) throws java.io.IOException, ParseException{
return parse(in, (ContainerFactory)null);
}
/**
* Parse JSON text into java object from the input source.
*
* @param in The Input Reader
* @param containerFactory - Use this factory to create your own JSON object and JSON array containers.
* @return Instance of the following:
* org.drip.json.simple.JSONObject,
* org.drip.json.simple.JSONArray,
* java.lang.String,
* java.lang.Number,
* java.lang.Boolean,
* null
*
* @throws java.io.IOException Thrown if an I/O Error occurs while reading the Input
*
* @throws ParseException Thrown if the Inputs are Invalid
*/
@SuppressWarnings ({"rawtypes", "unchecked"}) public Object parse(java.io.Reader in, ContainerFactory containerFactory) throws java.io.IOException, ParseException{
reset(in);
java.util.LinkedList statusStack = new java.util.LinkedList();
java.util.LinkedList valueStack = new java.util.LinkedList();
try{
do{
nextToken();
switch(status){
case S_INIT:
switch(token.type){
case Yytoken.TYPE_VALUE:
status=S_IN_FINISHED_VALUE;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(token.value);
break;
case Yytoken.TYPE_LEFT_BRACE:
status=S_IN_OBJECT;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(createObjectContainer(containerFactory));
break;
case Yytoken.TYPE_LEFT_SQUARE:
status=S_IN_ARRAY;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(createArrayContainer(containerFactory));
break;
default:
status=S_IN_ERROR;
}//inner switch
break;
case S_IN_FINISHED_VALUE:
if(token.type==Yytoken.TYPE_EOF)
return valueStack.removeFirst();
else
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
case S_IN_OBJECT:
switch(token.type){
case Yytoken.TYPE_COMMA:
break;
case Yytoken.TYPE_VALUE:
if(token.value instanceof String){
String key=(String)token.value;
valueStack.addFirst(key);
status=S_PASSED_PAIR_KEY;
statusStack.addFirst(new Integer(status));
}
else{
status=S_IN_ERROR;
}
break;
case Yytoken.TYPE_RIGHT_BRACE:
if(valueStack.size()>1){
statusStack.removeFirst();
valueStack.removeFirst();
status=peekStatus(statusStack);
}
else{
status=S_IN_FINISHED_VALUE;
}
break;
default:
status=S_IN_ERROR;
break;
}//inner switch
break;
case S_PASSED_PAIR_KEY:
switch(token.type){
case Yytoken.TYPE_COLON:
break;
case Yytoken.TYPE_VALUE:
statusStack.removeFirst();
String key=(String)valueStack.removeFirst();
java.util.Map parent=(java.util.Map)valueStack.getFirst();
parent.put(key,token.value);
status=peekStatus(statusStack);
break;
case Yytoken.TYPE_LEFT_SQUARE:
statusStack.removeFirst();
key=(String)valueStack.removeFirst();
parent=(java.util.Map)valueStack.getFirst();
java.util.List newArray=createArrayContainer(containerFactory);
parent.put(key,newArray);
status=S_IN_ARRAY;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(newArray);
break;
case Yytoken.TYPE_LEFT_BRACE:
statusStack.removeFirst();
key=(String)valueStack.removeFirst();
parent=(java.util.Map)valueStack.getFirst();
java.util.Map newObject=createObjectContainer(containerFactory);
parent.put(key,newObject);
status=S_IN_OBJECT;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(newObject);
break;
default:
status=S_IN_ERROR;
}
break;
case S_IN_ARRAY:
switch(token.type){
case Yytoken.TYPE_COMMA:
break;
case Yytoken.TYPE_VALUE:
java.util.List val=(java.util.List)valueStack.getFirst();
val.add(token.value);
break;
case Yytoken.TYPE_RIGHT_SQUARE:
if(valueStack.size()>1){
statusStack.removeFirst();
valueStack.removeFirst();
status=peekStatus(statusStack);
}
else{
status=S_IN_FINISHED_VALUE;
}
break;
case Yytoken.TYPE_LEFT_BRACE:
val=(java.util.List)valueStack.getFirst();
java.util.Map newObject=createObjectContainer(containerFactory);
val.add(newObject);
status=S_IN_OBJECT;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(newObject);
break;
case Yytoken.TYPE_LEFT_SQUARE:
val=(java.util.List)valueStack.getFirst();
java.util.List newArray=createArrayContainer(containerFactory);
val.add(newArray);
status=S_IN_ARRAY;
statusStack.addFirst(new Integer(status));
valueStack.addFirst(newArray);
break;
default:
status=S_IN_ERROR;
}//inner switch
break;
case S_IN_ERROR:
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}//switch
if(status==S_IN_ERROR){
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}
}while(token.type!=Yytoken.TYPE_EOF);
}
catch(java.io.IOException ie){
throw ie;
}
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}
private void nextToken() throws ParseException, java.io.IOException{
token = lexer.yylex();
if(token == null)
token = new Yytoken(Yytoken.TYPE_EOF, null);
}
@SuppressWarnings ("rawtypes") private java.util.Map createObjectContainer(ContainerFactory containerFactory){
if(containerFactory == null)
return new org.drip.json.simple.JSONObject();
java.util.Map m = containerFactory.createObjectContainer();
if(m == null)
return new org.drip.json.simple.JSONObject();
return m;
}
@SuppressWarnings ("rawtypes") private java.util.List createArrayContainer(ContainerFactory containerFactory){
if(containerFactory == null)
return new org.drip.json.simple.JSONArray();
java.util.List l = containerFactory.creatArrayContainer();
if(l == null)
return new org.drip.json.simple.JSONArray();
return l;
}
public void parse(String s, ContentHandler contentHandler) throws ParseException{
parse(s, contentHandler, false);
}
public void parse(String s, ContentHandler contentHandler, boolean isResume) throws ParseException{
java.io.StringReader in=new java.io.StringReader(s);
try{
parse(in, contentHandler, isResume);
}
catch(java.io.IOException ie){
/*
* Actually it will never happen.
*/
throw new ParseException(-1, ParseException.ERROR_UNEXPECTED_EXCEPTION, ie);
}
}
public void parse(java.io.Reader in, ContentHandler contentHandler) throws java.io.IOException, ParseException{
parse(in, contentHandler, false);
}
/**
* Stream processing of JSON text.
*
* @see ContentHandler
*
* @param in The Input Reader
* @param contentHandler The Content Handler Instance
* @param isResume - Indicates whether to continue a previous parsing operation.
* If set to true, parsing resumes on the old stream and the parameter 'in' is ignored.
* If this method is called for the first time on this instance, isResume is ignored.
*
* @throws java.io.IOException Thrown if an I/O Error occurs while reading the Input
*
* @throws ParseException Thrown if the Inputs are Invalid
*/
@SuppressWarnings ({"rawtypes", "unchecked"}) public void parse(java.io.Reader in, ContentHandler contentHandler, boolean isResume) throws java.io.IOException, ParseException{
if(!isResume){
reset(in);
handlerStatusStack = new java.util.LinkedList();
}
else{
if(handlerStatusStack == null){
isResume = false;
reset(in);
handlerStatusStack = new java.util.LinkedList();
}
}
java.util.LinkedList statusStack = handlerStatusStack;
try{
do{
switch(status){
case S_INIT:
contentHandler.startJSON();
nextToken();
switch(token.type){
case Yytoken.TYPE_VALUE:
status=S_IN_FINISHED_VALUE;
statusStack.addFirst(new Integer(status));
if(!contentHandler.primitive(token.value))
return;
break;
case Yytoken.TYPE_LEFT_BRACE:
status=S_IN_OBJECT;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startObject())
return;
break;
case Yytoken.TYPE_LEFT_SQUARE:
status=S_IN_ARRAY;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startArray())
return;
break;
default:
status=S_IN_ERROR;
}//inner switch
break;
case S_IN_FINISHED_VALUE:
nextToken();
if(token.type==Yytoken.TYPE_EOF){
contentHandler.endJSON();
status = S_END;
return;
}
else{
status = S_IN_ERROR;
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}
case S_IN_OBJECT:
nextToken();
switch(token.type){
case Yytoken.TYPE_COMMA:
break;
case Yytoken.TYPE_VALUE:
if(token.value instanceof String){
String key=(String)token.value;
status=S_PASSED_PAIR_KEY;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startObjectEntry(key))
return;
}
else{
status=S_IN_ERROR;
}
break;
case Yytoken.TYPE_RIGHT_BRACE:
if(statusStack.size()>1){
statusStack.removeFirst();
status=peekStatus(statusStack);
}
else{
status=S_IN_FINISHED_VALUE;
}
if(!contentHandler.endObject())
return;
break;
default:
status=S_IN_ERROR;
break;
}//inner switch
break;
case S_PASSED_PAIR_KEY:
nextToken();
switch(token.type){
case Yytoken.TYPE_COLON:
break;
case Yytoken.TYPE_VALUE:
statusStack.removeFirst();
status=peekStatus(statusStack);
if(!contentHandler.primitive(token.value))
return;
if(!contentHandler.endObjectEntry())
return;
break;
case Yytoken.TYPE_LEFT_SQUARE:
statusStack.removeFirst();
statusStack.addFirst(new Integer(S_IN_PAIR_VALUE));
status=S_IN_ARRAY;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startArray())
return;
break;
case Yytoken.TYPE_LEFT_BRACE:
statusStack.removeFirst();
statusStack.addFirst(new Integer(S_IN_PAIR_VALUE));
status=S_IN_OBJECT;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startObject())
return;
break;
default:
status=S_IN_ERROR;
}
break;
case S_IN_PAIR_VALUE:
/*
* S_IN_PAIR_VALUE is just a marker to indicate the end of an object entry; it doesn't process any token,
* so token consumption is delayed until the next round.
*/
statusStack.removeFirst();
status = peekStatus(statusStack);
if(!contentHandler.endObjectEntry())
return;
break;
case S_IN_ARRAY:
nextToken();
switch(token.type){
case Yytoken.TYPE_COMMA:
break;
case Yytoken.TYPE_VALUE:
if(!contentHandler.primitive(token.value))
return;
break;
case Yytoken.TYPE_RIGHT_SQUARE:
if(statusStack.size()>1){
statusStack.removeFirst();
status=peekStatus(statusStack);
}
else{
status=S_IN_FINISHED_VALUE;
}
if(!contentHandler.endArray())
return;
break;
case Yytoken.TYPE_LEFT_BRACE:
status=S_IN_OBJECT;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startObject())
return;
break;
case Yytoken.TYPE_LEFT_SQUARE:
status=S_IN_ARRAY;
statusStack.addFirst(new Integer(status));
if(!contentHandler.startArray())
return;
break;
default:
status=S_IN_ERROR;
}//inner switch
break;
case S_END:
return;
case S_IN_ERROR:
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}//switch
if(status==S_IN_ERROR){
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}
}while(token.type!=Yytoken.TYPE_EOF);
}
catch(java.io.IOException ie){
status = S_IN_ERROR;
throw ie;
}
catch(ParseException pe){
status = S_IN_ERROR;
throw pe;
}
catch(RuntimeException re){
status = S_IN_ERROR;
throw re;
}
catch(Error e){
status = S_IN_ERROR;
throw e;
}
status = S_IN_ERROR;
throw new ParseException(getPosition(), ParseException.ERROR_UNEXPECTED_TOKEN, token);
}
}
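/*
 * Editor's illustrative sketch, not part of the original DRIP source: a minimal ContentHandler
 * for the streaming API documented on parse(Reader, ContentHandler, boolean). The callback
 * names come from the calls made inside that method; their exact signatures (void vs. boolean
 * returns, throws clauses) are assumed to mirror the json-simple interface this package adapts.
 */
class JSONParserStreamingSketch implements ContentHandler {
    private int keyCount = 0;   // counts every object key reported by the parser

    @Override public void startJSON() {}
    @Override public void endJSON() {}
    @Override public boolean startObject() { return true; }
    @Override public boolean endObject() { return true; }
    @Override public boolean startObjectEntry(String key) { ++keyCount; return true; }
    @Override public boolean endObjectEntry() { return true; }
    @Override public boolean startArray() { return true; }
    @Override public boolean endArray() { return true; }
    @Override public boolean primitive(Object value) { return true; }

    public static void main(String[] args) throws ParseException {
        JSONParserStreamingSketch handler = new JSONParserStreamingSketch();
        new JSONParser().parse("{\"a\":1,\"b\":[2,3],\"c\":{\"d\":4}}", handler);
        System.out.println("object keys seen: " + handler.keyCount);   // expected: 4 (a, b, c, d)
    }
}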
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.server;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.accumulo.fate.util.UtilWaitThread.sleepUninterruptibly;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.util.AddressUtil;
import org.apache.accumulo.core.volume.Volume;
import org.apache.accumulo.core.zookeeper.ZooUtil;
import org.apache.accumulo.fate.ReadOnlyStore;
import org.apache.accumulo.fate.ReadOnlyTStore;
import org.apache.accumulo.fate.ZooStore;
import org.apache.accumulo.server.client.HdfsZooInstance;
import org.apache.accumulo.server.conf.ServerConfigurationFactory;
import org.apache.accumulo.server.fs.VolumeManager;
import org.apache.accumulo.server.util.time.SimpleTimer;
import org.apache.accumulo.server.zookeeper.ZooReaderWriter;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Accumulo {
private static final Logger log = LoggerFactory.getLogger(Accumulo.class);
public static synchronized void updateAccumuloVersion(VolumeManager fs, int oldVersion) {
for (Volume volume : fs.getVolumes()) {
try {
if (getAccumuloPersistentVersion(volume) == oldVersion) {
log.debug("Attempting to upgrade {}", volume);
Path dataVersionLocation = ServerConstants.getDataVersionLocation(volume);
fs.create(new Path(dataVersionLocation, Integer.toString(ServerConstants.DATA_VERSION))).close();
// TODO document failure mode & recovery if FS permissions cause above to work and below to fail ACCUMULO-2596
Path prevDataVersionLoc = new Path(dataVersionLocation, Integer.toString(oldVersion));
if (!fs.delete(prevDataVersionLoc)) {
throw new RuntimeException("Could not delete previous data version location (" + prevDataVersionLoc + ") for " + volume);
}
}
} catch (IOException e) {
throw new RuntimeException("Unable to set accumulo version: an error occurred.", e);
}
}
}
public static synchronized int getAccumuloPersistentVersion(FileSystem fs, Path path) {
int dataVersion;
try {
FileStatus[] files = fs.listStatus(path);
if (files == null || files.length == 0) {
dataVersion = -1; // assume it is 0.5 or earlier
} else {
dataVersion = Integer.parseInt(files[0].getPath().getName());
}
return dataVersion;
} catch (IOException e) {
throw new RuntimeException("Unable to read accumulo version: an error occurred.", e);
}
}
public static synchronized int getAccumuloPersistentVersion(Volume v) {
Path path = ServerConstants.getDataVersionLocation(v);
return getAccumuloPersistentVersion(v.getFileSystem(), path);
}
public static synchronized int getAccumuloPersistentVersion(VolumeManager fs) {
// It doesn't matter which Volume is used as they should all have the data version stored
return getAccumuloPersistentVersion(fs.getVolumes().iterator().next());
}
public static synchronized Path getAccumuloInstanceIdPath(VolumeManager fs) {
// It doesn't matter which Volume is used as they should all have the instance ID stored
Volume v = fs.getVolumes().iterator().next();
return ServerConstants.getInstanceIdLocation(v);
}
public static void init(VolumeManager fs, Instance instance, ServerConfigurationFactory serverConfig, String application) throws IOException {
final AccumuloConfiguration conf = serverConfig.getSystemConfiguration();
log.info("{} starting", application);
log.info("Instance {}", instance.getInstanceID());
int dataVersion = Accumulo.getAccumuloPersistentVersion(fs);
log.info("Data Version {}", dataVersion);
Accumulo.waitForZookeeperAndHdfs(fs);
if (!(canUpgradeFromDataVersion(dataVersion))) {
throw new RuntimeException("This version of accumulo (" + Constants.VERSION + ") is not compatible with files stored using data version " + dataVersion);
}
TreeMap<String,String> sortedProps = new TreeMap<>();
for (Entry<String,String> entry : conf)
sortedProps.put(entry.getKey(), entry.getValue());
for (Entry<String,String> entry : sortedProps.entrySet()) {
String key = entry.getKey();
log.info("{} = {}", key, (Property.isSensitive(key) ? "<hidden>" : entry.getValue()));
}
monitorSwappiness(conf);
// Encourage users to configure TLS
final String SSL = "SSL";
for (Property sslProtocolProperty : Arrays.asList(Property.RPC_SSL_CLIENT_PROTOCOL, Property.RPC_SSL_ENABLED_PROTOCOLS,
Property.MONITOR_SSL_INCLUDE_PROTOCOLS)) {
String value = conf.get(sslProtocolProperty);
if (value.contains(SSL)) {
log.warn("It is recommended that {} only allow TLS", sslProtocolProperty);
}
}
}
/**
* Sanity check that the current persistent version is allowed to upgrade to the version of Accumulo running.
*
* @param dataVersion
* the version that is persisted in the backing Volumes
*/
public static boolean canUpgradeFromDataVersion(final int dataVersion) {
return ServerConstants.CAN_UPGRADE.get(dataVersion);
}
/**
* Does the data version number stored in the backing Volumes indicate we need to upgrade something?
*/
public static boolean persistentVersionNeedsUpgrade(final int accumuloPersistentVersion) {
return ServerConstants.NEEDS_UPGRADE.get(accumuloPersistentVersion);
}
/**
* Schedule a periodic check of the system swappiness setting, logging a warning when it is
* high enough to delay time-sensitive operations.
*/
public static void monitorSwappiness(AccumuloConfiguration config) {
SimpleTimer.getInstance(config).schedule(new Runnable() {
@Override
public void run() {
try {
String procFile = "/proc/sys/vm/swappiness";
File swappiness = new File(procFile);
if (swappiness.exists() && swappiness.canRead()) {
InputStream is = new FileInputStream(procFile);
try {
byte[] buffer = new byte[10];
int bytes = is.read(buffer);
String setting = new String(buffer, 0, bytes, UTF_8);
setting = setting.trim();
if (bytes > 0 && Integer.parseInt(setting) > 10) {
log.warn("System swappiness setting is greater than ten ({}) which can cause time-sensitive operations to be delayed. "
+ " Accumulo is time sensitive because it needs to maintain distributed lock agreement.", setting);
}
} finally {
is.close();
}
}
} catch (Throwable t) {
log.error("", t);
}
}
}, 1000, 10 * 60 * 1000);
}
public static void waitForZookeeperAndHdfs(VolumeManager fs) {
log.info("Attempting to talk to zookeeper");
while (true) {
try {
ZooReaderWriter.getInstance().getChildren(Constants.ZROOT);
break;
} catch (InterruptedException e) {
// ignored
} catch (KeeperException ex) {
log.info("Waiting for accumulo to be initialized");
sleepUninterruptibly(1, TimeUnit.SECONDS);
}
}
log.info("ZooKeeper connected and initialized, attempting to talk to HDFS");
long sleep = 1000;
int unknownHostTries = 3;
while (true) {
try {
if (fs.isReady())
break;
log.warn("Waiting for the NameNode to leave safemode");
} catch (IOException ex) {
log.warn("Unable to connect to HDFS", ex);
} catch (IllegalArgumentException exception) {
/* Unwrap the UnknownHostException so we can deal with it directly */
if (exception.getCause() instanceof UnknownHostException) {
if (unknownHostTries > 0) {
log.warn("Unable to connect to HDFS, will retry. cause: {}", exception.getCause());
/* We need to make sure our sleep period is long enough to avoid getting a cached failure of the host lookup. */
sleep = Math.max(sleep, (AddressUtil.getAddressCacheNegativeTtl((UnknownHostException) (exception.getCause())) + 1) * 1000);
} else {
log.error("Unable to connect to HDFS and have exceeded the maximum number of retries.", exception);
throw exception;
}
unknownHostTries--;
} else {
throw exception;
}
}
log.info("Backing off due to failure; current sleep period is {} seconds", sleep / 1000.);
sleepUninterruptibly(sleep, TimeUnit.MILLISECONDS);
/* Back off to give transient failures more time to clear. */
sleep = Math.min(60 * 1000, sleep * 2);
}
log.info("Connected to HDFS");
}
/**
* Exit loudly if there are outstanding Fate operations. Since Fate serializes class names, we need to make sure there are no queued transactions from a
* previous version before continuing an upgrade. The status of the operations is irrelevant; those in SUCCESSFUL status cause the same problem as those just
* queued.
*
* Note that the Master should not allow write access to Fate until after all upgrade steps are complete.
*
* Should be called as a guard before performing any upgrade steps, after determining that an upgrade is needed.
*
* see ACCUMULO-2519
*/
public static void abortIfFateTransactions() {
try {
final ReadOnlyTStore<Accumulo> fate = new ReadOnlyStore<>(new ZooStore<Accumulo>(ZooUtil.getRoot(HdfsZooInstance.getInstance()) + Constants.ZFATE,
ZooReaderWriter.getInstance()));
if (!(fate.list().isEmpty())) {
throw new AccumuloException("Aborting upgrade because there are outstanding FATE transactions from a previous Accumulo version. "
+ "Please see the README document for instructions on what to do under your previous version.");
}
} catch (Exception exception) {
log.error("Problem verifying Fate readiness", exception);
System.exit(1);
}
}
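// Editor's illustrative sketch, not part of the original Accumulo source: the guard pattern
// described by the Javadoc above, combining the persistent-version check with the FATE check
// before any upgrade work starts (hypothetical call site; the upgrade steps themselves are
// out of scope here).
private static void upgradeGuardSketch(VolumeManager fs) {
    int persistentVersion = getAccumuloPersistentVersion(fs);
    if (persistentVersionNeedsUpgrade(persistentVersion)) {
        // refuse to continue while FATE transactions from a previous version are still stored
        abortIfFateTransactions();
    }
}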
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bootstrap;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.FilePermission;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.util.Set;
@SuppressForbidden(reason = "modifies system properties and attempts to create symbolic links intentionally")
public class EvilSecurityTests extends ESTestCase {
/** test generated permissions */
public void testGeneratedPermissions() throws Exception {
Path path = createTempDir();
// make a fake ES home and ensure we only grant permissions to that.
Path esHome = path.resolve("esHome");
Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString());
Settings settings = settingsBuilder.build();
Path fakeTmpDir = createTempDir();
String realTmpDir = System.getProperty("java.io.tmpdir");
Permissions permissions;
try {
System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
Environment environment = new Environment(settings);
permissions = Security.createPermissions(environment);
} finally {
System.setProperty("java.io.tmpdir", realTmpDir);
}
// the fake es home
assertNoPermissions(esHome, permissions);
// its parent
assertNoPermissions(esHome.getParent(), permissions);
// some other sibling
assertNoPermissions(esHome.getParent().resolve("other"), permissions);
// double check we overwrote java.io.tmpdir correctly for the test
assertNoPermissions(PathUtils.get(realTmpDir), permissions);
}
/** test generated permissions for all configured paths */
@SuppressWarnings("deprecation") // needs to check settings for deprecated path
public void testEnvironmentPaths() throws Exception {
Path path = createTempDir();
// make a fake ES home and ensure we only grant permissions to that.
Path esHome = path.resolve("esHome");
Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString());
settingsBuilder.put(Environment.PATH_CONF_SETTING.getKey(), esHome.resolve("conf").toString());
settingsBuilder.put(Environment.PATH_SCRIPTS_SETTING.getKey(), esHome.resolve("scripts").toString());
settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
esHome.resolve("data2").toString());
settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString());
settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString());
settingsBuilder.put(Environment.PIDFILE_SETTING.getKey(), esHome.resolve("test.pid").toString());
Settings settings = settingsBuilder.build();
Path fakeTmpDir = createTempDir();
String realTmpDir = System.getProperty("java.io.tmpdir");
Permissions permissions;
Environment environment;
try {
System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
environment = new Environment(settings);
permissions = Security.createPermissions(environment);
} finally {
System.setProperty("java.io.tmpdir", realTmpDir);
}
// the fake es home
assertNoPermissions(esHome, permissions);
// its parent
assertNoPermissions(esHome.getParent(), permissions);
// some other sibling
assertNoPermissions(esHome.getParent().resolve("other"), permissions);
// double check we overwrote java.io.tmpdir correctly for the test
assertNoPermissions(PathUtils.get(realTmpDir), permissions);
// check that all directories got permissions:
// bin file: ro
assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions);
// lib file: ro
assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions);
// modules file: ro
assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions);
// config file: ro
assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions);
// scripts file: ro
assertExactPermissions(new FilePermission(environment.scriptsFile().toString(), "read,readlink"), permissions);
// plugins: ro
assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions);
// data paths: r/w
for (Path dataPath : environment.dataFiles()) {
assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
}
for (Path dataPath : environment.dataWithClusterFiles()) {
assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
}
assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions);
// logs: r/w
assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions);
// temp dir: r/w
assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions);
// PID file: delete only (for the shutdown hook)
assertExactPermissions(new FilePermission(environment.pidFile().toString(), "delete"), permissions);
}
public void testEnsureSymlink() throws IOException {
Path p = createTempDir();
Path exists = p.resolve("exists");
Files.createDirectory(exists);
// symlink
Path linkExists = p.resolve("linkExists");
try {
Files.createSymbolicLink(linkExists, exists);
} catch (UnsupportedOperationException | IOException e) {
assumeNoException("test requires filesystem that supports symbolic links", e);
} catch (SecurityException e) {
assumeNoException("test cannot create symbolic links with security manager enabled", e);
}
Security.ensureDirectoryExists(linkExists);
Files.createTempFile(linkExists, null, null);
}
public void testEnsureBrokenSymlink() throws IOException {
Path p = createTempDir();
// broken symlink
Path brokenLink = p.resolve("brokenLink");
try {
Files.createSymbolicLink(brokenLink, p.resolve("nonexistent"));
} catch (UnsupportedOperationException | IOException e) {
assumeNoException("test requires filesystem that supports symbolic links", e);
} catch (SecurityException e) {
assumeNoException("test cannot create symbolic links with security manager enabled", e);
}
try {
Security.ensureDirectoryExists(brokenLink);
fail("didn't get expected exception");
} catch (IOException expected) {}
}
/** When a configured dir is a symlink, test that permissions work on link target */
public void testSymlinkPermissions() throws IOException {
// see https://github.com/elastic/elasticsearch/issues/12170
assumeFalse("windows does not automatically grant permission to the target of symlinks", Constants.WINDOWS);
Path dir = createTempDir();
Path target = dir.resolve("target");
Files.createDirectory(target);
// symlink
Path link = dir.resolve("link");
try {
Files.createSymbolicLink(link, target);
} catch (UnsupportedOperationException | IOException e) {
assumeNoException("test requires filesystem that supports symbolic links", e);
} catch (SecurityException e) {
assumeNoException("test cannot create symbolic links with security manager enabled", e);
}
Permissions permissions = new Permissions();
Security.addPath(permissions, "testing", link, "read");
assertExactPermissions(new FilePermission(link.toString(), "read"), permissions);
assertExactPermissions(new FilePermission(link.resolve("foo").toString(), "read"), permissions);
assertExactPermissions(new FilePermission(target.toString(), "read"), permissions);
assertExactPermissions(new FilePermission(target.resolve("foo").toString(), "read"), permissions);
}
/**
* checks exact file permissions, meaning those and only those for that path.
*/
static void assertExactPermissions(FilePermission expected, PermissionCollection actual) {
String target = expected.getName(); // see javadocs
Set<String> permissionSet = asSet(expected.getActions().split(","));
boolean read = permissionSet.remove("read");
boolean readlink = permissionSet.remove("readlink");
boolean write = permissionSet.remove("write");
boolean delete = permissionSet.remove("delete");
boolean execute = permissionSet.remove("execute");
assertTrue("unrecognized permission: " + permissionSet, permissionSet.isEmpty());
assertEquals(read, actual.implies(new FilePermission(target, "read")));
assertEquals(readlink, actual.implies(new FilePermission(target, "readlink")));
assertEquals(write, actual.implies(new FilePermission(target, "write")));
assertEquals(delete, actual.implies(new FilePermission(target, "delete")));
assertEquals(execute, actual.implies(new FilePermission(target, "execute")));
}
/**
* checks that this path has no permissions
*/
static void assertNoPermissions(Path path, PermissionCollection actual) {
String target = path.toString();
assertFalse(actual.implies(new FilePermission(target, "read")));
assertFalse(actual.implies(new FilePermission(target, "readlink")));
assertFalse(actual.implies(new FilePermission(target, "write")));
assertFalse(actual.implies(new FilePermission(target, "delete")));
assertFalse(actual.implies(new FilePermission(target, "execute")));
}
}
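/*
 * A small illustrative sketch (not part of the Elasticsearch test above) of the
 * PermissionCollection.implies() semantics that assertExactPermissions and assertNoPermissions
 * rely on: a FilePermission granted on "<dir>/-" covers every file below that directory for the
 * granted actions only, and grants nothing on sibling paths. The paths are arbitrary placeholders.
 */
class FilePermissionImpliesSketch {
    public static void main(String[] args) {
        java.security.Permissions perms = new java.security.Permissions();
        // Grant read (only) on everything below /tmp/esHome.
        perms.add(new java.io.FilePermission("/tmp/esHome/-", "read"));
        // Covered path and action: implied.
        System.out.println(perms.implies(new java.io.FilePermission("/tmp/esHome/data/segment", "read")));  // true
        // Covered path, different action: not implied.
        System.out.println(perms.implies(new java.io.FilePermission("/tmp/esHome/data/segment", "write"))); // false
        // Sibling path outside the grant: not implied.
        System.out.println(perms.implies(new java.io.FilePermission("/tmp/other/file", "read")));           // false
    }
}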
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetJobRuns" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetJobRunsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A list of job-run metadata objects.
* </p>
*/
private java.util.List<JobRun> jobRuns;
/**
* <p>
* A continuation token, if not all requested job runs have been returned.
* </p>
*/
private String nextToken;
/**
* <p>
* A list of job-run metadata objects.
* </p>
*
* @return A list of job-run metadata objects.
*/
public java.util.List<JobRun> getJobRuns() {
return jobRuns;
}
/**
* <p>
* A list of job-run metadata objects.
* </p>
*
* @param jobRuns
* A list of job-run metadata objects.
*/
public void setJobRuns(java.util.Collection<JobRun> jobRuns) {
if (jobRuns == null) {
this.jobRuns = null;
return;
}
this.jobRuns = new java.util.ArrayList<JobRun>(jobRuns);
}
/**
* <p>
* A list of job-run metadata objects.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setJobRuns(java.util.Collection)} or {@link #withJobRuns(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param jobRuns
* A list of job-run metadata objects.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetJobRunsResult withJobRuns(JobRun... jobRuns) {
if (this.jobRuns == null) {
setJobRuns(new java.util.ArrayList<JobRun>(jobRuns.length));
}
for (JobRun ele : jobRuns) {
this.jobRuns.add(ele);
}
return this;
}
/**
* <p>
* A list of job-run metadata objects.
* </p>
*
* @param jobRuns
* A list of job-run metadata objects.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetJobRunsResult withJobRuns(java.util.Collection<JobRun> jobRuns) {
setJobRuns(jobRuns);
return this;
}
/**
* <p>
* A continuation token, if not all requested job runs have been returned.
* </p>
*
* @param nextToken
* A continuation token, if not all requested job runs have been returned.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* A continuation token, if not all requested job runs have been returned.
* </p>
*
* @return A continuation token, if not all requested job runs have been returned.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* A continuation token, if not all requested job runs have been returned.
* </p>
*
* @param nextToken
* A continuation token, if not all requested job runs have been returned.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetJobRunsResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getJobRuns() != null)
sb.append("JobRuns: ").append(getJobRuns()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetJobRunsResult == false)
return false;
GetJobRunsResult other = (GetJobRunsResult) obj;
if (other.getJobRuns() == null ^ this.getJobRuns() == null)
return false;
if (other.getJobRuns() != null && other.getJobRuns().equals(this.getJobRuns()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getJobRuns() == null) ? 0 : getJobRuns().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public GetJobRunsResult clone() {
try {
return (GetJobRunsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
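/*
 * A minimal sketch (not part of the generated SDK model) of how the continuation token on
 * GetJobRunsResult is typically consumed: keep issuing GetJobRuns requests, feeding each
 * result's nextToken back into the next request, until the token comes back null. The AWSGlue
 * client is assumed to be built elsewhere, and the job name passed in is a placeholder.
 */
class GetJobRunsPaginationSketch {
    static java.util.List<JobRun> fetchAllRuns(com.amazonaws.services.glue.AWSGlue glue, String jobName) {
        java.util.List<JobRun> allRuns = new java.util.ArrayList<JobRun>();
        String token = null;
        do {
            GetJobRunsRequest request = new GetJobRunsRequest()
                    .withJobName(jobName)   // e.g. "myJob" (placeholder)
                    .withNextToken(token);  // null on the first call
            GetJobRunsResult result = glue.getJobRuns(request);
            if (result.getJobRuns() != null) {
                allRuns.addAll(result.getJobRuns());
            }
            token = result.getNextToken();  // null once the last page has been returned
        } while (token != null);
        return allRuns;
    }
}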
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.funtest.server.tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.HashMap;
import org.apache.ambari.funtest.server.AmbariUserRole;
import org.apache.ambari.funtest.server.ClusterConfigParams;
import org.apache.ambari.funtest.server.ConnectionParams;
import org.apache.ambari.funtest.server.WebRequest;
import org.apache.ambari.funtest.server.WebResponse;
import org.apache.ambari.funtest.server.api.cluster.CreateClusterWebRequest;
import org.apache.ambari.funtest.server.api.cluster.CreateConfigurationWebRequest;
import org.apache.ambari.funtest.server.api.cluster.DeleteClusterWebRequest;
import org.apache.ambari.funtest.server.api.cluster.GetAllClustersWebRequest;
import org.apache.ambari.funtest.server.api.user.DeleteUserWebRequest;
import org.apache.ambari.funtest.server.utils.ClusterUtils;
import org.apache.ambari.funtest.server.utils.RestApiUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpStatus;
import org.junit.Ignore;
import org.junit.Test;
import com.google.gson.JsonElement;
/**
* Tests operations with users with different levels of privileges
*/
@Ignore
public class RoleBasedAccessControlBasicTest extends ServerTestBase {
private String clusterName = "c1";
private String hostName = "host1";
private String clusterVersion = "HDP-2.2.0";
private static Log LOG = LogFactory.getLog(RoleBasedAccessControlBasicTest.class);
@Override
public void setup() throws Exception {
super.setup();
setupCluster();
}
@Override
public void teardown() throws Exception {
teardownCluster();
super.teardown();
}
/**
* Creates an anonymous user (user with no role). Attempts to get the list of clusters
*
* @throws Exception
*/
@Test
public void testGetClustersAsAnonUser() throws Exception {
JsonElement jsonResponse;
ConnectionParams adminConnectionParams = createAdminConnectionParams();
String anonUserName = "nothing";
String anonUserPwd = "nothing";
/**
* Create a new user (non-admin)
*/
ClusterUtils.createUser(adminConnectionParams, clusterName, anonUserName, anonUserPwd, AmbariUserRole.NONE);
/**
* Attempt to query all the clusters using this user's privileges. Currently the request
* succeeds even though this user should not have access, because the UI requires the
* cluster list to be readable.
*
* todo: Fix this when UI is fixed.
*/
ConnectionParams anonUserParams = createConnectionParams(anonUserName, anonUserPwd);
jsonResponse = RestApiUtils.executeRequest(new GetAllClustersWebRequest(anonUserParams));
assertFalse(jsonResponse.isJsonNull());
/**
* Delete the user
*/
jsonResponse = RestApiUtils.executeRequest(new DeleteUserWebRequest(adminConnectionParams, anonUserName));
LOG.info(jsonResponse);
}
/**
* Creates an anonymous user and uses the user to add a cluster configuration.
*
* @throws Exception
*/
@Test
public void testAddClusterConfigAsAnonUser() throws Exception {
ConnectionParams adminConnectionParams = createAdminConnectionParams();
String anonUserName = "nothing";
String anonUserPwd = "nothing";
/**
* Create a new user (non-admin)
*/
ClusterUtils.createUser(adminConnectionParams, clusterName, anonUserName, anonUserPwd, AmbariUserRole.NONE);
/**
* Create and add a configuration to our cluster using the new user's privilege
*/
String configType = "test-hadoop-env";
String configTag = "version1";
ClusterConfigParams configParams = new ClusterConfigParams();
configParams.setClusterName(clusterName);
configParams.setConfigType(configType);
configParams.setConfigTag(configTag);
configParams.setProperties(new HashMap<String, String>() {{
put("fs.default.name", "localhost:9995");
}});
/**
* Attempting to create the configuration should fail with 403
*/
ConnectionParams anonUserParams = createConnectionParams(anonUserName, anonUserPwd);
WebRequest webRequest = new CreateConfigurationWebRequest(anonUserParams, configParams);
WebResponse webResponse = webRequest.getResponse();
assertEquals(HttpStatus.SC_FORBIDDEN, webResponse.getStatusCode());
/**
* Delete the user
*/
JsonElement jsonResponse = RestApiUtils.executeRequest(new DeleteUserWebRequest(adminConnectionParams, anonUserName));
LOG.info(jsonResponse);
}
/**
* Creates a user with cluster administrator privilege and adds a cluster configuration.
*
* @throws Exception
*/
@Test
public void testAddClusterConfigAsClusterAdmin() throws Exception {
ConnectionParams adminConnectionParams = createAdminConnectionParams();
String clusterAdminName = "clusterAdmin";
String clusterAdminPwd = "clusterAdmin";
/**
* Create a user with cluster admin role
*/
ClusterUtils.createUserClusterAdministrator(adminConnectionParams, clusterName,
clusterAdminName, clusterAdminPwd);
/**
* Create and add a configuration to our cluster using the new user's privilege
*/
String configType = "test-hadoop-env";
String configTag = "version1";
ClusterConfigParams configParams = new ClusterConfigParams();
configParams.setClusterName(clusterName);
configParams.setConfigType(configType);
configParams.setConfigTag(configTag);
configParams.setProperties(new HashMap<String, String>() {{
put("fs.default.name", "localhost:9995");
}});
/**
* This user has enough privilege to create the cluster configuration. Should succeed with 201.
*/
ConnectionParams userConnectionParams = createConnectionParams(clusterAdminName, clusterAdminPwd);
WebRequest webRequest = new CreateConfigurationWebRequest(userConnectionParams, configParams);
WebResponse webResponse = webRequest.getResponse();
assertEquals(HttpStatus.SC_CREATED, webResponse.getStatusCode());
/**
* Delete the user
*/
RestApiUtils.executeRequest(new DeleteUserWebRequest(adminConnectionParams, clusterAdminName));
}
/**
* Create a cluster with name "c1". Does not have any hosts.
*
* @throws Exception
*/
private void setupCluster() throws Exception {
JsonElement jsonResponse;
ConnectionParams params = createAdminConnectionParams();
/**
* Create a cluster as admin:admin
*/
jsonResponse = RestApiUtils.executeRequest(new CreateClusterWebRequest(params, clusterName, clusterVersion));
LOG.info(jsonResponse);
}
private void teardownCluster() throws Exception {
JsonElement jsonResponse;
ConnectionParams params = createAdminConnectionParams();
jsonResponse = RestApiUtils.executeRequest(new DeleteClusterWebRequest(params, clusterName));
LOG.info(jsonResponse);
}
/**
* Helper method to create administrator connection parameters to the server.
*
* @return
*/
private ConnectionParams createAdminConnectionParams() {
return createConnectionParams(getAdminUserName(), getAdminPassword());
}
/**
* Helper method to create connection parameters to the server based on the
* specified user credentials.
*
* @param userName
* @param password
* @return
*/
private ConnectionParams createConnectionParams(String userName, String password) {
ConnectionParams params = new ConnectionParams();
params.setServerName("localhost");
params.setServerApiPort(serverPort);
params.setServerAgentPort(serverAgentPort);
params.setUserName(userName);
params.setPassword(password);
return params;
}
}
|
|
/* ========================================================================== *
* Copyright (c) 2006, Pier Paolo Fumagalli <mailto:[email protected]> *
* All rights reserved. *
* ========================================================================== *
* *
* Redistribution and use in source and binary forms, with or without modifi- *
* cation, are permitted provided that the following conditions are met: *
* *
* - Redistributions of source code must retain the above copyright notice, *
* this list of conditions and the following disclaimer. *
* *
* - Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* *
* - Neither the name of Pier Fumagalli, nor the names of other contributors *
* may be used to endorse or promote products derived from this software *
* without specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS" *
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE *
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE *
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE *
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR *
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF *
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS *
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN *
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) *
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE *
* POSSIBILITY OF SUCH DAMAGE. *
* ========================================================================== */
package it.could.util.http;
import it.could.util.StreamTools;
import it.could.util.StringTools;
import it.could.util.location.Location;
import it.could.util.location.Path;
import it.could.util.location.PathElement;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.StringTokenizer;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* <p>A class implementing an extremely simple WebDAV Level 1 client based on
* the {@link HttpClient}.</p>
*
* <p>Once opened this class will represent a WebDAV collection. Users of this
* class can then from an instance of this, deal with relative parent and
* children resources.</p>
*
* @author <a href="http://could.it/">Pier Fumagalli</a>
*/
public class WebDavClient {
/** <p>The WebDAV resource associated with this instance.</p> */
private Resource resource;
/** <p>A map of children resources of this instance.</p> */
private Map children;
/**
* <p>Create a new {@link WebDavClient} instance opening the collection
* identified by the specified {@link Location}.</p>
*
* @param location the {@link Location} of the WebDAV collection to open.
* @throws IOException if an I/O or network error occurred, or if the
* {@link Location} specified does not point to a
* WebDAV collection.
* @throws NullPointerException if the {@link Location} was <b>null</b>.
*/
public WebDavClient(Location location)
throws NullPointerException, IOException {
if (location == null) throw new NullPointerException("Null location");
this.reload(location);
}
/* ====================================================================== */
/* ACTIONS */
/* ====================================================================== */
/**
* <p>Refresh this {@link WebDavClient} instance re-connecting to the remote
* collection and re-reading its properties.</p>
*
* @return this {@link WebDavClient} instance.
*/
public WebDavClient refresh()
throws IOException {
this.reload(this.resource.location);
return this;
}
/**
* <p>Fetch the contents of the specified child resource of the collection
* represented by this {@link WebDavClient} instance.</p>
*
* @see #isCollection(String)
* @return a <b>non-null</b> {@link InputStream}.
* @throws IOException if an I/O or network error occurred, or if the
* child specified represents a collection.
* @throws NullPointerException if the child was <b>null</b>.
*/
public InputStream get(String child)
throws NullPointerException, IOException {
if (child == null) throw new NullPointerException("Null child");
if (! this.isCollection(child)) {
final Location location = this.getLocation(child);
final HttpClient client = new HttpClient(location);
client.setAcceptableStatus(200).connect("GET");
return client.getResponseStream();
}
throw new IOException("Child \"" + child + "\" is a collection");
}
/**
* <p>Delete the child resource (or collection) of the collection
* represented by this {@link WebDavClient} instance.</p>
*
* @return this {@link WebDavClient} instance.
* @throws IOException if an I/O or network error occurred.
* @throws NullPointerException if the child was <b>null</b>.
*/
public WebDavClient delete(String child)
throws NullPointerException, IOException {
if (child == null) throw new NullPointerException("Null child");
final HttpClient client = new HttpClient(this.getLocation(child));
client.setAcceptableStatus(204).connect("DELETE").disconnect();
return this.refresh();
}
/**
* <p>Create a new collection as a child of the collection represented
* by this {@link WebDavClient} instance.</p>
*
* <p>In comparison to {@link #put(String)} and {@link #put(String, long)}
* this method will fail if the specified child already exists.</p>
*
* @see #hasChild(String)
* @return this {@link WebDavClient} instance.
* @throws IOException if an I/O or network error occurred, or if the
* child specified already exists.
* @throws NullPointerException if the child was <b>null</b>.
*/
public WebDavClient mkcol(String child)
throws NullPointerException, IOException {
if (child == null) throw new NullPointerException("Null child");
if (this.hasChild(child))
throw new IOException("Child \"" + child + "\" already exists");
final Location location = this.resource.location.resolve(child);
final HttpClient client = new HttpClient(location);
client.setAcceptableStatus(201).connect("MKCOL").disconnect();
return this.refresh();
}
/**
* <p>Create a new (or update the contents of a) child of the collection
* represented by this {@link WebDavClient} instance.</p>
*
* <p>This method will behave exactly like the {@link #put(String, long)}
* method, but the data written to the returned {@link OutputStream} will
* be <i>buffered in memory</i> and will be transmitted to the remote
* server only when the {@link OutputStream#close()} method is called.</p>
*
* <p>If the returned {@link OutputStream} is garbage collected before the
* {@link OutputStream#close() close()} method is called, the entire
* transaction will be aborted and no connection to the remote server will
* be established.</p>
*
* <p>Use this method only in extreme cases; in normal circumstances always rely
* on the {@link #put(String, long)} method.</p>
*
* @see #put(String, long)
* @return a <b>non-null</b> {@link OutputStream} instance.
* @throws NullPointerException if the child was <b>null</b>.
*/
public OutputStream put(final String child)
throws NullPointerException {
if (child == null) throw new NullPointerException("Null child");
final WebDavClient client = this;
return new ByteArrayOutputStream() {
private boolean closed = false;
public void close()
throws IOException {
if (this.closed) return;
this.flush();
final byte[] data = this.toByteArray(); // only the bytes written so far, not the raw buffer capacity
OutputStream output = client.put(child, data.length);
output.write(data);
output.flush();
output.close();
}
protected void finalize()
throws Throwable {
this.closed = true;
super.finalize();
}
};
}
/**
* <p>Create a new (or update the contents of a) child of the collection
* represented by this {@link WebDavClient} instance.</p>
*
* <p>If the specified child {@link #hasChild(String) already exists} on
* the remote server, it will be {@link #delete(String) deleted} before
* writing.</p>
*
* @return a <b>non-null</b> {@link OutputStream} instance.
* @throws NullPointerException if the child was <b>null</b>.
* @throws IOException if an I/O or network error occurred.
*/
public OutputStream put(String child, long length)
throws NullPointerException, IOException {
if (child == null) throw new NullPointerException("Null child");
if (this.hasChild(child)) this.delete(child);
final Location location = this.resource.location.resolve(child);
final HttpClient client = new HttpClient(location);
client.setAcceptableStatuses(new int[] { 201, 204 });
client.connect("PUT", length);
final WebDavClient webdav = this;
return new BufferedOutputStream(client.getRequestStream()) {
boolean closed = false;
public void close()
throws IOException {
if (this.closed) return;
try {
super.close();
} finally {
this.closed = true;
webdav.refresh();
}
}
protected void finalize()
throws Throwable {
try {
this.close();
} finally {
super.finalize();
}
}
};
}
/**
* <p>Open the specified child collection of the collection represented by
* this {@link WebDavClient} as a new {@link WebDavClient} instance.</p>
*
* <p>If the specified child is "<code>.</code>" this method
* will behave exactly like {@link #refresh()} and <i>this instance</i>
* will be returned.</p>
*
* <p>If the specified child is "<code>..</code>" this method
* will behave exactly like {@link #parent()}.</p>
*
* @return a <b>non-null</b> {@link WebDavClient} instance.
* @throws NullPointerException if the child was <b>null</b>.
* @throws IOException if an I/O or network error occurred, or if the
* child specified did not exist.
*/
public WebDavClient open(String child)
throws NullPointerException, IOException {
if (child == null) throw new NullPointerException("Null child");
if (".".equals(child)) return this.refresh();
if ("..".equals(child)) return this.parent();
if (this.isCollection(child)) {
Location loc = this.getLocation().resolve(this.getLocation(child));
return new WebDavClient(loc);
}
throw new IOException("Child \"" + child + "\" is not a collection");
}
/**
* <p>Open the parent collection of the collection represented by this
* {@link WebDavClient} as a new {@link WebDavClient} instance.</p>
*
* @return a <b>non-null</b> {@link WebDavClient} instance.
* @throws IOException if an I/O or network error occurred, or if the
* parent is not a WebDAV collection.
*/
public WebDavClient parent()
throws IOException {
final Location location = this.resource.location.resolve("..");
return new WebDavClient(location);
}
/* ====================================================================== */
/* ACCESSOR METHODS */
/* ====================================================================== */
/**
* <p>Return an {@link Iterator} over {@link String}s for all the children
* of the collection represented by this {@link WebDavClient} instance.</p>
*/
public Iterator iterator() {
return this.children.keySet().iterator();
}
/**
* <p>Checks if the collection represented by this {@link WebDavClient}
* contains the specified child.</p>
*/
public boolean hasChild(String child) {
return this.children.containsKey(child);
}
/**
* <p>Return the {@link Location} associated with the collection
* represented by this {@link WebDavClient}.</p>
*
* <p>The returned {@link Location} can be different from the one specified
* at construction, in case the server redirected us upon connection.</p>
*/
public Location getLocation() {
return this.resource.location;
}
/**
* <p>Return the content length (in bytes) of the collection represented
* by this {@link WebDavClient} as passed to us by the WebDAV server.</p>
*/
public long getContentLength() {
return this.resource.contentLength;
}
/**
* <p>Return the content type (mime-type) of the collection represented
* by this {@link WebDavClient} as passed to us by the WebDAV server.</p>
*/
public String getContentType() {
return this.resource.contentType;
}
/**
* <p>Return the last modified {@link Date} of the collection represented
* by this {@link WebDavClient} as passed to us by the WebDAV server.</p>
*/
public Date getLastModified() {
return this.resource.lastModified;
}
/**
* <p>Return the creation {@link Date} of the collection represented
* by this {@link WebDavClient} as passed to us by the WebDAV server.</p>
*/
public Date getCreationDate() {
return this.resource.creationDate;
}
/**
* <p>Return the {@link Location} associated with the specified child of
* the collection represented by this {@link WebDavClient}.</p>
*
* @throws IOException if the specified child does not exist.
* @throws NullPointerException if the specified child was <b>null</b>.
*/
public Location getLocation(String child)
throws IOException {
Location location = this.getResource(child).location;
return this.resource.location.resolve(location);
}
/**
* <p>Checks if the specified child of the collection represented by this
* {@link WebDavClient} instance is a collection.</p>
*/
public boolean isCollection(String child)
throws IOException {
return this.getResource(child).collection;
}
/**
* <p>Return the content length (in bytes) associated with the specified
* child of the collection represented by this {@link WebDavClient}.</p>
*
* @throws IOException if the specified child does not exist.
* @throws NullPointerException if the specified child was <b>null</b>.
*/
public long getContentLength(String child)
throws IOException {
return this.getResource(child).contentLength;
}
/**
* <p>Return the content type (mime-type) associated with the specified
* child of the collection represented by this {@link WebDavClient}.</p>
*
* @throws IOException if the specified child does not exist.
* @throws NullPointerException if the specified child was <b>null</b>.
*/
public String getContentType(String child)
throws IOException {
return this.getResource(child).contentType;
}
/**
* <p>Return the last modified {@link Date} associated with the specified
* child of the collection represented by this {@link WebDavClient}.</p>
*
* @throws IOException if the specified child does not exist.
* @throws NullPointerException if the specified child was <b>null</b>.
*/
public Date getLastModified(String child)
throws IOException {
return this.getResource(child).lastModified;
}
/**
* <p>Return the creation {@link Date} associated with the specified
* child of the collection represented by this {@link WebDavClient}.</p>
*
* @throws IOException if the specified child does not exist.
* @throws NullPointerException if the specified child was <b>null</b>.
*/
public Date getCreationDate(String child)
throws IOException {
return this.getResource(child).creationDate;
}
/* ====================================================================== */
/* INTERNAL METHODS */
/* ====================================================================== */
/**
* <p>Return the resource associated with the specified child.</p>
*
* @throws IOException if the specified child does not exist.
* @throws NullPointerException if the specified child was <b>null</b>.
*/
private Resource getResource(String child)
throws IOException {
if (child == null) throw new NullPointerException();
final Resource resource = (Resource) this.children.get(child);
if (resource == null) throw new IOException("Not found: " + child);
return resource;
}
/**
* <p>Contact the remote WebDAV server and fetch all properties.</p>
*/
private void reload(Location location)
throws IOException {
/* Do an OPTIONS over onto the location */
location = this.options(location);
/* Do a PROPFIND to figure out the properties and the children */
final Iterator iterator = this.propfind(location).iterator();
final Map children = new HashMap();
while (iterator.hasNext()) {
final Resource resource = (Resource) iterator.next();
final Path path = resource.location.getPath();
if (path.size() == 0) {
resource.location = location.resolve(resource.location);
this.resource = resource;
} else if (path.size() == 1) {
final PathElement element = (PathElement) path.get(0);
if ("..".equals(element.getName())) continue;
children.put(element.toString(), resource);
}
}
/* Check if the current resource was discovered */
if (this.resource == null)
throw new IOException("Current resource not returned in PROOPFIND");
/* Don't actually allow resources to be modified */
this.children = Collections.unmodifiableMap(children);
}
/**
* <p>Contact the remote WebDAV server and do an OPTIONS lookup.</p>
*/
private Location options(Location location)
throws IOException {
/* Create the new HttpClient instance associated with the location */
final HttpClient client = new HttpClient(location);
client.setAcceptableStatus(200).connect("OPTIONS", true).disconnect();
/* Check that the remote server returned the "Dav" header */
final List davHeader = client.getResponseHeaderValues("dav");
if (davHeader == null) {
throw new IOException("Server did not respond with a DAV header");
}
/* Check if the OPTIONS request contained the DAV header */
final Iterator iterator = davHeader.iterator();
boolean foundLevel1 = false;
while (iterator.hasNext() && (! foundLevel1)) {
String value = (String) iterator.next();
StringTokenizer tokenizer = new StringTokenizer(value, ",");
while (tokenizer.hasMoreTokens()) {
if (! "1".equals(tokenizer.nextToken().trim())) continue;
foundLevel1 = true;
break;
}
}
/* Return the (possibly redirected) location or fail miserably */
if (foundLevel1) return client.getLocation();
throw new IOException("Server doesn't support DAV Level 1");
}
/**
* <p>Contact the remote WebDAV server and do a PROPFIND lookup, returning
* a {@link List} of all scavenged resources.</p>
*/
private List propfind(Location location)
throws IOException {
/* Create the new HttpClient instance associated with the location */
final HttpClient client = new HttpClient(location);
client.addRequestHeader("Depth", "1");
client.setAcceptableStatus(207).connect("PROPFIND", true);
/* Get the XML SAX Parser and parse the output of the PROPFIND */
try {
final SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setValidating(false);
factory.setNamespaceAware(true);
final SAXParser parser = factory.newSAXParser();
final String systemId = location.toString();
final InputSource source = new InputSource(systemId);
final Handler handler = new Handler(location);
source.setByteStream(client.getResponseStream());
parser.parse(source, handler);
return handler.list;
} catch (ParserConfigurationException exception) {
Exception throwable = new IOException("Error creating XML parser");
throw (IOException) throwable.initCause(exception);
} catch (SAXException exception) {
Exception throwable = new IOException("Error creating XML parser");
throw (IOException) throwable.initCause(exception);
} finally {
client.disconnect();
}
}
/* ====================================================================== */
/* INTERNAL CLASSES */
/* ====================================================================== */
/**
* <p>An internal XML {@link DefaultHandler} used to parse out the various
* details of a PROPFIND response.</p>
*/
private static final class Handler extends DefaultHandler {
/* ================================================================== */
/* PSEUDO-XPATH LOCATIONS FOR QUICK-AND-DIRTY LOCATION LOOKUP */
/* ================================================================== */
private static final String RESPONSE_PATH = "/multistatus/response";
private static final String HREF_PATH = "/multistatus/response/href";
private static final String COLLECTION_PATH =
"/multistatus/response/propstat/prop/resourcetype/collection";
private static final String GETCONTENTTYPE_PATH =
"/multistatus/response/propstat/prop/getcontenttype";
private static final String GETLASTMODIFIED_PATH =
"/multistatus/response/propstat/prop/getlastmodified";
private static final String GETCONTENTLENGTH_PATH =
"/multistatus/response/propstat/prop/getcontentlength";
private static final String CREATIONDATE_PATH =
"/multistatus/response/propstat/prop/creationdate";
/** <p>The {@link Location} for resolving all other links.</p> */
private final Location base;
/** <p>The {@link List} of all scavenged resources.</p> */
private final List list = new ArrayList();
/** <p>The resource currently being processed.</p> */
private Resource rsrc = null;
/** <p>A {@link StringBuffer} holding character data.</p> */
private StringBuffer buff = null;
/** <p>A {@link Stack} for quick-and-dirty pseudo XPath lookups.</p> */
private Stack stack = new Stack();
/**
* <p>Create a new instance specifying the base {@link Location}.</p>
*/
private Handler(Location location) {
this.base = location;
}
/**
* <p>Push an element name in the stack for pseudo-XPath lookups.</p>
*
* @return a {@link String} like <code>/element/element/element</code>.
*/
private String pushPath(String path) {
this.stack.push(path.toLowerCase());
final StringBuffer buffer = new StringBuffer();
for (int x = 0; x < this.stack.size(); x ++)
buffer.append('/').append(this.stack.get(x));
return buffer.toString();
}
/**
* <p>Pop the last element name from the pseudo-XPath lookup stack.</p>
*
* @return a {@link String} like <code>/element/element/element</code>.
*/
private String popPath(String path)
throws SAXException {
final StringBuffer buffer = new StringBuffer();
final String last = (String) this.stack.pop();
if (path.toLowerCase().equals(last)) {
for (int x = 0; x < this.stack.size(); x ++)
buffer.append('/').append(this.stack.get(x));
return buffer.append('/').append(last).toString();
}
throw new SAXException("Tag <" + path + "/> unbalanced at path \""
+ pushPath(last) + "\"");
}
/**
* <p>Handle the start-of-element SAX event.</p>
*/
public void startElement(String uri, String l, String q, Attributes a)
throws SAXException {
if (! "DAV:".equals(uri.toUpperCase())) return;
final String path = this.pushPath(l);
if (RESPONSE_PATH.equals(path)) {
this.rsrc = new Resource();
} else if (COLLECTION_PATH.equals(path)) {
if (this.rsrc != null) this.rsrc.collection = true;
} else if (GETCONTENTTYPE_PATH.equals(path) ||
GETLASTMODIFIED_PATH.equals(path) ||
GETCONTENTLENGTH_PATH.equals(path) ||
CREATIONDATE_PATH.equals(path) ||
HREF_PATH.equals(path)) {
this.buff = new StringBuffer();
}
}
/**
* <p>Handle the end-of-element SAX event.</p>
*/
public void endElement(String uri, String l, String q)
throws SAXException {
if (! "DAV:".equals(uri.toUpperCase())) return;
final String path = this.popPath(l);
final String data = this.resetBuffer();
if (RESPONSE_PATH.equals(path)) {
if (this.rsrc != null) {
if (this.rsrc.location != null) {
if (this.rsrc.location.isAbsolute()) {
final String z = this.rsrc.location.toString();
throw new SAXException("Unresolved location " + z);
} else {
this.list.add(this.rsrc);
}
} else {
throw new SAXException("Null location for resource");
}
}
} else if (HREF_PATH.equals(path)) {
if (this.rsrc != null) {
final Location resolved = this.base.resolve(data);
this.rsrc.location = this.base.relativize(resolved);
if (! this.rsrc.location.isRelative()) {
throw new SAXException("Unable to relativize location "
+ this.rsrc.location);
}
}
} else if (CREATIONDATE_PATH.equals(path)) {
if (this.rsrc != null)
this.rsrc.creationDate = StringTools.parseIsoDate(data);
} else if (GETCONTENTTYPE_PATH.equals(path)) {
if (this.rsrc != null) this.rsrc.contentType = data;
} else if (GETLASTMODIFIED_PATH.equals(path)) {
if (this.rsrc != null)
this.rsrc.lastModified = StringTools.parseHttpDate(data);
} else if (GETCONTENTLENGTH_PATH.equals(path)) {
if (this.rsrc != null) {
Long length = StringTools.parseNumber(data);
if (length != null) {
this.rsrc.contentLength = length.longValue();
}
}
}
}
/**
* <p>Handle SAX characters notification.</p>
*/
public void characters(char buffer[], int offset, int length) {
if (this.buff != null) this.buff.append(buffer, offset, length);
}
/**
* <p>Reset the current characters buffer and return it as a
* {@link String}.</p>
*/
private String resetBuffer() {
if (this.buff == null) return null;
if (this.buff.length() == 0) {
this.buff = null;
return null;
}
final String value = this.buff.toString();
this.buff = null;
return value;
}
}
/**
* <p>A simple class holding the core resource properties.</p>
*/
private static class Resource {
private Location location = null;
private boolean collection = false;
private long contentLength = -1;
private String contentType = null;
private Date lastModified = null;
private Date creationDate = null;
}
/* ====================================================================== */
/* COMMAND LINE CLIENT */
/* ====================================================================== */
/**
* <p>A command-line interface to a WebDAV repository.</p>
*
* <p>When invoked from the command line, this class requires only one
* argument: the URL location of the WebDAV repository to connect to.</p>
*
* <p>After connection this method will interact with the user using an
* extremely simple console-based interface.</p>
*/
public static void main(String args[])
throws IOException {
final InputStreamReader r = new InputStreamReader(System.in);
final BufferedReader in = new BufferedReader(r);
WebDavClient client = new WebDavClient(Location.parse(args[0]));
while (true) try {
System.out.print("[" + client.getLocation() + "] -> ");
args = parse(in.readLine());
if (args == null) break;
if (args[0].equals("list")) {
if (args[1] == null) list(client, System.out);
else list(client.open(args[1]), System.out);
} else if (args[0].equals("refresh")) {
client = client.refresh();
} else if (args[0].equals("get")) {
if (args[1] != null) {
final InputStream input = client.get(args[1]);
final File file = new File(args[2]).getCanonicalFile();
final OutputStream output = new FileOutputStream(file);
final long bytes = StreamTools.copy(input, output);
System.out.println("Fetched child \"" + args[1] +
"\" to file \"" + file + "\" (" +
bytes + " bytes)");
}
else System.out.print("Can't \"get\" null");
} else if (args[0].equals("put")) {
if (args[1] != null) {
final File file = new File(args[1]).getCanonicalFile();
final InputStream input = new FileInputStream(file);
final OutputStream output = client.put(args[2], file.length());
final long bytes = StreamTools.copy(input, output);
System.out.println("Uploaded file \"" + file +
"\" to child \"" + args[2] + "\" (" +
bytes + " bytes)");
}
else System.out.print("Can't \"put\" null");
} else if (args[0].equals("mkcol")) {
if (args[1] != null) {
client.mkcol(args[1]);
System.out.println("Created \"" + args[1] + "\"");
}
else System.out.print("Can't \"mkcol\" null");
} else if (args[0].equals("delete")) {
if (args[1] != null) {
client.delete(args[1]);
System.out.println("Deleted \"" + args[1] + "\"");
}
else System.out.print("Can't \"delete\" null");
} else if (args[0].equals("cd")) {
if (args[1] != null) client = client.open(args[1]);
else System.out.print("Can't \"cd\" to null");
} else if (args[0].equals("quit")) {
break;
} else {
System.out.print("Invalid command \"" + args[0] + "\". ");
System.out.println("Valid commands are:");
System.out.println(" - \"list\" list the children child");
System.out.println(" - \"get\" fetch the specified child");
System.out.println(" - \"put\" put the specified child");
System.out.println(" - \"mkcol\" create a collection");
System.out.println(" - \"delete\" delete a child");
System.out.println(" - \"put\" put the specified resource");
System.out.println(" - \"cd\" change the location");
System.out.println(" - \"refresh\" refresh this location");
System.out.println(" - \"quit\" quit this application");
}
} catch (Exception exception) {
exception.printStackTrace(System.err);
}
System.err.println();
}
/**
* <p>Parse a line entered by the user returning a three-tokens argument
* list (command, argument 1, argument 2)</p>
*/
private static String[] parse(String line) {
if (line == null) return null;
final String array[] = new String[3];
final StringTokenizer tokenizer = new StringTokenizer(line);
int offset = 0;
while (tokenizer.hasMoreTokens() && (offset < 3))
array[offset ++] = tokenizer.nextToken();
if (array[0] == null) return null;
if (array[2] == null) array[2] = array[1];
return array;
}
/**
* <p>Pseudo-nicely display a list of the children of a collection</p>
*/
private static void list(WebDavClient client, PrintStream out)
throws IOException {
out.print("C | ");
out.print("CONTENT TYPE | ");
out.print("CREATED | ");
out.print("MODIFIED | ");
out.print("SIZE | ");
out.println("NAME ");
for (Iterator iterator = client.iterator(); iterator.hasNext() ; ) {
final StringBuffer buffer = new StringBuffer();
String child = (String) iterator.next();
if (client.isCollection(child)) buffer.append("* | ");
else buffer.append(" | ");
format(buffer, client.getContentType(child), 15).append(" | ");
format(buffer, client.getCreationDate(child), 19).append(" | ");
format(buffer, client.getLastModified(child), 19).append(" | ");
format(buffer, client.getContentLength(child), 10).append(" | ");
out.println(buffer.append(child));
}
}
/** <p>Format a number aligning it to the right of a string.</p> */
private static StringBuffer format(StringBuffer buf, long num, int len) {
final String data;
if (num < 0) data = "";
else data = Long.toString(num);
final int spaces = len - data.length();
for (int x = 0; x < spaces; x++) buf.append(' ');
buf.append(data);
return buf;
}
/** <p>Format a string into an exact number of characters.</p> */
private static StringBuffer format(StringBuffer buf, Object obj, int len) {
final String string;
if (obj == null) {
string = ("[null]");
} else if (obj instanceof Date) {
SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
string = f.format((Date) obj);
} else {
string = obj.toString();
}
final StringBuffer buffer = new StringBuffer(string);
for (int x = string.length(); x < len; x ++) buffer.append(' ');
return buf.append(buffer.substring(0, len));
}
}
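/*
 * A minimal usage sketch (illustrative only) of the WebDavClient above: open a collection,
 * list the children reported by the PROPFIND, and fetch one non-collection child into memory.
 * The repository URL and the child name "readme.txt" are placeholders.
 */
class WebDavClientUsageSketch {
    public static void main(String[] args) throws IOException {
        WebDavClient dav = new WebDavClient(Location.parse("http://example.com/dav/"));
        // List the children of the opened collection, marking sub-collections with a slash.
        for (Iterator names = dav.iterator(); names.hasNext(); ) {
            String child = (String) names.next();
            System.out.println(child + (dav.isCollection(child) ? "/" : ""));
        }
        // Fetch a plain (non-collection) child, if present, and report its size.
        if (dav.hasChild("readme.txt") && !dav.isCollection("readme.txt")) {
            InputStream in = dav.get("readme.txt");
            try {
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                byte[] buffer = new byte[8192];
                for (int n; (n = in.read(buffer)) != -1; ) out.write(buffer, 0, n);
                System.out.println("Fetched \"readme.txt\" (" + out.size() + " bytes)");
            } finally {
                in.close();
            }
        }
    }
}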
|
|
/* Copyright (c) 2001-2014, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.test;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Locale;
import org.hsqldb.lib.HashSet;
import org.hsqldb.lib.Iterator;
import org.hsqldb.lib.Set;
import org.hsqldb.types.Collation;
/**
* Test HSQLDB's collation capabilities
* @author [email protected]
*/
public class TestCollation extends TestBase {
Statement statement;
Connection connection;
Iterator collIterator;
Iterator localeIterator;
/** Creates a new instance of TestCollation */
public TestCollation(String name) {
super(name, "jdbc:hsqldb:file:test", false, false);
super.isNetwork = false;
}
protected void setUp() throws Exception {
super.setUp();
connection = super.newConnection();
statement = connection.createStatement();
collIterator = Collation.getCollationsIterator();
localeIterator = Collation.getLocalesIterator();
}
protected void tearDown() {
try {
statement = connection.createStatement();
statement.execute("SHUTDOWN");
} catch (Exception e) {}
super.tearDown();
}
/**
* checks whether expected locales are present and selectable
*/
public void testVerifyAvailability() {
// let's check whether unknown collation identifiers are rejected
try {
statement.execute(
getSetCollationStmt(
"ThisIsDefinitlyNoValidCollationIdentifier"));
fail("database did not reject invalid collation name");
} catch (SQLException e) {}
// let's check whether the DB accepts all known collations
int count = 0;
while (collIterator.hasNext()) {
String collationName = (String) collIterator.next();
try {
statement.execute(getSetCollationStmt(collationName));
} catch (SQLException e) {
fail("could not set collation '" + collationName
+ "'\n exception message: " + e.getMessage());
}
++count;
}
System.out.println("checked " + count
+ " collations for availability.");
// even if the above worked, we cannot be sure that all locales are really supported.
        // The fact that SET DATABASE COLLATION succeeded only means that a Collator could
// be instantiated with a Locale matching the given collation name. But what if
// Locale.Instance(...) lied, and returned a fallback Locale instance?
//
        // Hmm, looking at the documentation of Locale.getAvailableLocales, I'm not sure
        // whether relying on it is really feasible. The doc states "returns a list of all
        // installed Locales", and the word "installed" suggests the result may differ per
        // installation, and not only per JDK version.
Locale[] availableLocales = Locale.getAvailableLocales();
Set existenceCheck = new HashSet();
for (int i = 0; i < availableLocales.length; ++i) {
            String availableName = availableLocales[i].getLanguage();
            if (availableLocales[i].getCountry().length() > 0) {
                availableName += "-" + availableLocales[i].getCountry();
            }
            existenceCheck.add(availableName);
}
String notInstalled = "";
int expected = 0,
failed = 0;
while (localeIterator.hasNext()) {
String localeName = (String) localeIterator.next();
++expected;
if (!existenceCheck.contains(localeName)) {
if (notInstalled.length() > 0) {
notInstalled += "; ";
}
notInstalled += localeName;
++failed;
}
}
if (notInstalled.length() > 0) {
fail("the following locales are not installed:\n " + notInstalled
+ "\n (" + failed + " out of " + expected + ")");
}
}
/**
* checks whether sorting via a given collation works as expected
*/
public void testVerifyCollation() {
String failedCollations = "";
String failMessage = "";
while (collIterator.hasNext()) {
String collationName = (String) collIterator.next();
String message = checkSorting(collationName);
if (message.length() > 0) {
if (failedCollations.length() > 0) {
failedCollations += ", ";
}
failedCollations += collationName;
failMessage += message;
}
}
if (failedCollations.length() > 0) {
fail("test failed for following collations:\n" + failedCollations
+ "\n" + failMessage);
}
}
/**
* returns an SQL statement to set the database collation
*/
protected final String getSetCollationStmt(String collationName) {
final String setCollationStmtPre = "SET DATABASE COLLATION \"";
final String setCollationStmtPost = "\"";
return setCollationStmtPre + collationName + setCollationStmtPost;
}
/**
     * checks sorting of a table according to a given collation
*/
protected String checkSorting(String collationName) {
String stmt1 = "DROP TABLE WORDLIST IF EXISTS;";
String stmt2 =
"CREATE TEXT TABLE WORDLIST ( ID INTEGER, WORD VARCHAR(50) );";
String stmt3 = "SET TABLE WORDLIST SOURCE \"" + collationName
+ ".csv;encoding=UTF-8\"";
String selectStmt = "SELECT ID, WORD FROM WORDLIST ORDER BY WORD";
String returnMessage = "";
try {
// set database collation
statement.execute(getSetCollationStmt(collationName));
statement.execute(stmt1);
statement.execute(stmt2);
statement.execute(stmt3);
ResultSet results = statement.executeQuery(selectStmt);
while (results.next()) {
int expectedPosition = results.getInt(1);
int foundPosition = results.getRow();
if (expectedPosition != foundPosition) {
String word = results.getString(2);
return "testing collation '" + collationName
+ "' failed\n" + " word : " + word
+ "\n" + " expected position : "
+ expectedPosition + "\n"
+ " found position : " + foundPosition + "\n";
}
}
} catch (SQLException e) {
return "testing collation '" + collationName
+ "' failed\n exception message: " + e.getMessage() + "\n";
}
return "";
}
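    // Illustrative sketch of the word-list source files read by checkSorting(...)
    // above (an assumption about their layout, derived from the statements the
    // method executes): each "<collationName>.csv" is expected to contain
    // "expectedPosition,word" rows using HSQLDB's default comma field separator,
    // e.g. for a German collation:
    //   1,Arm
    //   2,Ärmel
    //   3,Arzt
    // so that SELECT ... ORDER BY WORD under that collation must return the words
    // in the order given by the ID column.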
public static void main(String[] argv) {
runWithResult(TestCollation.class, "testVerifyAvailability");
runWithResult(TestCollation.class, "testVerifyCollation");
}
}
|
|
package com.example.mac.miniprojet;
import android.support.v7.app.AppCompatActivity;
import android.app.Activity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
import com.example.mac.miniprojet.R;
/**
* Fragment used for managing interactions for and presentation of a navigation drawer.
* See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
* design guidelines</a> for a complete explanation of the behaviors implemented here.
*/
public class NavigationDrawerFragment extends Fragment {
/**
* Remember the position of the selected item.
*/
private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";
/**
* Per the design guidelines, you should show the drawer on launch until the user manually
* expands it. This shared preference tracks this.
*/
private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
/**
* A pointer to the current callbacks instance (the Activity).
*/
private NavigationDrawerCallbacks mCallbacks;
/**
* Helper component that ties the action bar to the navigation drawer.
*/
private ActionBarDrawerToggle mDrawerToggle;
private DrawerLayout mDrawerLayout;
private ListView mDrawerListView;
private View mFragmentContainerView;
private int mCurrentSelectedPosition = 0;
private boolean mFromSavedInstanceState;
private boolean mUserLearnedDrawer;
public NavigationDrawerFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Read in the flag indicating whether or not the user has demonstrated awareness of the
// drawer. See PREF_USER_LEARNED_DRAWER for details.
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);
if (savedInstanceState != null) {
mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
mFromSavedInstanceState = true;
}
// Select either the default item (0) or the last selected item.
selectItem(mCurrentSelectedPosition);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Indicate that this fragment would like to influence the set of actions in the action bar.
setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
mDrawerListView = (ListView) inflater.inflate(
R.layout.fragment_navigation_drawer, container, false);
mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
selectItem(position);
}
});
mDrawerListView.setAdapter(new ArrayAdapter<String>(
getActionBar().getThemedContext(),
android.R.layout.simple_list_item_activated_1,
android.R.id.text1,
new String[]{
"Section 1",
"Section 1",
"Section 1",
"Section 1",
}));
mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
return mDrawerListView;
}
public boolean isDrawerOpen() {
return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
}
/**
* Users of this fragment must call this method to set up the navigation drawer interactions.
*
* @param fragmentId The android:id of this fragment in its activity's layout.
* @param drawerLayout The DrawerLayout containing this fragment's UI.
*/
public void setUp(int fragmentId, DrawerLayout drawerLayout) {
mFragmentContainerView = getActivity().findViewById(fragmentId);
mDrawerLayout = drawerLayout;
// set a custom shadow that overlays the main content when the drawer opens
mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
// set up the drawer's list view with items and click listener
ActionBar actionBar = getActionBar();
actionBar.setDisplayHomeAsUpEnabled(true);
actionBar.setHomeButtonEnabled(true);
        // ActionBarDrawerToggle ties together the proper interactions
// between the navigation drawer and the action bar app icon.
mDrawerToggle = new ActionBarDrawerToggle(
getActivity(), /* host Activity */
mDrawerLayout, /* DrawerLayout object */
R.drawable.ic_drawer, /* nav drawer image to replace 'Up' caret */
R.string.navigation_drawer_open, /* "open drawer" description for accessibility */
R.string.navigation_drawer_close /* "close drawer" description for accessibility */
) {
@Override
public void onDrawerClosed(View drawerView) {
super.onDrawerClosed(drawerView);
if (!isAdded()) {
return;
}
getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
}
@Override
public void onDrawerOpened(View drawerView) {
super.onDrawerOpened(drawerView);
if (!isAdded()) {
return;
}
if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag so the
                    // navigation drawer is not auto-shown again in the future.
mUserLearnedDrawer = true;
SharedPreferences sp = PreferenceManager
.getDefaultSharedPreferences(getActivity());
sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
}
getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
}
};
// If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
// per the navigation drawer design guidelines.
if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
mDrawerLayout.openDrawer(mFragmentContainerView);
}
// Defer code dependent on restoration of previous instance state.
mDrawerLayout.post(new Runnable() {
@Override
public void run() {
mDrawerToggle.syncState();
}
});
mDrawerLayout.setDrawerListener(mDrawerToggle);
}
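    // Usage sketch for setUp(...) above (illustrative only): the host activity
    // would typically wire the drawer up in onCreate(), assuming its layout
    // declares this fragment as R.id.navigation_drawer inside a DrawerLayout
    // with id R.id.drawer_layout (both ids are assumptions, not defined here):
    //
    //   NavigationDrawerFragment drawerFragment = (NavigationDrawerFragment)
    //           getSupportFragmentManager().findFragmentById(R.id.navigation_drawer);
    //   drawerFragment.setUp(R.id.navigation_drawer,
    //           (DrawerLayout) findViewById(R.id.drawer_layout));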
private void selectItem(int position) {
mCurrentSelectedPosition = position;
if (mDrawerListView != null) {
mDrawerListView.setItemChecked(position, true);
}
if (mDrawerLayout != null) {
mDrawerLayout.closeDrawer(mFragmentContainerView);
}
if (mCallbacks != null) {
mCallbacks.onNavigationDrawerItemSelected(position);
}
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mCallbacks = (NavigationDrawerCallbacks) activity;
} catch (ClassCastException e) {
throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
}
}
@Override
public void onDetach() {
super.onDetach();
mCallbacks = null;
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
        // Forward the new configuration to the drawer toggle component.
mDrawerToggle.onConfigurationChanged(newConfig);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
// If the drawer is open, show the global app actions in the action bar. See also
// showGlobalContextActionBar, which controls the top-left area of the action bar.
if (mDrawerLayout != null && isDrawerOpen()) {
inflater.inflate(R.menu.global, menu);
showGlobalContextActionBar();
}
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (mDrawerToggle.onOptionsItemSelected(item)) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Per the navigation drawer design guidelines, updates the action bar to show the global app
* 'context', rather than just what's in the current screen.
*/
private void showGlobalContextActionBar() {
ActionBar actionBar = getActionBar();
actionBar.setDisplayShowTitleEnabled(true);
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
actionBar.setTitle(R.string.app_name);
}
private ActionBar getActionBar() {
return ((AppCompatActivity) getActivity()).getSupportActionBar();
}
/**
* Callbacks interface that all activities using this fragment must implement.
*/
public static interface NavigationDrawerCallbacks {
/**
* Called when an item in the navigation drawer is selected.
*/
void onNavigationDrawerItemSelected(int position);
}
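    // Minimal sketch (an assumption, not part of this app's sources) of a host
    // activity implementing the callback; it would typically swap in a content
    // fragment for the selected drawer position:
    //
    //   public class MainActivity extends AppCompatActivity
    //           implements NavigationDrawerFragment.NavigationDrawerCallbacks {
    //       @Override
    //       public void onNavigationDrawerItemSelected(int position) {
    //           // e.g. replace the content fragment for the chosen section
    //       }
    //   }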
}
|
|
package edu.asu.plp.compile.compiler;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import edu.asu.plp.Token;
import edu.asu.plp.compile.lex.LexException;
import edu.asu.plp.compile.lex.Lexer;
import edu.asu.plp.compile.parser.ParseException;
import edu.asu.plp.compile.parser.constructs.ClassConstruct;
import edu.asu.plp.compile.parser.constructs.MethodConstruct;
import edu.asu.plp.compile.parser.constructs.Variable;
import edu.asu.plp.compile.parser.support.TreeSupport;
import edu.asu.plp.compile.parser.tree.DeclarationNode;
import edu.asu.plp.compile.parser.tree.NodeType;
import edu.asu.plp.compile.parser.tree.ParseNode;
import edu.asu.plp.compile.parser.tree.SequenceNode;
public class PlpCompiler
{
// TODO: Change to a list of classes. Temporary fix for presentation to
// group
private ClassConstruct mainClass;
private Map<String, Integer> typeSize; // Type size in bits
private Map<Integer, String> registers;
private Map<String, String> commands;
private final String[] operators = { "=", "+=", "+", "-", "*", "<<", ">>",
"|" };
private final String newLine = "\n";
private final String tab = "\t";
private int spaceRequired;
int registerCap = 17;
int registerFloor = 4;
public PlpCompiler(ClassConstruct classConstruct)
{
mainClass = classConstruct;
initializeTypeSizesAndRegisters();
initializeStaticVariableSpace();
}
private void initializeStaticVariableSpace()
{
spaceRequired = 0;
List<Variable> globals = mainClass.getGlobalVariables();
for (Variable global : globals)
{
spaceRequired += typeSize.get(global.getType());
}
}
public List<String> compile()
{
LinkedList<String> compiledPlp = new LinkedList<>();
int starting = (int) Integer.parseInt("10000000", 16) + spaceRequired;
compiledPlp
.add(".org " + ("0x" + Long.toHexString(starting)) + newLine);
if (mainClass.getMainMethod() != null)
{
compiledPlp.add(mainClass.getMainMethod().getSignature()
.getMethodName()
+ ":");
convertMethod(mainClass.getMainMethod(), compiledPlp);
}
compiledPlp.add("");
for (MethodConstruct method : mainClass.getClassMethods())
{
compiledPlp.add(method.getSignature().getMethodName() + ":");
// convertMethod(method, compiledPlp);
compiledPlp.add("");
}
for (MethodConstruct method : mainClass.getStaticMethods())
{
compiledPlp.add(method.getSignature().getMethodName() + ":");
// convertMethod(method, compiledPlp);
compiledPlp.add("");
}
return compiledPlp;
}
private void convertMethod(MethodConstruct mainMethod,
LinkedList<String> compiledPlp)
{
HashMap<String, String> registerMap = new HashMap<>();
int registerNumber = 4;
System.out.println("\n\t\t\t STATEMENTS\n");
for (ParseNode node : mainMethod.getBody().getChildren())
{
registerNumber = convertStatement((SequenceNode) node,
registerNumber, registerMap, compiledPlp);
}
}
private int convertStatement(SequenceNode node, int registerNumber,
HashMap<String, String> registerMap, LinkedList<String> compiledPlp)
{
int currentNumber = registerNumber;
List<String[]> executables = new LinkedList<>();
for (ParseNode childNode : node.getChildren())
{
if (childNode.getType() == NodeType.DECLARATION)
{
if (!registerMap.containsKey(childNode.getChildren().get(1)
.getValue()))
registerMap.put(childNode.getChildren().get(1).getValue(),
registers.get(currentNumber));
currentNumber = Support.incrementBetween(registerFloor,
registerCap, currentNumber);
}
else if (childNode.getType() == NodeType.OPERATOR)
{
convertOperator(childNode, executables);
}
else if (childNode.getType() == NodeType.SEQUENCE)
{
convertTree(childNode, executables);
}
}
System.out.println("\n<--------------Executables------------->");
for (String[] strings : executables)
{
for (String string : strings)
{
System.out.print(string + " ");
}
System.out.println();
}
for (int index = 0; index < executables.size(); index++)
{
String[] components = executables.get(index);
String left = components[0];
String operator = components[1];
String right = components[2];
String command = null;
String globalLeft = "";
String globalRight = "";
boolean isLeftEmpty = false;
boolean isRightEmpty = false;
boolean isLeftOperator = false;
boolean isRightOperator = false;
boolean isLeftLiteral = false;
boolean isRightLiteral = false;
boolean isLeftGlobal = false;
boolean isRightGlobal = false;
// $t0 is left value
// $t1 is right value
// $t2 is assignedValue
// $t3 is misc calculations
if (left.equals(""))
isLeftEmpty = true;
if (right.equals(""))
isRightEmpty = true;
if (!isLeftEmpty && Support.arrayContains(operators, left))
isLeftOperator = true;
else if (!isRightEmpty && Support.arrayContains(operators, right))
isRightOperator = true;
if (!isLeftEmpty && !isLeftOperator)
{
isLeftLiteral = Support.isNumber(left);
}
if (!isRightEmpty && !isRightOperator)
{
isRightLiteral = Support.isNumber(right);
}
if (!isLeftLiteral && !isLeftOperator)
{
if (registerMap.get(left) == null)
{
isLeftGlobal = true;
List<Variable> globals = mainClass.getGlobalVariables();
}
}
if (!isRightLiteral && !isRightOperator)
{
if (registerMap.get(right) == null)
isRightGlobal = true;
}
// Deal with equals
if (!isLeftOperator && !isRightOperator && !isLeftEmpty
&& !isRightEmpty)
{
if (!isLeftLiteral && !isRightLiteral)
{
command = commands.get(operator);
if (operator.equals("|"))
{
boolean isLeftBoolean = false;
boolean isRightBoolean = false;
if (left.equals("false"))
{
compiledPlp.add(tab + "li $t0, 0");
isLeftBoolean = true;
}
else if (left.equals("true"))
{
isLeftBoolean = true;
compiledPlp.add(tab + "li $t0, 1");
}
if (right.equals("false"))
{
isRightBoolean = true;
compiledPlp.add(tab + "li $t1, 0");
}
else if (right.equals("true"))
{
isRightBoolean = true;
compiledPlp.add(tab + "li $t1, 1");
}
compiledPlp.add(tab
+ command
+ " "
+ "$t2, "
+ ((isLeftBoolean) ? "$t0" : registerMap
.get(left))
+ ", "
+ ((isRightBoolean) ? "$t1" : registerMap
.get(right)));
}
else
compiledPlp.add(tab
+ command
+ " "
+ "$t2"
+ ", "
+ registerMap.get(left)
+ ", "
+ ((!isRightGlobal) ? registerMap.get(right)
: right));
}
else if (isLeftLiteral && !isRightLiteral)
{
if (operator.equals("+"))
command = commands.get(operator + "L");
else
command = commands.get(operator);
compiledPlp.add(tab + command + " " + "$t2, "
+ registerMap.get(right) + ", " + left);
}
else if (isRightLiteral && !isLeftLiteral)
{
if (operator.equals("+"))
command = commands.get(operator + "L");
else
command = commands.get(operator);
if (command == null)
{
if (operator.equals("="))
compiledPlp.add(tab + "li" + " "
+ registerMap.get(left) + ", " + right);
else if (operator.equals("+="))
compiledPlp.add(tab + "addiu" + " "
+ registerMap.get(left) + ", "
+ registerMap.get(left) + ", " + right);
else if (operator.equals("-="))
compiledPlp.add(tab + "addiu" + " "
+ registerMap.get(left) + ", "
+ registerMap.get(left) + ", " + (right));
else if (operator.equals("<<"))
compiledPlp.add(tab + "sll $t2, "
+ registerMap.get(left) + ", " + right);
else if (operator.equals(">>"))
compiledPlp.add("");
}
else
compiledPlp.add(tab + command + " $t2, "
+ registerMap.get(left) + ", " + right);
}
else if (isLeftLiteral && isRightLiteral)
{
if (operator.equals("+"))
command = commands.get(operator + "L");
else
command = commands.get(operator);
compiledPlp.add(tab + command + " $t2, " + left + ", "
+ right);
}
}
else if (isLeftOperator)
{
if (isRightLiteral && operator.equals("+"))
command = commands.get(operator + "L");
else
command = commands.get(operator);
if (isRightLiteral)
compiledPlp.add(tab + command + " " + "$t2, $t2, " + right);
else
compiledPlp.add(tab + command + " " + "$t2, $t2, "
+ registerMap.get(right));
}
else if (isRightOperator)
{
command = commands.get(operator);
if (command == null)
{
if (operator.equals("="))
compiledPlp.add(tab + "li" + " "
+ registerMap.get(left) + ", " + "$t2");
}
else
{
if (isLeftLiteral)
{
}
else
{
// compiledPlp.add(tab);
}
}
}
else if (isLeftEmpty || isRightEmpty)
{
if ((isLeftLiteral || isRightLiteral) && operator.equals("+"))
command = commands.get(operator + "L");
else
command = commands.get(operator);
if (isLeftLiteral)
compiledPlp.add(tab + command + " " + "$t2, $t2, " + left);
else if (isRightLiteral)
compiledPlp.add(tab + command + " " + "$t2, $t2, " + right);
else if (isLeftEmpty)
compiledPlp.add(tab + command + " " + "$t2, $t2, "
+ registerMap.get(right));
else if (isRightEmpty)
compiledPlp.add(tab + command + " " + "$t2, $t2, "
+ registerMap.get(left));
}
}
return currentNumber;
}
private void convertTree(ParseNode childNode, List<String[]> executables)
{
for (ParseNode nodeIt : childNode.getChildren())
{
if (nodeIt.getType() == NodeType.OPERATOR)
{
convertOperator(nodeIt, executables);
}
else if (nodeIt.getType() == NodeType.SEQUENCE)
{
convertTree((SequenceNode) nodeIt, executables);
}
else if (nodeIt.getType() == NodeType.VALUE)
{
// System.out.println("Value Node");
// System.out.println(nodeIt.getValue());
}
}
}
private void convertOperator(ParseNode childNode, List<String[]> executables)
{
// System.out.println("OPERATOR: " + childNode.getValue());
ParseNode leftChild = childNode.getChildren().get(0);
ParseNode rightChild = childNode.getChildren().get(1);
if (leftChild.getType() == NodeType.VALUE)
{
// System.out.println(childNode.getValue() + " ValLeft: " +
// leftChild.getValue());
}
else if (leftChild.getType() == NodeType.OPERATOR)
{
// System.out.print(childNode.getValue() + " OperLeft-> ");
convertOperator(leftChild, executables);
}
else if (leftChild.getType() == NodeType.SEQUENCE)
{
// System.out.print(childNode.getValue() + " SeqLeft-> ");
convertTree((SequenceNode) leftChild, executables);
}
if (rightChild.getType() == NodeType.VALUE)
{
// System.out.println(childNode.getValue() + " ValRight: " +
// rightChild.getValue());
}
else if (rightChild.getType() == NodeType.OPERATOR)
{
// System.out.print(childNode.getValue() + " OperRight-> ");
convertOperator(rightChild, executables);
}
else if (rightChild.getType() == NodeType.SEQUENCE)
{
// System.out.print(childNode.getValue() + " SeqRight-> ");
convertTree((SequenceNode) rightChild, executables);
}
String[] strings = new String[3];
strings[0] = leftChild.getValue();
strings[1] = childNode.getValue();
strings[2] = rightChild.getValue();
executables.add(strings);
}
public static void main(String[] args) throws FileNotFoundException,
LexException, ParseException
{
File inputFile = new File("sampleData/BasicArithmatic.java");
File outputFile = new File(
"sampleData/output/BasicArithmatic.java.lexed");
File dumpFile = new File("sampleData/BasicArithmatic.java.PREPROCESS");
File plpFile = new File("sampleData/main.asm");
Lexer lexer = new Lexer(inputFile);
lexer.dumpPreprocessData(dumpFile);
List<Token> tokens = lexer.lex();
// TODO replace with parser
ParseClass parseClass = new ParseClass(tokens);
ClassConstruct classConstruct = parseClass.parseClassConstruct(tokens);
PlpCompiler compiler = new PlpCompiler(classConstruct);
List<String> compiledPlp = compiler.compile();
PrintWriter compiledWriter = new PrintWriter(plpFile);
for (String line : compiledPlp)
compiledWriter.println(line);
compiledWriter.close();
}
private void initializeTypeSizesAndRegisters()
{
typeSize = new HashMap<String, Integer>();
typeSize.put("byte", 32);
typeSize.put("short", 32);
typeSize.put("int", 32);
typeSize.put("long", 64);
typeSize.put("boolean", 32);
typeSize.put("char", 32);
registers = new HashMap<>();
int tCap = registerFloor + 6;
for (int index = registerFloor; index <= registerCap; index++)
{
if (index < tCap)
registers.put(index, "$t" + index);
else
registers.put(index, "$s" + (index - tCap));
}
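        // With registerFloor = 4, registerCap = 17 and tCap = 10, indices 4-9 map
        // to $t4-$t9 and indices 10-17 map to $s0-$s7; $t0-$t3 are left free for
        // the temporaries used by convertStatement.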
commands = new HashMap<>();
commands.put("+L", "addiu");
commands.put("+", "addu");
commands.put("|L", "ori");
commands.put("|", "or");
commands.put("-", "subu");
commands.put("*", "mullo");
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BLOCK_GROUP_INDEX_MASK;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_BLOCKS_IN_GROUP;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.spy;
import java.io.IOException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.Whitebox;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.stubbing.Answer;
/**
* Tests the sequential blockGroup ID generation mechanism and blockGroup ID
* collision handling.
*/
public class TestSequentialBlockGroupId {
private static final Logger LOG = LoggerFactory
.getLogger("TestSequentialBlockGroupId");
private final ErasureCodingPolicy ecPolicy =
StripedFileTestUtil.getDefaultECPolicy();
private final short REPLICATION = 1;
private final long SEED = 0;
private final int dataBlocks = ecPolicy.getNumDataUnits();
private final int parityBlocks = ecPolicy.getNumParityUnits();
private final int cellSize = ecPolicy.getCellSize();
private final int stripesPerBlock = 2;
private final int blockSize = cellSize * stripesPerBlock;
private final int numDNs = dataBlocks + parityBlocks + 2;
private final int blockGrpCount = 4;
private final int fileLen = blockSize * dataBlocks * blockGrpCount;
private MiniDFSCluster cluster;
private DistributedFileSystem fs;
private SequentialBlockGroupIdGenerator blockGrpIdGenerator;
private Path ecDir = new Path("/ecDir");
@Before
public void setup() throws Exception {
Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
cluster.waitActive();
fs = cluster.getFileSystem();
fs.enableErasureCodingPolicy(
StripedFileTestUtil.getDefaultECPolicy().getName());
blockGrpIdGenerator = cluster.getNamesystem().getBlockManager()
.getBlockIdManager().getBlockGroupIdGenerator();
fs.mkdirs(ecDir);
cluster.getFileSystem().getClient().setErasureCodingPolicy("/ecDir",
StripedFileTestUtil.getDefaultECPolicy().getName());
}
@After
public void teardown() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
/**
   * Test that blockGroup IDs are generated with unique values.
*/
@Test(timeout = 60000)
public void testBlockGroupIdGeneration() throws IOException {
long blockGroupIdInitialValue = blockGrpIdGenerator.getCurrentValue();
// Create a file that is 4 blocks long.
Path path = new Path(ecDir, "testBlockGrpIdGeneration.dat");
DFSTestUtil.createFile(fs, path, cellSize, fileLen, blockSize, REPLICATION,
SEED);
List<LocatedBlock> blocks = DFSTestUtil.getAllBlocks(fs, path);
assertThat("Wrong BlockGrps", blocks.size(), is(blockGrpCount));
    // Reset the block group generator so the expected block IDs can be verified.
    blockGrpIdGenerator.setCurrentValue(blockGroupIdInitialValue);
    // Ensure that the block IDs are generated with unique values.
for (int i = 0; i < blocks.size(); ++i) {
blockGrpIdGenerator
.skipTo((blockGrpIdGenerator.getCurrentValue() & ~BLOCK_GROUP_INDEX_MASK)
+ MAX_BLOCKS_IN_GROUP);
long nextBlockExpectedId = blockGrpIdGenerator.getCurrentValue();
long nextBlockGrpId = blocks.get(i).getBlock().getBlockId();
LOG.info("BlockGrp" + i + " id is " + nextBlockGrpId);
assertThat("BlockGrpId mismatches!", nextBlockGrpId,
is(nextBlockExpectedId));
}
// verify that the blockGroupId resets on #clear call.
cluster.getNamesystem().getBlockManager().clear();
assertThat("BlockGrpId mismatches!", blockGrpIdGenerator.getCurrentValue(),
is(Long.MIN_VALUE));
}
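  /*
   * Worked example of the skipTo(...) arithmetic used above, assuming the stock
   * constants MAX_BLOCKS_IN_GROUP = 16 and BLOCK_GROUP_INDEX_MASK = 15: if the
   * generator currently holds -1005, then (-1005 & ~15) = -1008 and
   * -1008 + 16 = -992, i.e. the counter is rounded down to the start of its
   * 16-wide block group and then advanced to the next group boundary, which is
   * the ID the next striped block group is expected to receive.
   */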
/**
* Test that collisions in the blockGroup ID space are handled gracefully.
*/
@Test(timeout = 60000)
public void testTriggerBlockGroupIdCollision() throws IOException {
long blockGroupIdInitialValue = blockGrpIdGenerator.getCurrentValue();
// Create a file with a few blocks to rev up the global block ID
// counter.
Path path1 = new Path(ecDir, "testBlockGrpIdCollisionDetection_file1.dat");
DFSTestUtil.createFile(fs, path1, cellSize, fileLen, blockSize,
REPLICATION, SEED);
List<LocatedBlock> blocks1 = DFSTestUtil.getAllBlocks(fs, path1);
assertThat("Wrong BlockGrps", blocks1.size(), is(blockGrpCount));
// Rewind the block ID counter in the name system object. This will result
// in block ID collisions when we try to allocate new blocks.
blockGrpIdGenerator.setCurrentValue(blockGroupIdInitialValue);
// Trigger collisions by creating a new file.
Path path2 = new Path(ecDir, "testBlockGrpIdCollisionDetection_file2.dat");
DFSTestUtil.createFile(fs, path2, cellSize, fileLen, blockSize,
REPLICATION, SEED);
List<LocatedBlock> blocks2 = DFSTestUtil.getAllBlocks(fs, path2);
assertThat("Wrong BlockGrps", blocks2.size(), is(blockGrpCount));
// Make sure that file1 and file2 block IDs are different
for (LocatedBlock locBlock1 : blocks1) {
long blockId1 = locBlock1.getBlock().getBlockId();
for (LocatedBlock locBlock2 : blocks2) {
long blockId2 = locBlock2.getBlock().getBlockId();
assertThat("BlockGrpId mismatches!", blockId1, is(not(blockId2)));
}
}
}
/**
   * Test that collisions in the blockGroup ID space are handled gracefully when
   * the ID is occupied by a legacy block.
*/
@Test(timeout = 60000)
public void testTriggerBlockGroupIdCollisionWithLegacyBlockId()
throws Exception {
long blockGroupIdInitialValue = blockGrpIdGenerator.getCurrentValue();
blockGrpIdGenerator
.skipTo((blockGrpIdGenerator.getCurrentValue() & ~BLOCK_GROUP_INDEX_MASK)
+ MAX_BLOCKS_IN_GROUP);
final long curBlockGroupIdValue = blockGrpIdGenerator.getCurrentValue();
    // Create a contiguous block with a negative blockId so that it triggers a
    // collision during blockGroup ID generation.
FSNamesystem fsn = cluster.getNamesystem();
// Replace SequentialBlockIdGenerator with a spy
SequentialBlockIdGenerator blockIdGenerator = spy(fsn.getBlockManager()
.getBlockIdManager().getBlockIdGenerator());
Whitebox.setInternalState(fsn.getBlockManager().getBlockIdManager(),
"blockIdGenerator", blockIdGenerator);
SequentialBlockIdGenerator spySequentialBlockIdGenerator = new SequentialBlockIdGenerator(
null) {
@Override
public long nextValue() {
return curBlockGroupIdValue;
}
};
final Answer<Object> delegator = new GenericTestUtils.DelegateAnswer(
spySequentialBlockIdGenerator);
doAnswer(delegator).when(blockIdGenerator).nextValue();
Path path1 = new Path("/testCollisionWithLegacyBlock_file1.dat");
DFSTestUtil.createFile(fs, path1, 1024, REPLICATION, SEED);
List<LocatedBlock> contiguousBlocks = DFSTestUtil.getAllBlocks(fs, path1);
assertThat(contiguousBlocks.size(), is(1));
Assert.assertEquals("Unexpected BlockId!", curBlockGroupIdValue,
contiguousBlocks.get(0).getBlock().getBlockId());
// Reset back to the initial value to trigger collision
blockGrpIdGenerator.setCurrentValue(blockGroupIdInitialValue);
// Trigger collisions by creating a new file.
Path path2 = new Path(ecDir, "testCollisionWithLegacyBlock_file2.dat");
DFSTestUtil.createFile(fs, path2, cellSize, fileLen, blockSize,
REPLICATION, SEED);
List<LocatedBlock> blocks2 = DFSTestUtil.getAllBlocks(fs, path2);
assertThat("Wrong BlockGrps", blocks2.size(), is(blockGrpCount));
// Make sure that file1 and file2 block IDs are different
for (LocatedBlock locBlock1 : contiguousBlocks) {
long blockId1 = locBlock1.getBlock().getBlockId();
for (LocatedBlock locBlock2 : blocks2) {
long blockId2 = locBlock2.getBlock().getBlockId();
assertThat("BlockGrpId mismatches!", blockId1, is(not(blockId2)));
}
}
}
}
|
|
package org.uberfire.client.workbench;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import org.jboss.errai.ioc.client.container.IOCBeanDef;
import org.jboss.errai.ioc.client.container.SyncBeanManager;
import org.uberfire.client.mvp.PerspectiveActivity;
import org.uberfire.client.mvp.UIPart;
import org.uberfire.client.workbench.events.BeforeClosePlaceEvent;
import org.uberfire.client.workbench.events.ChangeTitleWidgetEvent;
import org.uberfire.client.workbench.events.ClosePlaceEvent;
import org.uberfire.client.workbench.events.DropPlaceEvent;
import org.uberfire.client.workbench.events.MinimizePlaceEvent;
import org.uberfire.client.workbench.events.PanelFocusEvent;
import org.uberfire.client.workbench.events.PlaceGainFocusEvent;
import org.uberfire.client.workbench.events.PlaceLostFocusEvent;
import org.uberfire.client.workbench.events.RestorePlaceEvent;
import org.uberfire.client.workbench.events.SelectPlaceEvent;
import org.uberfire.client.workbench.panels.WorkbenchPanelPresenter;
import org.uberfire.client.workbench.panels.WorkbenchPanelView;
import org.uberfire.client.workbench.part.WorkbenchPartPresenter;
import org.uberfire.client.workbench.widgets.statusbar.WorkbenchStatusBarPresenter;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.model.ContextDisplayMode;
import org.uberfire.workbench.model.PanelDefinition;
import org.uberfire.workbench.model.PanelType;
import org.uberfire.workbench.model.PartDefinition;
import org.uberfire.workbench.model.PerspectiveDefinition;
import org.uberfire.workbench.model.Position;
import org.uberfire.workbench.model.impl.PanelDefinitionImpl;
import org.uberfire.workbench.model.menu.Menus;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.SimplePanel;
public abstract class AbstractPanelManagerImpl implements PanelManager {
@Inject
Event<BeforeClosePlaceEvent> beforeClosePlaceEvent;
@Inject
Event<PlaceGainFocusEvent> placeGainFocusEvent;
@Inject
Event<PlaceLostFocusEvent> placeLostFocusEvent;
@Inject
Event<PanelFocusEvent> panelFocusEvent;
@Inject
Event<SelectPlaceEvent> selectPlaceEvent;
@Inject
WorkbenchStatusBarPresenter statusBar;
@Inject
SyncBeanManager iocManager;
PanelDefinition root = null;
PerspectiveDefinition perspective;
Map<PartDefinition, WorkbenchPartPresenter> mapPartDefinitionToPresenter = new HashMap<PartDefinition, WorkbenchPartPresenter>();
Map<PanelDefinition, WorkbenchPanelPresenter> mapPanelDefinitionToPresenter = new HashMap<PanelDefinition, WorkbenchPanelPresenter>();
PartDefinition activePart = null;
@Override
public PerspectiveDefinition getPerspective() {
return this.perspective;
}
@Override
public void setPerspective( final PerspectiveDefinition perspective ) {
final PanelDefinition newRoot = perspective.getRoot();
final WorkbenchPanelPresenter oldPresenter = mapPanelDefinitionToPresenter.remove( root );
SimplePanel container;
if ( oldPresenter != null && oldPresenter.getPanelView().asWidget().getParent() != null ) {
container = (SimplePanel) oldPresenter.getPanelView().asWidget().getParent();
} else {
container = null;
}
getBeanFactory().destroy( root );
this.root = newRoot;
this.perspective = perspective;
WorkbenchPanelPresenter newPresenter = getWorkbenchPanelPresenter( newRoot );
if ( newPresenter == null ) {
newPresenter = getBeanFactory().newWorkbenchPanel( newRoot );
mapPanelDefinitionToPresenter.put( newRoot, newPresenter );
}
if ( container != null ) {
if ( oldPresenter != null ) {
oldPresenter.removePanel();
}
container.setWidget( newPresenter.getPanelView() );
}
}
protected abstract BeanFactory getBeanFactory();
@Override
public PanelDefinition getRoot() {
return this.root;
}
@Override
public void setRoot( final PanelDefinition panel ) {
if ( !panel.isRoot() ) {
throw new IllegalArgumentException( "Panel is not a root panel." );
}
if ( root == null ) {
this.root = panel;
} else {
throw new IllegalArgumentException( "Root has already been set. Unable to set root." );
}
WorkbenchPanelPresenter panelPresenter = getWorkbenchPanelPresenter( panel );
if ( panelPresenter == null ) {
panelPresenter = getBeanFactory().newWorkbenchPanel( panel );
mapPanelDefinitionToPresenter.put( panel,
panelPresenter );
}
onPanelFocus( panel );
}
public void addWorkbenchPart( final PlaceRequest place,
final PartDefinition part,
final PanelDefinition panel,
final Menus menus,
final UIPart uiPart ) {
addWorkbenchPart( place, part, panel, menus, uiPart, null );
}
@Override
public void addWorkbenchPart( final PlaceRequest place,
final PartDefinition part,
final PanelDefinition panel,
final Menus menus,
final UIPart uiPart,
final String contextId ) {
WorkbenchPartPresenter partPresenter = mapPartDefinitionToPresenter.get( part );
if ( partPresenter == null ) {
partPresenter = getBeanFactory().newWorkbenchPart( menus, uiPart.getTitle(), uiPart.getTitleDecoration(), part );
partPresenter.setWrappedWidget( uiPart.getWidget() );
partPresenter.setContextId( contextId );
mapPartDefinitionToPresenter.put( part, partPresenter );
}
if ( part.isMinimized() ) {
statusBar.addMinimizedPlace( part.getPlace() );
} else {
final WorkbenchPanelPresenter panelPresenter = getWorkbenchPanelPresenter( panel );
if ( panelPresenter == null ) {
throw new IllegalArgumentException( "Unable to add Part to Panel. Panel has not been created." );
}
panelPresenter.addPart( partPresenter.getPartView(), contextId );
}
//The model for a Perspective is already fully populated. Don't go adding duplicates.
if ( !panel.getParts().contains( part ) ) {
panel.addPart( part );
}
//Select newly inserted part
selectPlaceEvent.fire( new SelectPlaceEvent( place ) );
}
@Override
public PerspectiveActivity getDefaultPerspectiveActivity() {
PerspectiveActivity defaultPerspective = null;
final Collection<IOCBeanDef<PerspectiveActivity>> perspectives = iocManager.lookupBeans( PerspectiveActivity.class );
final Iterator<IOCBeanDef<PerspectiveActivity>> perspectivesIterator = perspectives.iterator();
while ( perspectivesIterator.hasNext() ) {
final IOCBeanDef<PerspectiveActivity> perspective = perspectivesIterator.next();
final PerspectiveActivity instance = perspective.getInstance();
if ( instance.isDefault() ) {
defaultPerspective = instance;
break;
} else {
iocManager.destroyBean( instance );
}
}
return defaultPerspective;
}
WorkbenchPanelPresenter getWorkbenchPanelPresenter( PanelDefinition panel ) {
return mapPanelDefinitionToPresenter.get( panel );
}
@Override
public PanelDefinition addWorkbenchPanel( PanelDefinition targetPanel,
final Position position ) {
        targetPanel = validTargetPanel( targetPanel );
final PanelDefinition childPanel = new PanelDefinitionImpl( targetPanel.getDefaultChildPanelType() );
return addWorkbenchPanel( targetPanel,
childPanel,
position );
}
@Override
public PanelDefinition addWorkbenchPanel( PanelDefinition targetPanel,
final Position position,
final Integer height,
final Integer width,
final Integer minHeight,
final Integer minWidth ) {
        targetPanel = validTargetPanel( targetPanel );
final PanelDefinition childPanel = new PanelDefinitionImpl( targetPanel.getDefaultChildPanelType() );
childPanel.setHeight( height );
childPanel.setWidth( width );
childPanel.setMinHeight( minHeight );
childPanel.setMinWidth( minWidth );
return addWorkbenchPanel( targetPanel,
childPanel,
position );
}
//TODO hernsys hack
    private PanelDefinition validTargetPanel( PanelDefinition targetPanel ) {
        if ( targetPanel == null ) {
            targetPanel = new PanelDefinitionImpl( PanelType.ROOT_LIST, PanelType.STATIC );
            targetPanel.setHeight( 785 );
            targetPanel.setWidth( 1427 );
            targetPanel.setContextDisplayMode( ContextDisplayMode.SHOW );
            targetPanel.setPosition( Position.ROOT );
        }
        return targetPanel;
    }
@Override
public void onPartFocus( final PartDefinition part ) {
activePart = part;
panelFocusEvent.fire( new PanelFocusEvent( part.getParentPanel() ) );
placeGainFocusEvent.fire( new PlaceGainFocusEvent( part.getPlace() ) );
}
@Override
public void onPartLostFocus() {
if ( activePart == null ) {
return;
}
placeLostFocusEvent.fire( new PlaceLostFocusEvent( activePart.getPlace() ) );
activePart = null;
}
@Override
public void onPanelFocus( final PanelDefinition panel ) {
for ( Map.Entry<PanelDefinition, WorkbenchPanelPresenter> e : mapPanelDefinitionToPresenter.entrySet() ) {
e.getValue().setFocus( e.getKey().equals( panel ) );
}
}
@Override
public void onBeforePartClose( final PartDefinition part ) {
beforeClosePlaceEvent.fire( new BeforeClosePlaceEvent( part.getPlace() ) );
}
@SuppressWarnings("unused")
private void onSelectPlaceEvent( @Observes SelectPlaceEvent event ) {
final PlaceRequest place = event.getPlace();
for ( Map.Entry<PanelDefinition, WorkbenchPanelPresenter> e : mapPanelDefinitionToPresenter.entrySet() ) {
for ( PartDefinition part : e.getValue().getDefinition().getParts() ) {
if ( part.getPlace().equals( place ) ) {
e.getValue().selectPart( part );
onPanelFocus( e.getKey() );
}
}
}
}
@SuppressWarnings("unused")
private void onClosePlaceEvent( @Observes ClosePlaceEvent event ) {
final PartDefinition part = getPartForPlace( event.getPlace() );
if ( part != null ) {
removePart( part );
}
}
@SuppressWarnings("unused")
private void onDropPlaceEvent( @Observes DropPlaceEvent event ) {
final PartDefinition part = getPartForPlace( event.getPlace() );
if ( part != null ) {
removePart( part );
}
}
@SuppressWarnings("unused")
private void onMinimizePlaceEvent( @Observes MinimizePlaceEvent event ) {
final PlaceRequest placeToMinimize = event.getPlace();
final PartDefinition partToMinimize = getPartForPlace( placeToMinimize );
WorkbenchPanelPresenter presenterToMinimize = null;
for ( Map.Entry<PanelDefinition, WorkbenchPanelPresenter> e : mapPanelDefinitionToPresenter.entrySet() ) {
final PanelDefinition definition = e.getKey();
final WorkbenchPanelPresenter presenter = e.getValue();
if ( presenter.getDefinition().getParts().contains( partToMinimize ) ) {
partToMinimize.setMinimized( true );
presenter.removePart( partToMinimize );
if ( presenter.getDefinition().isMinimized() ) {
presenterToMinimize = presenter;
}
break;
}
}
if ( presenterToMinimize != null ) {
presenterToMinimize.removePanel();
getBeanFactory().destroy( presenterToMinimize );
mapPanelDefinitionToPresenter.remove( presenterToMinimize.getDefinition() );
}
}
@SuppressWarnings("unused")
private void onRestorePlaceEvent( @Observes RestorePlaceEvent event ) {
final PlaceRequest place = event.getPlace();
final PartDefinition partToRestore = getPartForPlace( place );
final PanelDefinition panelToRestore = partToRestore.getParentPanel();
final Integer height = panelToRestore.getHeight();
final Integer width = panelToRestore.getWidth();
final Integer minHeight = panelToRestore.getMinHeight();
final Integer minWidth = panelToRestore.getMinWidth();
partToRestore.setMinimized( false );
//Restore containing panel
if ( !mapPanelDefinitionToPresenter.containsKey( panelToRestore ) ) {
//TODO {manstis} Position needs to be looked up from model - will need "outer" panel feature :(
PanelDefinition targetPanel = findTargetPanel( panelToRestore,
root );
if ( targetPanel == null ) {
targetPanel = root;
}
addWorkbenchPanel( targetPanel,
panelToRestore,
panelToRestore.getPosition() );
}
//Restore part
final WorkbenchPartPresenter presenter = mapPartDefinitionToPresenter.get( partToRestore );
addWorkbenchPart( partToRestore.getPlace(),
partToRestore,
panelToRestore,
presenter.getMenus(),
new UIPart( presenter.getTitle(), presenter.getTitleDecoration(), presenter.getPartView() ) );
}
private PanelDefinition findTargetPanel( final PanelDefinition panelToFind,
final PanelDefinition panelToSearch ) {
final PanelDefinition northChild = panelToSearch.getChild( Position.NORTH );
final PanelDefinition southChild = panelToSearch.getChild( Position.SOUTH );
final PanelDefinition eastChild = panelToSearch.getChild( Position.EAST );
final PanelDefinition westChild = panelToSearch.getChild( Position.WEST );
PanelDefinition targetPanel = null;
if ( northChild != null ) {
if ( northChild.equals( panelToFind ) ) {
return panelToSearch;
} else {
targetPanel = findTargetPanel( panelToFind,
northChild );
}
}
if ( southChild != null ) {
if ( southChild.equals( panelToFind ) ) {
return panelToSearch;
} else {
targetPanel = findTargetPanel( panelToFind,
southChild );
}
}
if ( eastChild != null ) {
if ( eastChild.equals( panelToFind ) ) {
return panelToSearch;
} else {
targetPanel = findTargetPanel( panelToFind,
eastChild );
}
}
if ( westChild != null ) {
if ( westChild.equals( panelToFind ) ) {
return panelToSearch;
} else {
targetPanel = findTargetPanel( panelToFind,
westChild );
}
}
return targetPanel;
}
private PartDefinition getPartForPlace( final PlaceRequest place ) {
for ( PartDefinition part : mapPartDefinitionToPresenter.keySet() ) {
if ( part.getPlace().equals( place ) ) {
return part;
}
}
return null;
}
@SuppressWarnings("unused")
private void onChangeTitleWidgetEvent( @Observes ChangeTitleWidgetEvent event ) {
final PlaceRequest place = event.getPlaceRequest();
final IsWidget titleDecoration = event.getTitleDecoration();
final String title = event.getTitle();
for ( Map.Entry<PanelDefinition, WorkbenchPanelPresenter> e : mapPanelDefinitionToPresenter.entrySet() ) {
final PanelDefinition panel = e.getKey();
final WorkbenchPanelPresenter presenter = e.getValue();
for ( PartDefinition part : panel.getParts() ) {
if ( place.equals( part.getPlace() ) ) {
mapPartDefinitionToPresenter.get( part ).setTitle( title );
presenter.changeTitle( part, title, titleDecoration );
break;
}
}
}
}
private void removePart( final PartDefinition part ) {
getBeanFactory().destroy( mapPartDefinitionToPresenter.get( part ) );
mapPartDefinitionToPresenter.remove( part );
WorkbenchPanelPresenter presenterToRemove = null;
for ( Map.Entry<PanelDefinition, WorkbenchPanelPresenter> e : mapPanelDefinitionToPresenter.entrySet() ) {
final PanelDefinition definition = e.getKey();
final WorkbenchPanelPresenter presenter = e.getValue();
if ( presenter.getDefinition().getParts().contains( part ) ) {
presenter.removePart( part );
definition.getParts().remove( part );
if ( !definition.isRoot() && definition.getParts().size() == 0 ) {
presenterToRemove = presenter;
}
break;
}
}
if ( presenterToRemove != null ) {
presenterToRemove.removePanel();
getBeanFactory().destroy( presenterToRemove );
removePanel( presenterToRemove.getDefinition(),
root );
}
}
@Override
public WorkbenchPanelView getPanelView( final PanelDefinition panel ) {
return getWorkbenchPanelPresenter( panel ).getPanelView();
}
private void removePanel( final PanelDefinition panelToRemove,
final PanelDefinition panelToSearch ) {
final PanelDefinition northChild = panelToSearch.getChild( Position.NORTH );
final PanelDefinition southChild = panelToSearch.getChild( Position.SOUTH );
final PanelDefinition eastChild = panelToSearch.getChild( Position.EAST );
final PanelDefinition westChild = panelToSearch.getChild( Position.WEST );
if ( northChild != null ) {
if ( northChild.equals( panelToRemove ) ) {
mapPanelDefinitionToPresenter.remove( northChild );
removePanel( panelToRemove,
panelToSearch,
Position.NORTH );
} else {
removePanel( panelToRemove,
northChild );
}
}
if ( southChild != null ) {
if ( southChild.equals( panelToRemove ) ) {
mapPanelDefinitionToPresenter.remove( southChild );
removePanel( panelToRemove,
panelToSearch,
Position.SOUTH );
} else {
removePanel( panelToRemove,
southChild );
}
}
if ( eastChild != null ) {
if ( eastChild.equals( panelToRemove ) ) {
mapPanelDefinitionToPresenter.remove( eastChild );
removePanel( panelToRemove,
panelToSearch,
Position.EAST );
} else {
removePanel( panelToRemove,
eastChild );
}
}
if ( westChild != null ) {
if ( westChild.equals( panelToRemove ) ) {
mapPanelDefinitionToPresenter.remove( westChild );
removePanel( panelToRemove,
panelToSearch,
Position.WEST );
} else {
removePanel( panelToRemove,
westChild );
}
}
}
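    // Detaches 'panelToRemove' from 'panelToSearch' at the given position and
    // re-attaches the removed panel's own NORTH/SOUTH/EAST/WEST children (its
    // "orphans") directly to 'panelToSearch', so nested panels are not lost.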
private void removePanel( final PanelDefinition panelToRemove,
final PanelDefinition panelToSearch,
final Position position ) {
panelToSearch.removeChild( position );
final PanelDefinition northOrphan = panelToRemove.getChild( Position.NORTH );
final PanelDefinition southOrphan = panelToRemove.getChild( Position.SOUTH );
final PanelDefinition eastOrphan = panelToRemove.getChild( Position.EAST );
final PanelDefinition westOrphan = panelToRemove.getChild( Position.WEST );
panelToSearch.appendChild( Position.NORTH,
northOrphan );
panelToSearch.appendChild( Position.SOUTH,
southOrphan );
panelToSearch.appendChild( Position.EAST,
eastOrphan );
panelToSearch.appendChild( Position.WEST,
westOrphan );
}
}
|
|
/*
* Copyright (c) 2008-2010, Matthias Mann
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Matthias Mann nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package de.matthiasmann.twl;
import de.matthiasmann.twl.ListBox.CallbackReason;
import de.matthiasmann.twl.model.*;
import de.matthiasmann.twl.model.FileSystemModel.FileFilter;
import de.matthiasmann.twl.utils.CallbackSupport;
import de.matthiasmann.twl.utils.NaturalSortComparator;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.prefs.Preferences;
/**
* A File selector widget using FileSystemModel
*
* @author Matthias Mann
*/
public class FileSelector extends DialogLayout {
public interface Callback {
public void filesSelected(Object[] files);
public void canceled();
}
public interface Callback2 extends Callback {
public void folderChanged(Object folder);
public void selectionChanged(FileTable.Entry[] selection);
}
public static class NamedFileFilter {
private final String name;
private final FileSystemModel.FileFilter fileFilter;
public NamedFileFilter(String name, FileFilter fileFilter) {
this.name = name;
this.fileFilter = fileFilter;
}
public String getDisplayName() {
return name;
}
public FileSystemModel.FileFilter getFileFilter() {
return fileFilter;
}
}
public static final NamedFileFilter AllFilesFilter = new NamedFileFilter("All files", null);
private final IntegerModel flags;
private final MRUListModel<String> folderMRU;
final MRUListModel<String> filesMRU;
private final TreeComboBox currentFolder;
private final Label labelCurrentFolder;
private final FileTable fileTable;
private final ScrollPane fileTableSP;
private final Button btnUp;
private final Button btnHome;
private final Button btnFolderMRU;
private final Button btnFilesMRU;
private final Button btnOk;
private final Button btnCancel;
private final Button btnRefresh;
private final Button btnShowFolders;
private final Button btnShowHidden;
private final ComboBox<String> fileFilterBox;
private final FileFiltersModel fileFiltersModel;
private final EditFieldAutoCompletionWindow autoCompletion;
private boolean allowFolderSelection;
private Callback[] callbacks;
private NamedFileFilter activeFileFilter;
FileSystemModel fsm;
private FileSystemTreeModel model;
private Widget userWidgetBottom;
private Widget userWidgetRight;
private Object fileToSelectOnSetCurrentNode;
/**
* Create a FileSelector without persistent state
*/
public FileSelector() {
this(null, null);
}
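    // Usage sketch (illustrative only): a typical setup pairs the selector with a
    // concrete FileSystemModel and a Callback. JavaFileSystemModel.getInstance()
    // and the addCallback(Callback) registration method are assumed here and are
    // not shown in this excerpt.
    //
    //   FileSelector selector = new FileSelector();
    //   selector.setFileSystemModel(JavaFileSystemModel.getInstance());
    //   selector.addCallback(new Callback() {
    //       public void filesSelected(Object[] files) { /* open the files */ }
    //       public void canceled() { /* close the dialog */ }
    //   });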
public FileSelector(Preferences prefs, String prefsKey) {
if((prefs == null) != (prefsKey == null)) {
throw new IllegalArgumentException("'prefs' and 'prefsKey' must both be valid or both null");
}
if(prefs != null) {
flags = new PersistentIntegerModel(prefs, prefsKey.concat("_Flags"), 0, 0xFFFF, 0);
folderMRU = new PersistentMRUListModel<String>(10, String.class, prefs, prefsKey.concat("_foldersMRU"));
filesMRU = new PersistentMRUListModel<String>(20, String.class, prefs, prefsKey.concat("_filesMRU"));
} else {
flags = new SimpleIntegerModel(0, 0xFFFF, 0);
folderMRU = new SimpleMRUListModel<String>(10);
filesMRU = new SimpleMRUListModel<String>(20);
}
currentFolder = new TreeComboBox();
currentFolder.setTheme("currentFolder");
fileTable = new FileTable();
fileTable.setTheme("fileTable");
fileTable.addCallback(new FileTable.Callback() {
public void selectionChanged() {
FileSelector.this.selectionChanged();
}
public void sortingChanged() {
}
});
btnUp = new Button();
btnUp.setTheme("buttonUp");
btnUp.addCallback(new Runnable() {
public void run() {
goOneLevelUp();
}
});
btnHome = new Button();
btnHome.setTheme("buttonHome");
btnHome.addCallback(new Runnable() {
public void run() {
goHome();
}
});
btnFolderMRU = new Button();
btnFolderMRU.setTheme("buttonFoldersMRU");
btnFolderMRU.addCallback(new Runnable() {
public void run() {
showFolderMRU();
}
});
btnFilesMRU = new Button();
btnFilesMRU.setTheme("buttonFilesMRU");
btnFilesMRU.addCallback(new Runnable() {
public void run() {
showFilesMRU();
}
});
btnOk = new Button();
btnOk.setTheme("buttonOk");
btnOk.addCallback(new Runnable() {
public void run() {
acceptSelection();
}
});
btnCancel = new Button();
btnCancel.setTheme("buttonCancel");
btnCancel.addCallback(new Runnable() {
public void run() {
fireCanceled();
}
});
currentFolder.setPathResolver(new TreeComboBox.PathResolver() {
public TreeTableNode resolvePath(TreeTableModel model, String path) throws IllegalArgumentException {
return FileSelector.this.resolvePath(path);
}
});
currentFolder.addCallback(new TreeComboBox.Callback() {
public void selectedNodeChanged(TreeTableNode node, TreeTableNode previousChildNode) {
setCurrentNode(node, previousChildNode);
}
});
autoCompletion = new EditFieldAutoCompletionWindow(currentFolder.getEditField());
autoCompletion.setUseInvokeAsync(true);
currentFolder.getEditField().setAutoCompletionWindow(autoCompletion);
fileTable.setAllowMultiSelection(true);
fileTable.addCallback(new TableBase.Callback() {
public void mouseDoubleClicked(int row, int column) {
acceptSelection();
}
public void mouseRightClick(int row, int column, Event evt) {
}
public void columnHeaderClicked(int column) {
}
});
activeFileFilter = AllFilesFilter;
fileFiltersModel = new FileFiltersModel();
fileFilterBox = new ComboBox<String>(fileFiltersModel);
fileFilterBox.setTheme("fileFiltersBox");
fileFilterBox.setComputeWidthFromModel(true);
fileFilterBox.setVisible(false);
fileFilterBox.addCallback(new Runnable() {
public void run() {
fileFilterChanged();
}
});
labelCurrentFolder = new Label("Folder");
labelCurrentFolder.setLabelFor(currentFolder);
fileTableSP = new ScrollPane(fileTable);
Runnable showBtnCallback = new Runnable() {
public void run() {
refreshFileTable();
}
};
btnRefresh = new Button();
btnRefresh.setTheme("buttonRefresh");
btnRefresh.addCallback(showBtnCallback);
btnShowFolders = new Button(new ToggleButtonModel(new BitfieldBooleanModel(flags, 0), true));
btnShowFolders.setTheme("buttonShowFolders");
btnShowFolders.addCallback(showBtnCallback);
btnShowHidden = new Button(new ToggleButtonModel(new BitfieldBooleanModel(flags, 1), false));
btnShowHidden.setTheme("buttonShowHidden");
btnShowHidden.addCallback(showBtnCallback);
addActionMapping("goOneLevelUp", "goOneLevelUp");
addActionMapping("acceptSelection", "acceptSelection");
}
protected void createLayout() {
setHorizontalGroup(null);
setVerticalGroup(null);
removeAllChildren();
add(fileTableSP);
add(fileFilterBox);
add(btnOk);
add(btnCancel);
add(btnRefresh);
add(btnShowFolders);
add(btnShowHidden);
add(labelCurrentFolder);
add(currentFolder);
add(btnFolderMRU);
add(btnUp);
Group hCurrentFolder = createSequentialGroup()
.addWidget(labelCurrentFolder)
.addWidget(currentFolder)
.addWidget(btnFolderMRU)
.addWidget(btnUp)
.addWidget(btnHome);
Group vCurrentFolder = createParallelGroup()
.addWidget(labelCurrentFolder)
.addWidget(currentFolder)
.addWidget(btnFolderMRU)
.addWidget(btnUp)
.addWidget(btnHome);
Group hButtonGroup = createSequentialGroup()
.addWidget(btnRefresh)
.addGap(MEDIUM_GAP)
.addWidget(btnShowFolders)
.addWidget(btnShowHidden)
.addWidget(fileFilterBox)
.addGap("buttonBarLeft")
.addWidget(btnFilesMRU)
.addGap("buttonBarSpacer")
.addWidget(btnOk)
.addGap("buttonBarSpacer")
.addWidget(btnCancel)
.addGap("buttonBarRight");
Group vButtonGroup = createParallelGroup()
.addWidget(btnRefresh)
.addWidget(btnShowFolders)
.addWidget(btnShowHidden)
.addWidget(fileFilterBox)
.addWidget(btnFilesMRU)
.addWidget(btnOk)
.addWidget(btnCancel);
Group horz = createParallelGroup()
.addGroup(hCurrentFolder)
.addWidget(fileTableSP);
Group vert = createSequentialGroup()
.addGroup(vCurrentFolder)
.addWidget(fileTableSP);
if(userWidgetBottom != null) {
horz.addWidget(userWidgetBottom);
vert.addWidget(userWidgetBottom);
}
if(userWidgetRight != null) {
horz = createParallelGroup().addGroup(createSequentialGroup()
.addGroup(horz)
.addWidget(userWidgetRight));
vert = createSequentialGroup().addGroup(createParallelGroup()
.addGroup(vert)
.addWidget(userWidgetRight));
}
setHorizontalGroup(horz.addGroup(hButtonGroup));
setVerticalGroup(vert.addGroup(vButtonGroup));
}
@Override
protected void afterAddToGUI(GUI gui) {
super.afterAddToGUI(gui);
createLayout();
}
public FileSystemModel getFileSystemModel() {
return fsm;
}
public void setFileSystemModel(FileSystemModel fsm) {
this.fsm = fsm;
if(fsm == null) {
model = null;
currentFolder.setModel(null);
fileTable.setCurrentFolder(null, null);
autoCompletion.setDataSource(null);
} else {
model = new FileSystemTreeModel(fsm);
model.setSorter(new NameSorter(fsm));
currentFolder.setModel(model);
currentFolder.setSeparator(fsm.getSeparator());
autoCompletion.setDataSource(new FileSystemAutoCompletionDataSource(fsm,
FileSystemTreeModel.FolderFilter.instance));
if(!gotoFolderFromMRU(0) && !goHome()) {
setCurrentNode(model);
}
}
}
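/*
 * Usage sketch (illustrative, not part of this class): wiring a file system model into
 * the selector. JavaFileSystemModel.getInstance() is assumed to be the TWL-provided
 * implementation backed by java.io.File; substitute any other FileSystemModel if that
 * class is not available in your setup.
 *
 *   FileSelector selector = new FileSelector();
 *   selector.setFileSystemModel(JavaFileSystemModel.getInstance());
 *   selector.setAllowMultiSelection(false);
 */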
public boolean getAllowMultiSelection() {
return fileTable.getAllowMultiSelection();
}
/**
* Controls if multi selection is allowed.
*
* Default is true.
*
* @param allowMultiSelection true if multiple files can be selected.
*/
public void setAllowMultiSelection(boolean allowMultiSelection) {
fileTable.setAllowMultiSelection(allowMultiSelection);
}
public boolean getAllowFolderSelection() {
return allowFolderSelection;
}
/**
* Controls if folders can be selected. If false then the "Ok" button
* is disabled when a folder is selected.
*
* Default is false.
*
* @param allowFolderSelection true if folders can be selected
*/
public void setAllowFolderSelection(boolean allowFolderSelection) {
this.allowFolderSelection = allowFolderSelection;
selectionChanged();
}
public boolean getAllowHorizontalScrolling() {
return fileTableSP.getFixed() != ScrollPane.Fixed.HORIZONTAL;
}
/**
* Controls if the file table allows horizontal scrolling or not.
*
* Default is true.
*
* @param allowHorizontalScrolling true if horizontal scrolling is allowed
*/
public void setAllowHorizontalScrolling(boolean allowHorizontalScrolling) {
fileTableSP.setFixed(allowHorizontalScrolling
? ScrollPane.Fixed.NONE
: ScrollPane.Fixed.HORIZONTAL);
}
public void addCallback(Callback callback) {
callbacks = CallbackSupport.addCallbackToList(callbacks, callback, Callback.class);
}
public void removeCallback(Callback callback) {
callbacks = CallbackSupport.removeCallbackFromList(callbacks, callback);
}
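/*
 * Callback sketch (illustrative): Callback2 extends Callback with folder and selection
 * notifications. The method names below mirror the calls made by selectionChanged() and
 * setCurrentNode() further down in this class; bodies are left as placeholders.
 *
 *   selector.addCallback(new FileSelector.Callback2() {
 *       public void filesSelected(Object[] files) { ... }
 *       public void canceled() { ... }
 *       public void folderChanged(Object folder) { ... }
 *       public void selectionChanged(FileTable.Entry[] selection) { ... }
 *   });
 */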
public Widget getUserWidgetBottom() {
return userWidgetBottom;
}
public void setUserWidgetBottom(Widget userWidgetBottom) {
this.userWidgetBottom = userWidgetBottom;
createLayout();
}
public Widget getUserWidgetRight() {
return userWidgetRight;
}
public void setUserWidgetRight(Widget userWidgetRight) {
this.userWidgetRight = userWidgetRight;
createLayout();
}
public FileTable getFileTable() {
return fileTable;
}
public void setOkButtonEnabled(boolean enabled) {
btnOk.setEnabled(enabled);
}
public Object getCurrentFolder() {
Object node = currentFolder.getCurrentNode();
if(node instanceof FileSystemTreeModel.FolderNode) {
return ((FileSystemTreeModel.FolderNode)node).getFolder();
} else {
return null;
}
}
public boolean setCurrentFolder(Object folder) {
FileSystemTreeModel.FolderNode node = model.getNodeForFolder(folder);
if(node != null) {
setCurrentNode(node);
return true;
}
return false;
}
public boolean selectFile(Object file) {
if(fsm == null) {
return false;
}
Object parent = fsm.getParent(file);
if(setCurrentFolder(parent)) {
return fileTable.setSelection(file);
}
return false;
}
public void clearSelection() {
fileTable.clearSelection();
}
/**
* Adds a named file filter to the FileSelector.
*
* The first added file filter is selected as default.
*
* @param filter the file filter.
* @throws NullPointerException if filter is null
* @see #AllFilesFilter
*/
public void addFileFilter(NamedFileFilter filter) {
if(filter == null) {
throw new NullPointerException("filter");
}
fileFiltersModel.addFileFilter(filter);
fileFilterBox.setVisible(fileFiltersModel.getNumEntries() > 0);
if(fileFilterBox.getSelected() < 0) {
fileFilterBox.setSelected(0);
}
}
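/*
 * Filter usage sketch (illustrative): register a named filter and make it the active one.
 * How the NamedFileFilter itself is constructed is left open here, since its constructor
 * is not shown in this file.
 *
 *   selector.addFileFilter(FileSelector.AllFilesFilter);   // built-in "all files" filter
 *   NamedFileFilter imagesOnly = ...;                       // build via the NamedFileFilter API
 *   selector.addFileFilter(imagesOnly);
 *   selector.setFileFilter(imagesOnly);                     // throws if the filter was not registered
 */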
public void removeFileFilter(NamedFileFilter filter) {
if(filter == null) {
throw new NullPointerException("filter");
}
fileFiltersModel.removeFileFilter(filter);
if(fileFiltersModel.getNumEntries() == 0) {
fileFilterBox.setVisible(false);
setFileFilter(AllFilesFilter);
}
}
public void removeAllFileFilters() {
fileFiltersModel.removeAll();
fileFilterBox.setVisible(false);
setFileFilter(AllFilesFilter);
}
public void setFileFilter(NamedFileFilter filter) {
if(filter == null) {
throw new NullPointerException("filter");
}
int idx = fileFiltersModel.findFilter(filter);
if(idx < 0) {
throw new IllegalArgumentException("filter not registered");
}
fileFilterBox.setSelected(idx);
}
public NamedFileFilter getFileFilter() {
return activeFileFilter;
}
public boolean getShowFolders() {
return btnShowFolders.getModel().isSelected();
}
public void setShowFolders(boolean showFolders) {
btnShowFolders.getModel().setSelected(showFolders);
}
public boolean getShowHidden() {
return btnShowHidden.getModel().isSelected();
}
public void setShowHidden(boolean showHidden) {
btnShowHidden.getModel().setSelected(showHidden);
}
public void goOneLevelUp() {
TreeTableNode node = currentFolder.getCurrentNode();
TreeTableNode parent = node.getParent();
if(parent != null) {
setCurrentNode(parent, node);
}
}
public boolean goHome() {
if(fsm != null) {
Object folder = fsm.getSpecialFolder(FileSystemModel.SPECIAL_FOLDER_HOME);
if(folder != null) {
return setCurrentFolder(folder);
}
}
return false;
}
public void acceptSelection() {
FileTable.Entry[] selection = fileTable.getSelection();
if(selection.length == 1) {
FileTable.Entry entry = selection[0];
if(entry != null && entry.isFolder) {
setCurrentFolder(entry.obj);
return;
}
}
fireAcceptCallback(selection);
}
void fileFilterChanged() {
int idx = fileFilterBox.getSelected();
if(idx >= 0) {
NamedFileFilter filter = fileFiltersModel.getFileFilter(idx);
activeFileFilter = filter;
fileTable.setFileFilter(filter.getFileFilter());
}
}
void fireAcceptCallback(FileTable.Entry[] selection) {
if(callbacks != null) {
Object[] objects = new Object[selection.length];
for(int i=0 ; i<selection.length ; i++) {
FileTable.Entry e = selection[i];
if(e.isFolder && !allowFolderSelection) {
return;
}
objects[i] = e.obj;
}
addToMRU(selection);
for(Callback cb : callbacks) {
cb.filesSelected(objects);
}
}
}
void fireCanceled() {
if(callbacks != null) {
for(Callback cb : callbacks) {
cb.canceled();
}
}
}
void selectionChanged() {
boolean foldersSelected = false;
boolean filesSelected = false;
FileTable.Entry[] selection = fileTable.getSelection();
for(FileTable.Entry entry : selection) {
if(entry.isFolder) {
foldersSelected = true;
} else {
filesSelected = true;
}
}
if(allowFolderSelection) {
btnOk.setEnabled(filesSelected || foldersSelected);
} else {
btnOk.setEnabled(filesSelected && !foldersSelected);
}
if(callbacks != null) {
for(Callback cb : callbacks) {
if(cb instanceof Callback2) {
((Callback2)cb).selectionChanged(selection);
}
}
}
}
protected void setCurrentNode(TreeTableNode node, TreeTableNode childToSelect) {
if(childToSelect instanceof FileSystemTreeModel.FolderNode) {
fileToSelectOnSetCurrentNode = ((FileSystemTreeModel.FolderNode)childToSelect).getFolder();
}
setCurrentNode(node);
}
protected void setCurrentNode(TreeTableNode node) {
currentFolder.setCurrentNode(node);
refreshFileTable();
if(callbacks != null) {
Object curFolder = getCurrentFolder();
for(Callback cb : callbacks) {
if(cb instanceof Callback2) {
((Callback2)cb).folderChanged(curFolder);
}
}
}
if(fileToSelectOnSetCurrentNode != null) {
fileTable.setSelection(fileToSelectOnSetCurrentNode);
fileToSelectOnSetCurrentNode = null;
}
}
void refreshFileTable() {
fileTable.setShowFolders(btnShowFolders.getModel().isSelected());
fileTable.setShowHidden(btnShowHidden.getModel().isSelected());
fileTable.setCurrentFolder(fsm, getCurrentFolder());
}
TreeTableNode resolvePath(String path) throws IllegalArgumentException {
Object obj = fsm.getFile(path);
fileToSelectOnSetCurrentNode = null;
if(obj != null) {
if(fsm.isFile(obj)) {
fileToSelectOnSetCurrentNode = obj;
obj = fsm.getParent(obj);
}
FileSystemTreeModel.FolderNode node = model.getNodeForFolder(obj);
if(node != null) {
return node;
}
}
throw new IllegalArgumentException("Could not resolve: " + path);
}
void showFolderMRU() {
final PopupWindow popup = new PopupWindow(this);
final ListBox<String> listBox = new ListBox<String>(folderMRU);
popup.setTheme("fileselector-folderMRUpopup");
popup.add(listBox);
if(popup.openPopup()) {
popup.setInnerSize(getInnerWidth()*2/3, getInnerHeight()*2/3);
popup.setPosition(btnFolderMRU.getX() - popup.getWidth(), btnFolderMRU.getY());
listBox.addCallback(new CallbackWithReason<ListBox.CallbackReason>() {
public void callback(CallbackReason reason) {
if(reason.actionRequested()) {
popup.closePopup();
int idx = listBox.getSelected();
if(idx >= 0) {
gotoFolderFromMRU(idx);
}
}
}
});
}
}
void showFilesMRU() {
final PopupWindow popup = new PopupWindow(this);
final DialogLayout layout = new DialogLayout();
final ListBox<String> listBox = new ListBox<String>(filesMRU);
final Button popupBtnOk = new Button();
final Button popupBtnCancel = new Button();
popupBtnOk.setTheme("buttonOk");
popupBtnCancel.setTheme("buttonCancel");
popup.setTheme("fileselector-filesMRUpopup");
popup.add(layout);
layout.add(listBox);
layout.add(popupBtnOk);
layout.add(popupBtnCancel);
DialogLayout.Group hBtnGroup = layout.createSequentialGroup()
.addGap().addWidget(popupBtnOk).addWidget(popupBtnCancel);
DialogLayout.Group vBtnGroup = layout.createParallelGroup()
.addWidget(popupBtnOk).addWidget(popupBtnCancel);
layout.setHorizontalGroup(layout.createParallelGroup().addWidget(listBox).addGroup(hBtnGroup));
layout.setVerticalGroup(layout.createSequentialGroup().addWidget(listBox).addGroup(vBtnGroup));
if(popup.openPopup()) {
popup.setInnerSize(getInnerWidth()*2/3, getInnerHeight()*2/3);
popup.setPosition(getInnerX() + (getInnerWidth() - popup.getWidth())/2, btnFilesMRU.getY() - popup.getHeight());
final Runnable okCB = new Runnable() {
public void run() {
int idx = listBox.getSelected();
if(idx >= 0) {
Object obj = fsm.getFile(filesMRU.getEntry(idx));
if(obj != null) {
popup.closePopup();
fireAcceptCallback(new FileTable.Entry[] {
new FileTable.Entry(fsm, obj, fsm.getParent(obj) == null)
});
} else {
filesMRU.removeEntry(idx);
}
}
}
};
popupBtnOk.addCallback(okCB);
popupBtnCancel.addCallback(new Runnable() {
public void run() {
popup.closePopup();
}
});
listBox.addCallback(new CallbackWithReason<ListBox.CallbackReason>() {
public void callback(CallbackReason reason) {
if(reason.actionRequested()) {
okCB.run();
}
}
});
}
}
private void addToMRU(FileTable.Entry[] selection) {
for(FileTable.Entry entry : selection) {
filesMRU.addEntry(entry.getPath());
}
folderMRU.addEntry(fsm.getPath(getCurrentFolder()));
}
boolean gotoFolderFromMRU(int idx) {
if(idx >= folderMRU.getNumEntries()) {
return false;
}
String path = folderMRU.getEntry(idx);
try {
TreeTableNode node = resolvePath(path);
setCurrentNode(node);
return true;
} catch(IllegalArgumentException ex) {
folderMRU.removeEntry(idx);
return false;
}
}
static class FileFiltersModel extends SimpleListModel<String> {
private final ArrayList<NamedFileFilter> filters = new ArrayList<NamedFileFilter>();
public NamedFileFilter getFileFilter(int index) {
return filters.get(index);
}
public String getEntry(int index) {
NamedFileFilter filter = getFileFilter(index);
return filter.getDisplayName();
}
public int getNumEntries() {
return filters.size();
}
public void addFileFilter(NamedFileFilter filter) {
int index = filters.size();
filters.add(filter);
fireEntriesInserted(index, index);
}
public void removeFileFilter(NamedFileFilter filter) {
int idx = filters.indexOf(filter);
if(idx >= 0) {
filters.remove(idx);
fireEntriesDeleted(idx, idx);
}
}
public int findFilter(NamedFileFilter filter) {
return filters.indexOf(filter);
}
void removeAll() {
filters.clear();
fireAllChanged();
}
}
/**
* A file object comparator which delegates to a String comparator to sort based
* on the name of the file objects.
*/
public static class NameSorter implements Comparator<Object> {
private final FileSystemModel fsm;
private final Comparator<String> nameComparator;
/**
* Creates a new comparator which uses {@code NaturalSortComparator.stringComparator} to sort the names
* @param fsm the file system model
*/
public NameSorter(FileSystemModel fsm) {
this.fsm = fsm;
this.nameComparator = NaturalSortComparator.stringComparator;
}
/**
* Creates a new comparator which uses the specified String comparator to sort the names
* @param fsm the file system model
* @param nameComparator the name comparator
*/
public NameSorter(FileSystemModel fsm, Comparator<String> nameComparator) {
this.fsm = fsm;
this.nameComparator = nameComparator;
}
public int compare(Object o1, Object o2) {
return nameComparator.compare(fsm.getName(o1), fsm.getName(o2));
}
}
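/*
 * Sorter sketch (illustrative): supplying a custom name comparator instead of the
 * NaturalSortComparator default used in setFileSystemModel() above.
 *
 *   model.setSorter(new NameSorter(fsm, String.CASE_INSENSITIVE_ORDER));
 */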
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jira.producer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import com.atlassian.jira.rest.client.api.IssueRestClient;
import com.atlassian.jira.rest.client.api.JiraRestClient;
import com.atlassian.jira.rest.client.api.JiraRestClientFactory;
import com.atlassian.jira.rest.client.api.domain.Issue;
import com.atlassian.jira.rest.client.api.domain.IssueLink;
import com.atlassian.jira.rest.client.api.domain.input.LinkIssuesInput;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelExecutionException;
import org.apache.camel.EndpointInject;
import org.apache.camel.Produce;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.jira.JiraComponent;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.spi.Registry;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import static org.apache.camel.component.jira.JiraConstants.CHILD_ISSUE_KEY;
import static org.apache.camel.component.jira.JiraConstants.JIRA;
import static org.apache.camel.component.jira.JiraConstants.JIRA_REST_CLIENT_FACTORY;
import static org.apache.camel.component.jira.JiraConstants.LINK_TYPE;
import static org.apache.camel.component.jira.JiraConstants.PARENT_ISSUE_KEY;
import static org.apache.camel.component.jira.JiraTestConstants.JIRA_CREDENTIALS;
import static org.apache.camel.component.jira.Utils.createIssue;
import static org.apache.camel.component.jira.Utils.createIssueWithLinks;
import static org.apache.camel.component.jira.Utils.newIssueLink;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class AddIssueLinkProducerTest extends CamelTestSupport {
@Mock
private JiraRestClient jiraClient;
@Mock
private JiraRestClientFactory jiraRestClientFactory;
@Mock
private IssueRestClient issueRestClient;
@Produce("direct:start")
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint mockResult;
private Issue parentIssue;
private Issue childIssue;
@Override
protected void bindToRegistry(Registry registry) {
registry.bind(JIRA_REST_CLIENT_FACTORY, jiraRestClientFactory);
}
@Override
protected CamelContext createCamelContext() throws Exception {
setMocks();
CamelContext camelContext = super.createCamelContext();
camelContext.disableJMX();
JiraComponent component = new JiraComponent(camelContext);
camelContext.addComponent(JIRA, component);
return camelContext;
}
public void setMocks() {
when(jiraRestClientFactory.createWithBasicHttpAuthentication(any(), any(), any())).thenReturn(jiraClient);
when(jiraClient.getIssueClient()).thenReturn(issueRestClient);
parentIssue = createIssue(1);
childIssue = createIssue(2);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.to("jira://addIssueLink?jiraUrl=" + JIRA_CREDENTIALS)
.to(mockResult);
}
};
}
@Test
public void testAddIssueLink() throws InterruptedException {
String comment = "A new test comment " + new Date();
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
headers.put(LINK_TYPE, linkType);
when(issueRestClient.linkIssue(any(LinkIssuesInput.class)))
.then((Answer<Void>) inv -> {
Collection<IssueLink> links = new ArrayList<>();
links.add(newIssueLink(childIssue.getId(), 1, comment));
parentIssue = createIssueWithLinks(parentIssue.getId(), links);
return null;
});
template.sendBodyAndHeaders(comment, headers);
mockResult.expectedMessageCount(1);
mockResult.assertIsSatisfied();
verify(issueRestClient).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkNoComment() throws InterruptedException {
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
headers.put(LINK_TYPE, linkType);
when(issueRestClient.linkIssue(any(LinkIssuesInput.class)))
.then((Answer<Void>) inv -> {
Collection<IssueLink> links = new ArrayList<>();
links.add(newIssueLink(childIssue.getId(), 1, null));
parentIssue = createIssueWithLinks(parentIssue.getId(), links);
return null;
});
template.sendBodyAndHeaders(null, headers);
mockResult.expectedMessageCount(1);
mockResult.assertIsSatisfied();
verify(issueRestClient).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkMissingParentIssueKey() throws InterruptedException {
String comment = "A new test comment " + new Date();
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
headers.put(LINK_TYPE, linkType);
try {
template.sendBodyAndHeaders(comment, headers);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertStringContains(cause.getMessage(), PARENT_ISSUE_KEY);
}
mockResult.expectedMessageCount(0);
mockResult.assertIsSatisfied();
verify(issueRestClient, never()).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkMissingChildIssueKey() throws InterruptedException {
String comment = "A new test comment " + new Date();
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(LINK_TYPE, linkType);
try {
template.sendBodyAndHeaders(comment, headers);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertStringContains(cause.getMessage(), CHILD_ISSUE_KEY);
}
mockResult.expectedMessageCount(0);
mockResult.assertIsSatisfied();
verify(issueRestClient, never()).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkMissingLinkType() throws InterruptedException {
String comment = "A new test comment " + new Date();
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
try {
template.sendBodyAndHeaders(comment, headers);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertStringContains(cause.getMessage(), LINK_TYPE);
}
mockResult.expectedMessageCount(0);
mockResult.assertIsSatisfied();
verify(issueRestClient, never()).linkIssue(any(LinkIssuesInput.class));
}
}
|
|
/*
MIT License
Copyright (c) 2017 Ryan French
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package main.com.rfrench.jvm.ui;
import java.util.ArrayList;
import javafx.geometry.VPos;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import javafx.scene.paint.Paint;
import javafx.scene.text.TextAlignment;
/*
Program Title: LocalVariablePane.java
Author: Ryan French
Created: 05-Feb-2016
Version: 1.0
*/
public class LocalVariablePane
{
private Canvas local_var_canvas;
private GraphicsContext gc;
private final double RECT_WIDTH;
private final double RECT_HEIGHT;
private final double RECT_X_OFFSET;
private final double CANVAS_HEIGHT;
private ArrayList<ArrayList<String>> method_local_frames_list;
private ArrayList<Paint> frame_colours;
private ArrayList<String> local_frame_text;
private int number_of_frames; // used to calculate where to put next frame in UI
public LocalVariablePane(Canvas local_var_canvas)
{
this.local_var_canvas = local_var_canvas;
this.gc = this.local_var_canvas.getGraphicsContext2D();
RECT_WIDTH = local_var_canvas.getWidth() * 0.8;
RECT_HEIGHT = local_var_canvas.getHeight() * 0.05;
RECT_X_OFFSET = 75;
CANVAS_HEIGHT = local_var_canvas.getHeight();
number_of_frames = 0;
method_local_frames_list = new ArrayList<ArrayList<String>>();
setupFrameColors();
}
private void setupFrameColors()
{
frame_colours = new ArrayList<Paint>();
frame_colours.add(Color.YELLOW);
frame_colours.add(Color.YELLOWGREEN);
frame_colours.add(Color.DARKGOLDENROD);
frame_colours.add(Color.DARKORANGE);
frame_colours.add(Color.SANDYBROWN);
frame_colours.add(Color.CHOCOLATE);
}
public void addMethodLocalFrame(String[] frame_text, int current_method_count, int max_local_var)
{
local_frame_text = new ArrayList<String>();
gc.setTextAlign(TextAlignment.CENTER);
gc.setTextBaseline(VPos.CENTER);
int colour_index = current_method_count;
// use >= so an index equal to the list size also wraps instead of overflowing
if(current_method_count >= frame_colours.size())
{
colour_index = current_method_count % frame_colours.size();
}
System.out.println("current method count: " + current_method_count);
System.out.println("Adding stuff, colour index is: " + colour_index);
Paint frame_colour = frame_colours.get(colour_index);
for(int i = 0; i < max_local_var; i++)
{
double rect_y_pos = CANVAS_HEIGHT - ((number_of_frames * RECT_HEIGHT) + 100);
gc.setFill(frame_colour);
gc.fillRect(RECT_X_OFFSET, rect_y_pos, RECT_WIDTH, RECT_HEIGHT);
gc.setStroke(Color.BLACK);
gc.strokeRect(RECT_X_OFFSET, rect_y_pos, RECT_WIDTH, RECT_HEIGHT);
String frame_element_text = "";
if(i < frame_text.length)
{
frame_element_text = frame_text[i];
}
double text_x_pos = RECT_X_OFFSET + (RECT_WIDTH / 2);
double text_y_pos = CANVAS_HEIGHT - ((number_of_frames * RECT_HEIGHT) + 100) + (RECT_HEIGHT / 2);
gc.setStroke(Color.BLACK);
gc.strokeText(frame_element_text, text_x_pos, text_y_pos);
number_of_frames++;
local_frame_text.add(frame_element_text);
}
method_local_frames_list.add(local_frame_text);
}
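/*
 * Layout note (illustrative numbers): slots stack upwards from a fixed 100px baseline above
 * the bottom of the canvas. With CANVAS_HEIGHT = 600 and RECT_HEIGHT = 30, the first slot is
 * drawn at y = 600 - (0 * 30 + 100) = 470, the next at 440, and so on, while the fill colour
 * cycles through frame_colours via the modulo wrap above.
 */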
public void removeFrameUI(int current_method_count)
{
int number_of_local_vars = method_local_frames_list.get(current_method_count).size();
System.out.println("no: " + number_of_local_vars);
for(int i = 0; i < number_of_local_vars; i++)
{
number_of_frames--;
double rect_y_pos = CANVAS_HEIGHT - ((number_of_frames * RECT_HEIGHT) + 100);
gc.clearRect(RECT_X_OFFSET, rect_y_pos, RECT_WIDTH+1, RECT_HEIGHT);
}
}
public int getNumberOfFrames()
{
return number_of_frames;
}
public String getFrameName(int index)
{
return local_frame_text.get(index);
}
public void updateFrameLabel(int current_method_count, int index, String new_text)
{
// TODO: could keep a running count of all frames instead - (MAX_LOCAL_VAR_CURRENT_FRAME - index)
int number_of_elements_in_frame = 0;
for(int i = 0; i < current_method_count; i++)
{
number_of_elements_in_frame += method_local_frames_list.get(i).size();
}
int index_frame_to_update = number_of_elements_in_frame + index;
System.out.println("index: " + index_frame_to_update);
double rect_y_pos = CANVAS_HEIGHT - ((index_frame_to_update * RECT_HEIGHT) + 100);
int colour_index = current_method_count;
if(current_method_count >= frame_colours.size())
{
colour_index = current_method_count % frame_colours.size();
}
Paint frame_colour = frame_colours.get(colour_index);
gc.setFill(frame_colour);
gc.setStroke(Color.BLACK);
gc.clearRect(RECT_X_OFFSET, rect_y_pos, RECT_WIDTH, RECT_HEIGHT);
gc.fillRect(RECT_X_OFFSET, rect_y_pos, RECT_WIDTH, RECT_HEIGHT);
gc.strokeRect(RECT_X_OFFSET, rect_y_pos, RECT_WIDTH, RECT_HEIGHT);
double text_x_pos = RECT_X_OFFSET + (RECT_WIDTH / 2);
double text_y_pos = CANVAS_HEIGHT - ((index_frame_to_update * RECT_HEIGHT) + 100) + (RECT_HEIGHT / 2);
gc.strokeText(new_text, text_x_pos, text_y_pos);
method_local_frames_list.get(current_method_count).set(index, new_text);
}
}
|
|
package com.github.cchao.touchnews.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.ImageView;
import android.widget.OverScroller;
import android.widget.Scroller;
/*
* TouchImageView.java
* By: Michael Ortiz
* Updated By: Patrick Lackemacher
* Updated By: Babay88
* Updated By: @ipsilondev
* Updated By: hank-cp
* Updated By: singpolyma
* -------------------
* Extends Android ImageView to include pinch zooming, panning, fling and double tap zoom.
*/
public class TouchImageView extends ImageView {
private static final String DEBUG = "DEBUG";
//
// SuperMin and SuperMax multipliers. Determine how much the image can be
// zoomed below or above the zoom boundaries, before animating back to the
// min/max zoom boundary.
//
private static final float SUPER_MIN_MULTIPLIER = .75f;
private static final float SUPER_MAX_MULTIPLIER = 1.25f;
//
// Scale of image ranges from minScale to maxScale, where minScale == 1
// when the image is stretched to fit view.
//
private float normalizedScale;
//
// Matrix applied to image. MSCALE_X and MSCALE_Y should always be equal.
// MTRANS_X and MTRANS_Y are the other values used. prevMatrix is the matrix
// saved prior to the screen rotating.
//
private Matrix matrix, prevMatrix;
private static enum State {NONE, DRAG, ZOOM, FLING, ANIMATE_ZOOM}
private State state;
private float minScale;
private float maxScale;
private float superMinScale;
private float superMaxScale;
private float[] m;
private Context context;
private Fling fling;
private ScaleType mScaleType;
private boolean imageRenderedAtLeastOnce;
private boolean onDrawReady;
private ZoomVariables delayedZoomVariables;
//
// Size of view and previous view size (ie before rotation)
//
private int viewWidth, viewHeight, prevViewWidth, prevViewHeight;
//
// Size of image when it is stretched to fit view. Before and After rotation.
//
private float matchViewWidth, matchViewHeight, prevMatchViewWidth, prevMatchViewHeight;
private ScaleGestureDetector mScaleDetector;
private GestureDetector mGestureDetector;
private GestureDetector.OnDoubleTapListener doubleTapListener = null;
private OnTouchListener userTouchListener = null;
private OnTouchImageViewListener touchImageViewListener = null;
public TouchImageView(Context context) {
super(context);
sharedConstructing(context);
}
public TouchImageView(Context context, AttributeSet attrs) {
super(context, attrs);
sharedConstructing(context);
}
public TouchImageView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
sharedConstructing(context);
}
private void sharedConstructing(Context context) {
super.setClickable(true);
this.context = context;
mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
mGestureDetector = new GestureDetector(context, new GestureListener());
matrix = new Matrix();
prevMatrix = new Matrix();
m = new float[9];
normalizedScale = 1;
if (mScaleType == null) {
mScaleType = ScaleType.FIT_CENTER;
}
minScale = 1;
maxScale = 3;
superMinScale = SUPER_MIN_MULTIPLIER * minScale;
superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
setImageMatrix(matrix);
setScaleType(ScaleType.MATRIX);
setState(State.NONE);
onDrawReady = false;
super.setOnTouchListener(new PrivateOnTouchListener());
}
@Override
public void setOnTouchListener(View.OnTouchListener l) {
userTouchListener = l;
}
public void setOnTouchImageViewListener(OnTouchImageViewListener l) {
touchImageViewListener = l;
}
public void setOnDoubleTapListener(GestureDetector.OnDoubleTapListener l) {
doubleTapListener = l;
}
@Override
public void setImageResource(int resId) {
super.setImageResource(resId);
savePreviousImageValues();
fitImageToView();
}
@Override
public void setImageBitmap(Bitmap bm) {
super.setImageBitmap(bm);
savePreviousImageValues();
fitImageToView();
}
@Override
public void setImageDrawable(Drawable drawable) {
super.setImageDrawable(drawable);
savePreviousImageValues();
fitImageToView();
}
@Override
public void setImageURI(Uri uri) {
super.setImageURI(uri);
savePreviousImageValues();
fitImageToView();
}
@Override
public void setScaleType(ScaleType type) {
if (type == ScaleType.FIT_START || type == ScaleType.FIT_END) {
throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
}
if (type == ScaleType.MATRIX) {
super.setScaleType(ScaleType.MATRIX);
} else {
mScaleType = type;
if (onDrawReady) {
//
// If the image is already rendered, scaleType has been called programmatically
// and the TouchImageView should be updated with the new scaleType.
//
setZoom(this);
}
}
}
@Override
public ScaleType getScaleType() {
return mScaleType;
}
/**
* Returns true if the image has been zoomed away from its initial, unzoomed state; false otherwise.
*
* @return true if image is zoomed
*/
public boolean isZoomed() {
return normalizedScale != 1;
}
/**
* Return a RectF representing the zoomed image.
*
* @return rect representing zoomed image
*/
public RectF getZoomedRect() {
if (mScaleType == ScaleType.FIT_XY) {
throw new UnsupportedOperationException("getZoomedRect() not supported with FIT_XY");
}
PointF topLeft = transformCoordTouchToBitmap(0, 0, true);
PointF bottomRight = transformCoordTouchToBitmap(viewWidth, viewHeight, true);
float w = getDrawable().getIntrinsicWidth();
float h = getDrawable().getIntrinsicHeight();
return new RectF(topLeft.x / w, topLeft.y / h, bottomRight.x / w, bottomRight.y / h);
}
/**
* Save the current matrix and view dimensions
* in the prevMatrix and prevView variables.
*/
private void savePreviousImageValues() {
if (matrix != null && viewHeight != 0 && viewWidth != 0) {
matrix.getValues(m);
prevMatrix.setValues(m);
prevMatchViewHeight = matchViewHeight;
prevMatchViewWidth = matchViewWidth;
prevViewHeight = viewHeight;
prevViewWidth = viewWidth;
}
}
@Override
public Parcelable onSaveInstanceState() {
Bundle bundle = new Bundle();
bundle.putParcelable("instanceState", super.onSaveInstanceState());
bundle.putFloat("saveScale", normalizedScale);
bundle.putFloat("matchViewHeight", matchViewHeight);
bundle.putFloat("matchViewWidth", matchViewWidth);
bundle.putInt("viewWidth", viewWidth);
bundle.putInt("viewHeight", viewHeight);
matrix.getValues(m);
bundle.putFloatArray("matrix", m);
bundle.putBoolean("imageRendered", imageRenderedAtLeastOnce);
return bundle;
}
@Override
public void onRestoreInstanceState(Parcelable state) {
if (state instanceof Bundle) {
Bundle bundle = (Bundle) state;
normalizedScale = bundle.getFloat("saveScale");
m = bundle.getFloatArray("matrix");
prevMatrix.setValues(m);
prevMatchViewHeight = bundle.getFloat("matchViewHeight");
prevMatchViewWidth = bundle.getFloat("matchViewWidth");
prevViewHeight = bundle.getInt("viewHeight");
prevViewWidth = bundle.getInt("viewWidth");
imageRenderedAtLeastOnce = bundle.getBoolean("imageRendered");
super.onRestoreInstanceState(bundle.getParcelable("instanceState"));
return;
}
super.onRestoreInstanceState(state);
}
@Override
protected void onDraw(Canvas canvas) {
onDrawReady = true;
imageRenderedAtLeastOnce = true;
if (delayedZoomVariables != null) {
setZoom(delayedZoomVariables.scale, delayedZoomVariables.focusX, delayedZoomVariables.focusY, delayedZoomVariables.scaleType);
delayedZoomVariables = null;
}
super.onDraw(canvas);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
savePreviousImageValues();
}
/**
* Get the max zoom multiplier.
*
* @return max zoom multiplier.
*/
public float getMaxZoom() {
return maxScale;
}
/**
* Set the max zoom multiplier. Default value: 3.
*
* @param max max zoom multiplier.
*/
public void setMaxZoom(float max) {
maxScale = max;
superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
}
/**
* Get the min zoom multiplier.
*
* @return min zoom multiplier.
*/
public float getMinZoom() {
return minScale;
}
/**
* Get the current zoom. This is the zoom relative to the initial
* scale, not the original resource.
*
* @return current zoom multiplier.
*/
public float getCurrentZoom() {
return normalizedScale;
}
/**
* Set the min zoom multiplier. Default value: 1.
*
* @param min min zoom multiplier.
*/
public void setMinZoom(float min) {
minScale = min;
superMinScale = SUPER_MIN_MULTIPLIER * minScale;
}
/**
* Reset zoom and translation to initial state.
*/
public void resetZoom() {
normalizedScale = 1;
fitImageToView();
}
/**
* Set zoom to the specified scale. Image will be centered by default.
*
* @param scale
*/
public void setZoom(float scale) {
setZoom(scale, 0.5f, 0.5f);
}
/**
* Set zoom to the specified scale. Image will be centered around the point
* (focusX, focusY). These floats range from 0 to 1 and denote the focus point
* as a fraction from the left and top of the view. For example, the top left
* corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
*
* @param scale
* @param focusX
* @param focusY
*/
public void setZoom(float scale, float focusX, float focusY) {
setZoom(scale, focusX, focusY, mScaleType);
}
/**
* Set zoom to the specified scale. Image will be centered around the point
* (focusX, focusY). These floats range from 0 to 1 and denote the focus point
* as a fraction from the left and top of the view. For example, the top left
* corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
*
* @param scale
* @param focusX
* @param focusY
* @param scaleType
*/
public void setZoom(float scale, float focusX, float focusY, ScaleType scaleType) {
//
// setZoom can be called before the image is on the screen, but at this point,
// image and view sizes have not yet been calculated in onMeasure. Thus, we should
// delay calling setZoom until the view has been measured.
//
if (!onDrawReady) {
delayedZoomVariables = new ZoomVariables(scale, focusX, focusY, scaleType);
return;
}
if (scaleType != mScaleType) {
setScaleType(scaleType);
}
resetZoom();
scaleImage(scale, viewWidth / 2, viewHeight / 2, true);
matrix.getValues(m);
m[Matrix.MTRANS_X] = -((focusX * getImageWidth()) - (viewWidth * 0.5f));
m[Matrix.MTRANS_Y] = -((focusY * getImageHeight()) - (viewHeight * 0.5f));
matrix.setValues(m);
fixTrans();
setImageMatrix(matrix);
}
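/*
 * Usage sketch (illustrative): zoom to 2x with the focus point one quarter from the left and
 * top of the image, using one of the supported scale types.
 *
 *   touchImageView.setZoom(2f, 0.25f, 0.25f, ScaleType.FIT_CENTER);
 */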
/**
* Set zoom parameters equal to another TouchImageView. Including scale, position,
* and ScaleType.
*/
public void setZoom(TouchImageView img) {
PointF center = img.getScrollPosition();
setZoom(img.getCurrentZoom(), center.x, center.y, img.getScaleType());
}
/**
* Return the point at the center of the zoomed image. The PointF coordinates range
* in value between 0 and 1 and the focus point is denoted as a fraction from the left
* and top of the view. For example, the top left corner of the image would be (0, 0).
* And the bottom right corner would be (1, 1).
*
* @return PointF representing the scroll position of the zoomed image.
*/
public PointF getScrollPosition() {
Drawable drawable = getDrawable();
if (drawable == null) {
return null;
}
int drawableWidth = drawable.getIntrinsicWidth();
int drawableHeight = drawable.getIntrinsicHeight();
PointF point = transformCoordTouchToBitmap(viewWidth / 2, viewHeight / 2, true);
point.x /= drawableWidth;
point.y /= drawableHeight;
return point;
}
/**
* Set the focus point of the zoomed image. The focus points are denoted as a fraction from the
* left and top of the view. The focus points can range in value between 0 and 1.
*
* @param focusX
* @param focusY
*/
public void setScrollPosition(float focusX, float focusY) {
setZoom(normalizedScale, focusX, focusY);
}
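/*
 * Sketch (illustrative): keeping two TouchImageViews in sync. getScrollPosition() returns the
 * fractional focus point of the current view, which can be re-applied together with the current
 * zoom and ScaleType on another instance.
 *
 *   PointF focus = sourceView.getScrollPosition();
 *   if (focus != null) {
 *       targetView.setZoom(sourceView.getCurrentZoom(), focus.x, focus.y, sourceView.getScaleType());
 *   }
 */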
/**
* Performs boundary checking and fixes the image matrix if it
* is out of bounds.
*/
private void fixTrans() {
matrix.getValues(m);
float transX = m[Matrix.MTRANS_X];
float transY = m[Matrix.MTRANS_Y];
float fixTransX = getFixTrans(transX, viewWidth, getImageWidth());
float fixTransY = getFixTrans(transY, viewHeight, getImageHeight());
if (fixTransX != 0 || fixTransY != 0) {
matrix.postTranslate(fixTransX, fixTransY);
}
}
/**
* When transitioning from zooming from focus to zoom from center (or vice versa)
* the image can become unaligned within the view. This is apparent when zooming
* quickly. When the content size is less than the view size, the content will often
* be centered incorrectly within the view. fixScaleTrans first calls fixTrans() and
* then makes sure the image is centered correctly within the view.
*/
private void fixScaleTrans() {
fixTrans();
matrix.getValues(m);
if (getImageWidth() < viewWidth) {
m[Matrix.MTRANS_X] = (viewWidth - getImageWidth()) / 2;
}
if (getImageHeight() < viewHeight) {
m[Matrix.MTRANS_Y] = (viewHeight - getImageHeight()) / 2;
}
matrix.setValues(m);
}
private float getFixTrans(float trans, float viewSize, float contentSize) {
float minTrans, maxTrans;
if (contentSize <= viewSize) {
minTrans = 0;
maxTrans = viewSize - contentSize;
} else {
minTrans = viewSize - contentSize;
maxTrans = 0;
}
if (trans < minTrans)
return -trans + minTrans;
if (trans > maxTrans)
return -trans + maxTrans;
return 0;
}
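/*
 * Worked example (illustrative numbers): with viewSize = 1000 and contentSize = 1500 the valid
 * range for trans is [-500, 0]. A trans of -650 returns -(-650) + (-500) = 150, i.e. the matrix
 * is nudged 150px back inside the bounds, while a trans of -200 is already valid and returns 0.
 */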
private float getFixDragTrans(float delta, float viewSize, float contentSize) {
if (contentSize <= viewSize) {
return 0;
}
return delta;
}
private float getImageWidth() {
return matchViewWidth * normalizedScale;
}
private float getImageHeight() {
return matchViewHeight * normalizedScale;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
Drawable drawable = getDrawable();
if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
setMeasuredDimension(0, 0);
return;
}
int drawableWidth = drawable.getIntrinsicWidth();
int drawableHeight = drawable.getIntrinsicHeight();
int widthSize = MeasureSpec.getSize(widthMeasureSpec);
int widthMode = MeasureSpec.getMode(widthMeasureSpec);
int heightSize = MeasureSpec.getSize(heightMeasureSpec);
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
viewWidth = setViewSize(widthMode, widthSize, drawableWidth);
viewHeight = setViewSize(heightMode, heightSize, drawableHeight);
//
// Set view dimensions
//
setMeasuredDimension(viewWidth, viewHeight);
//
// Fit content within view
//
fitImageToView();
}
/**
* If the normalizedScale is equal to 1, then the image is made to fit the screen. Otherwise,
* it is made to fit the screen according to the dimensions of the previous image matrix. This
* allows the image to maintain its zoom after rotation.
*/
private void fitImageToView() {
Drawable drawable = getDrawable();
if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
return;
}
if (matrix == null || prevMatrix == null) {
return;
}
int drawableWidth = drawable.getIntrinsicWidth();
int drawableHeight = drawable.getIntrinsicHeight();
//
// Scale image for view
//
float scaleX = (float) viewWidth / drawableWidth;
float scaleY = (float) viewHeight / drawableHeight;
switch (mScaleType) {
case CENTER:
scaleX = scaleY = 1;
break;
case CENTER_CROP:
scaleX = scaleY = Math.max(scaleX, scaleY);
break;
case CENTER_INSIDE:
scaleX = scaleY = Math.min(1, Math.min(scaleX, scaleY));
// intentional fall-through: scaleX and scaleY are already equal here, so the
// FIT_CENTER computation below leaves them unchanged
case FIT_CENTER:
scaleX = scaleY = Math.min(scaleX, scaleY);
break;
case FIT_XY:
break;
default:
//
// FIT_START and FIT_END not supported
//
throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
}
//
// Center the image
//
float redundantXSpace = viewWidth - (scaleX * drawableWidth);
float redundantYSpace = viewHeight - (scaleY * drawableHeight);
matchViewWidth = viewWidth - redundantXSpace;
matchViewHeight = viewHeight - redundantYSpace;
if (!isZoomed() && !imageRenderedAtLeastOnce) {
//
// Stretch and center image to fit view
//
matrix.setScale(scaleX, scaleY);
matrix.postTranslate(redundantXSpace / 2, redundantYSpace / 2);
normalizedScale = 1;
} else {
//
// These values should never be 0 or we will set viewWidth and viewHeight
// to NaN in translateMatrixAfterRotate. To avoid this, call savePreviousImageValues
// to set them equal to the current values.
//
if (prevMatchViewWidth == 0 || prevMatchViewHeight == 0) {
savePreviousImageValues();
}
prevMatrix.getValues(m);
//
// Rescale Matrix after rotation
//
m[Matrix.MSCALE_X] = matchViewWidth / drawableWidth * normalizedScale;
m[Matrix.MSCALE_Y] = matchViewHeight / drawableHeight * normalizedScale;
//
// TransX and TransY from previous matrix
//
float transX = m[Matrix.MTRANS_X];
float transY = m[Matrix.MTRANS_Y];
//
// Width
//
float prevActualWidth = prevMatchViewWidth * normalizedScale;
float actualWidth = getImageWidth();
translateMatrixAfterRotate(Matrix.MTRANS_X, transX, prevActualWidth, actualWidth, prevViewWidth, viewWidth, drawableWidth);
//
// Height
//
float prevActualHeight = prevMatchViewHeight * normalizedScale;
float actualHeight = getImageHeight();
translateMatrixAfterRotate(Matrix.MTRANS_Y, transY, prevActualHeight, actualHeight, prevViewHeight, viewHeight, drawableHeight);
//
// Set the matrix to the adjusted scale and translate values.
//
matrix.setValues(m);
}
fixTrans();
setImageMatrix(matrix);
}
/**
* Resolve one view dimension (width or height) from the layout MeasureSpec.
*
* @param mode MeasureSpec mode (EXACTLY, AT_MOST or UNSPECIFIED)
* @param size size supplied by the MeasureSpec
* @param drawableWidth the corresponding intrinsic dimension of the drawable
* @return the resolved view dimension in pixels
*/
private int setViewSize(int mode, int size, int drawableWidth) {
int viewSize;
switch (mode) {
case MeasureSpec.EXACTLY:
viewSize = size;
break;
case MeasureSpec.AT_MOST:
viewSize = Math.min(drawableWidth, size);
break;
case MeasureSpec.UNSPECIFIED:
viewSize = drawableWidth;
break;
default:
viewSize = size;
break;
}
return viewSize;
}
/**
* After rotating, the matrix needs to be translated. This function finds the area of image
* which was previously centered and adjusts translations so that it is again the center, post-rotation.
*
* @param axis Matrix.MTRANS_X or Matrix.MTRANS_Y
* @param trans the value of trans in that axis before the rotation
* @param prevImageSize the width/height of the image before the rotation
* @param imageSize width/height of the image after rotation
* @param prevViewSize width/height of view before rotation
* @param viewSize width/height of view after rotation
* @param drawableSize width/height of drawable
*/
private void translateMatrixAfterRotate(int axis, float trans, float prevImageSize, float imageSize, int prevViewSize, int viewSize, int drawableSize) {
if (imageSize < viewSize) {
//
// The width/height of image is less than the view's width/height. Center it.
//
m[axis] = (viewSize - (drawableSize * m[Matrix.MSCALE_X])) * 0.5f;
} else if (trans > 0) {
//
// The image is larger than the view, but was not before rotation. Center it.
//
m[axis] = -((imageSize - viewSize) * 0.5f);
} else {
//
// Find the area of the image which was previously centered in the view. Determine its distance
// from the left/top side of the view as a fraction of the entire image's width/height. Use that percentage
// to calculate the trans in the new view width/height.
//
float percentage = (Math.abs(trans) + (0.5f * prevViewSize)) / prevImageSize;
m[axis] = -((percentage * imageSize) - (viewSize * 0.5f));
}
}
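/*
 * Worked example (illustrative numbers) for the final branch: prevViewSize = 1000,
 * prevImageSize = 3000 and trans = -700 give percentage = (700 + 500) / 3000 = 0.4; with
 * imageSize = 2400 and viewSize = 800 the new trans becomes -((0.4 * 2400) - 400) = -560,
 * so the previously centered region stays centered after rotation.
 */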
private void setState(State state) {
this.state = state;
}
public boolean canScrollHorizontallyFroyo(int direction) {
return canScrollHorizontally(direction);
}
@Override
public boolean canScrollHorizontally(int direction) {
matrix.getValues(m);
float x = m[Matrix.MTRANS_X];
if (getImageWidth() < viewWidth) {
return false;
} else if (x >= -1 && direction < 0) {
return false;
} else if (Math.abs(x) + viewWidth + 1 >= getImageWidth() && direction > 0) {
return false;
}
return true;
}
/**
* Gesture Listener detects a single click or long click and passes that on
* to the view's listener.
*
* @author Ortiz
*/
private class GestureListener extends GestureDetector.SimpleOnGestureListener {
@Override
public boolean onSingleTapConfirmed(MotionEvent e) {
if (doubleTapListener != null) {
return doubleTapListener.onSingleTapConfirmed(e);
}
return performClick();
}
@Override
public void onLongPress(MotionEvent e) {
performLongClick();
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
if (fling != null) {
//
// If a previous fling is still active, it should be cancelled so that two flings
// are not run simultaneously.
//
fling.cancelFling();
}
fling = new Fling((int) velocityX, (int) velocityY);
compatPostOnAnimation(fling);
return super.onFling(e1, e2, velocityX, velocityY);
}
@Override
public boolean onDoubleTap(MotionEvent e) {
boolean consumed = false;
if (doubleTapListener != null) {
consumed = doubleTapListener.onDoubleTap(e);
}
if (state == State.NONE) {
float targetZoom = (normalizedScale == minScale) ? maxScale : minScale;
DoubleTapZoom doubleTap = new DoubleTapZoom(targetZoom, e.getX(), e.getY(), false);
compatPostOnAnimation(doubleTap);
consumed = true;
}
return consumed;
}
@Override
public boolean onDoubleTapEvent(MotionEvent e) {
if (doubleTapListener != null) {
return doubleTapListener.onDoubleTapEvent(e);
}
return false;
}
}
public interface OnTouchImageViewListener {
public void onMove();
}
/**
* Responsible for all touch events. Handles the heavy lifting of drag and also sends
* touch events to Scale Detector and Gesture Detector.
*
* @author Ortiz
*/
private class PrivateOnTouchListener implements OnTouchListener {
//
// Remember last point position for dragging
//
private PointF last = new PointF();
@Override
public boolean onTouch(View v, MotionEvent event) {
mScaleDetector.onTouchEvent(event);
mGestureDetector.onTouchEvent(event);
PointF curr = new PointF(event.getX(), event.getY());
if (state == State.NONE || state == State.DRAG || state == State.FLING) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
last.set(curr);
if (fling != null)
fling.cancelFling();
setState(State.DRAG);
break;
case MotionEvent.ACTION_MOVE:
if (state == State.DRAG) {
float deltaX = curr.x - last.x;
float deltaY = curr.y - last.y;
float fixTransX = getFixDragTrans(deltaX, viewWidth, getImageWidth());
float fixTransY = getFixDragTrans(deltaY, viewHeight, getImageHeight());
matrix.postTranslate(fixTransX, fixTransY);
fixTrans();
last.set(curr.x, curr.y);
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_UP:
setState(State.NONE);
break;
}
}
setImageMatrix(matrix);
//
// User-defined OnTouchListener
//
if (userTouchListener != null) {
userTouchListener.onTouch(v, event);
}
//
// OnTouchImageViewListener is set: TouchImageView dragged by user.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
//
// indicate event was handled
//
return true;
}
}
/**
* ScaleListener detects user two finger scaling and scales image.
*
* @author Ortiz
*/
private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
setState(State.ZOOM);
return true;
}
@Override
public boolean onScale(ScaleGestureDetector detector) {
scaleImage(detector.getScaleFactor(), detector.getFocusX(), detector.getFocusY(), true);
//
// OnTouchImageViewListener is set: TouchImageView pinch zoomed by user.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
return true;
}
@Override
public void onScaleEnd(ScaleGestureDetector detector) {
super.onScaleEnd(detector);
setState(State.NONE);
boolean animateToZoomBoundary = false;
float targetZoom = normalizedScale;
if (normalizedScale > maxScale) {
targetZoom = maxScale;
animateToZoomBoundary = true;
} else if (normalizedScale < minScale) {
targetZoom = minScale;
animateToZoomBoundary = true;
}
if (animateToZoomBoundary) {
DoubleTapZoom doubleTap = new DoubleTapZoom(targetZoom, viewWidth / 2, viewHeight / 2, true);
compatPostOnAnimation(doubleTap);
}
}
}
private void scaleImage(double deltaScale, float focusX, float focusY, boolean stretchImageToSuper) {
float lowerScale, upperScale;
if (stretchImageToSuper) {
lowerScale = superMinScale;
upperScale = superMaxScale;
} else {
lowerScale = minScale;
upperScale = maxScale;
}
float origScale = normalizedScale;
normalizedScale *= deltaScale;
if (normalizedScale > upperScale) {
normalizedScale = upperScale;
deltaScale = upperScale / origScale;
} else if (normalizedScale < lowerScale) {
normalizedScale = lowerScale;
deltaScale = lowerScale / origScale;
}
matrix.postScale((float) deltaScale, (float) deltaScale, focusX, focusY);
fixScaleTrans();
}
/**
* DoubleTapZoom calls a series of runnables which apply
* an animated zoom in/out graphic to the image.
*
* @author Ortiz
*/
private class DoubleTapZoom implements Runnable {
private long startTime;
private static final float ZOOM_TIME = 500;
private float startZoom, targetZoom;
private float bitmapX, bitmapY;
private boolean stretchImageToSuper;
private AccelerateDecelerateInterpolator interpolator = new AccelerateDecelerateInterpolator();
private PointF startTouch;
private PointF endTouch;
DoubleTapZoom(float targetZoom, float focusX, float focusY, boolean stretchImageToSuper) {
setState(State.ANIMATE_ZOOM);
startTime = System.currentTimeMillis();
this.startZoom = normalizedScale;
this.targetZoom = targetZoom;
this.stretchImageToSuper = stretchImageToSuper;
PointF bitmapPoint = transformCoordTouchToBitmap(focusX, focusY, false);
this.bitmapX = bitmapPoint.x;
this.bitmapY = bitmapPoint.y;
//
// Used for translating image during scaling
//
startTouch = transformCoordBitmapToTouch(bitmapX, bitmapY);
endTouch = new PointF(viewWidth / 2, viewHeight / 2);
}
@Override
public void run() {
float t = interpolate();
double deltaScale = calculateDeltaScale(t);
scaleImage(deltaScale, bitmapX, bitmapY, stretchImageToSuper);
translateImageToCenterTouchPosition(t);
fixScaleTrans();
setImageMatrix(matrix);
//
// OnTouchImageViewListener is set: double tap runnable updates listener
// with every frame.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
if (t < 1f) {
//
// We haven't finished zooming
//
compatPostOnAnimation(this);
} else {
//
// Finished zooming
//
setState(State.NONE);
}
}
/**
* Interpolate between where the image should start and end in order to translate
* the image so that the point that is touched is what ends up centered at the end
* of the zoom.
*
* @param t
*/
private void translateImageToCenterTouchPosition(float t) {
float targetX = startTouch.x + t * (endTouch.x - startTouch.x);
float targetY = startTouch.y + t * (endTouch.y - startTouch.y);
PointF curr = transformCoordBitmapToTouch(bitmapX, bitmapY);
matrix.postTranslate(targetX - curr.x, targetY - curr.y);
}
/**
* Use interpolator to get t
*
* @return
*/
private float interpolate() {
long currTime = System.currentTimeMillis();
float elapsed = (currTime - startTime) / ZOOM_TIME;
elapsed = Math.min(1f, elapsed);
return interpolator.getInterpolation(elapsed);
}
/**
* Interpolate the current targeted zoom and get the delta
* from the current zoom.
*
* @param t
* @return
*/
private double calculateDeltaScale(float t) {
double zoom = startZoom + t * (targetZoom - startZoom);
return zoom / normalizedScale;
}
}
/**
* This function will transform the coordinates in the touch event to the coordinate
* system of the drawable that the ImageView contains.
*
* @param x x-coordinate of touch event
* @param y y-coordinate of touch event
* @param clipToBitmap Touch events may occur within the view but outside the image content. If true,
* the return value is clipped to the bounds of the bitmap size.
* @return Coordinates of the point touched, in the coordinate system of the original drawable.
*/
private PointF transformCoordTouchToBitmap(float x, float y, boolean clipToBitmap) {
matrix.getValues(m);
float origW = getDrawable().getIntrinsicWidth();
float origH = getDrawable().getIntrinsicHeight();
float transX = m[Matrix.MTRANS_X];
float transY = m[Matrix.MTRANS_Y];
float finalX = ((x - transX) * origW) / getImageWidth();
float finalY = ((y - transY) * origH) / getImageHeight();
if (clipToBitmap) {
finalX = Math.min(Math.max(finalX, 0), origW);
finalY = Math.min(Math.max(finalY, 0), origH);
}
return new PointF(finalX, finalY);
}
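// Worked example (added, hypothetical values): with transX = 100, getImageWidth() = 800 and an
// intrinsic drawable width of 400, a touch at x = 500 maps to ((500 - 100) * 400) / 800 = 200
// in bitmap coordinates.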
/**
* Inverse of transformCoordTouchToBitmap. This function will transform the coordinates in the
* drawable's coordinate system to the view's coordinate system.
*
* @param bx x-coordinate in original bitmap coordinate system
* @param by y-coordinate in original bitmap coordinate system
* @return Coordinates of the point in the view's coordinate system.
*/
private PointF transformCoordBitmapToTouch(float bx, float by) {
matrix.getValues(m);
float origW = getDrawable().getIntrinsicWidth();
float origH = getDrawable().getIntrinsicHeight();
float px = bx / origW;
float py = by / origH;
float finalX = m[Matrix.MTRANS_X] + getImageWidth() * px;
float finalY = m[Matrix.MTRANS_Y] + getImageHeight() * py;
return new PointF(finalX, finalY);
}
/**
* Fling launches sequential runnables which apply
* the fling graphic to the image. The values for the translation
* are interpolated by the Scroller.
*
* @author Ortiz
*/
private class Fling implements Runnable {
CompatScroller scroller;
int currX, currY;
Fling(int velocityX, int velocityY) {
setState(State.FLING);
scroller = new CompatScroller(context);
matrix.getValues(m);
int startX = (int) m[Matrix.MTRANS_X];
int startY = (int) m[Matrix.MTRANS_Y];
int minX, maxX, minY, maxY;
if (getImageWidth() > viewWidth) {
minX = viewWidth - (int) getImageWidth();
maxX = 0;
} else {
minX = maxX = startX;
}
if (getImageHeight() > viewHeight) {
minY = viewHeight - (int) getImageHeight();
maxY = 0;
} else {
minY = maxY = startY;
}
scroller.fling(startX, startY, (int) velocityX, (int) velocityY, minX,
maxX, minY, maxY);
currX = startX;
currY = startY;
}
public void cancelFling() {
if (scroller != null) {
setState(State.NONE);
scroller.forceFinished(true);
}
}
@Override
public void run() {
//
// OnTouchImageViewListener is set: TouchImageView listener has been flung by user.
// Listener runnable updated with each frame of fling animation.
//
if (touchImageViewListener != null) {
touchImageViewListener.onMove();
}
if (scroller.isFinished()) {
scroller = null;
return;
}
if (scroller.computeScrollOffset()) {
int newX = scroller.getCurrX();
int newY = scroller.getCurrY();
int transX = newX - currX;
int transY = newY - currY;
currX = newX;
currY = newY;
matrix.postTranslate(transX, transY);
fixTrans();
setImageMatrix(matrix);
compatPostOnAnimation(this);
}
}
}
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
private class CompatScroller {
Scroller scroller;
OverScroller overScroller;
boolean isPreGingerbread;
public CompatScroller(Context context) {
if (VERSION.SDK_INT < VERSION_CODES.GINGERBREAD) {
isPreGingerbread = true;
scroller = new Scroller(context);
} else {
isPreGingerbread = false;
overScroller = new OverScroller(context);
}
}
public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY) {
if (isPreGingerbread) {
scroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY);
} else {
overScroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY);
}
}
public void forceFinished(boolean finished) {
if (isPreGingerbread) {
scroller.forceFinished(finished);
} else {
overScroller.forceFinished(finished);
}
}
public boolean isFinished() {
if (isPreGingerbread) {
return scroller.isFinished();
} else {
return overScroller.isFinished();
}
}
public boolean computeScrollOffset() {
if (isPreGingerbread) {
return scroller.computeScrollOffset();
} else {
// Avoid calling computeScrollOffset() twice; the first result was previously discarded.
return overScroller.computeScrollOffset();
}
}
public int getCurrX() {
if (isPreGingerbread) {
return scroller.getCurrX();
} else {
return overScroller.getCurrX();
}
}
public int getCurrY() {
if (isPreGingerbread) {
return scroller.getCurrY();
} else {
return overScroller.getCurrY();
}
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void compatPostOnAnimation(Runnable runnable) {
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) {
postOnAnimation(runnable);
} else {
// Fall back to roughly 60 fps: 1000 / 60 gives a ~16 ms delay between frames.
postDelayed(runnable, 1000 / 60);
}
}
private class ZoomVariables {
public float scale;
public float focusX;
public float focusY;
public ScaleType scaleType;
public ZoomVariables(float scale, float focusX, float focusY, ScaleType scaleType) {
this.scale = scale;
this.focusX = focusX;
this.focusY = focusY;
this.scaleType = scaleType;
}
}
private void printMatrixInfo() {
float[] n = new float[9];
matrix.getValues(n);
Log.d(DEBUG, "Scale: " + n[Matrix.MSCALE_X] + " TransX: " + n[Matrix.MTRANS_X] + " TransY: " + n[Matrix.MTRANS_Y]);
}
}
|
|
/*
* Copyright (C) 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject.testing.fieldbinder;
import static com.google.inject.Asserts.assertContains;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import com.google.inject.BindingAnnotation;
import com.google.inject.ConfigurationException;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import com.google.inject.util.Providers;
import junit.framework.TestCase;
import java.lang.annotation.Retention;
import java.util.Arrays;
import java.util.List;
import javax.inject.Qualifier;
/** Unit tests for {@link BoundFieldModule}. */
public class BoundFieldModuleTest extends TestCase {
public void testBindingNothing() {
Object instance = new Object() {};
BoundFieldModule module = BoundFieldModule.of(instance);
Guice.createInjector(module);
// If we didn't throw an exception, we succeeded.
}
public void testBindingOnePrivate() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private Integer anInt = testValue;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBindingOnePublic() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind public Integer anInt = testValue;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
private static class FieldBindableClass {
@Bind Integer anInt;
FieldBindableClass(Integer anInt) {
this.anInt = anInt;
}
}
private static class FieldBindableSubclass extends FieldBindableClass {
FieldBindableSubclass(Integer anInt) {
super(anInt);
}
}
public void testSuperTypeBinding() {
FieldBindableSubclass instance = new FieldBindableSubclass(1024);
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(instance.anInt, injector.getInstance(Integer.class));
}
public void testBindingTwo() {
final Integer testValue = 1024;
final String testString = "Hello World!";
Object instance = new Object() {
@Bind private Integer anInt = testValue;
@Bind private String aString = testString;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
assertEquals(testString, injector.getInstance(String.class));
}
public void testBindingSuperType() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind(to = Number.class) private Integer anInt = testValue;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Number.class));
}
public void testBindingSuperTypeAccessSubType() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind(to = Number.class) private Integer anInt = testValue;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
try {
injector.getInstance(Integer.class);
fail();
} catch (ConfigurationException e) {
assertContains(
e.getMessage(),
"Could not find a suitable constructor in java.lang.Integer");
}
}
public void testBindingIncorrectTypeProviderFails() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind(to = String.class) private Provider<Integer> anIntProvider = new Provider<Integer>() {
@Override public Integer get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(
e.getMessage(),
"Requested binding type \"java.lang.String\" is not "
+ "assignable from field binding type \"java.lang.Integer\"");
}
}
@BindingAnnotation
@Retention(RUNTIME)
private static @interface SomeBindingAnnotation {}
public void testBindingWithBindingAnnotation() {
final Integer testValue1 = 1024, testValue2 = 2048;
Object instance = new Object() {
@Bind private Integer anInt = testValue1;
@Bind
@SomeBindingAnnotation
private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Integer.class));
assertEquals(
testValue2,
injector.getInstance(Key.get(Integer.class, SomeBindingAnnotation.class)));
}
@Qualifier
@Retention(RUNTIME)
private static @interface SomeQualifier {}
public void testBindingWithQualifier() {
final Integer testValue1 = 1024, testValue2 = 2048;
Object instance = new Object() {
@Bind private Integer anInt = testValue1;
@Bind
@SomeQualifier
private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Integer.class));
assertEquals(
testValue2,
injector.getInstance(Key.get(Integer.class, SomeQualifier.class)));
}
public void testCanReuseBindingAnnotationsWithDifferentValues() {
final Integer testValue1 = 1024, testValue2 = 2048;
final String name1 = "foo", name2 = "bar";
Object instance = new Object() {
@Bind
@Named(name1)
private Integer anInt = testValue1;
@Bind
@Named(name2)
private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(
testValue1,
injector.getInstance(Key.get(Integer.class, Names.named(name1))));
assertEquals(
testValue2,
injector.getInstance(Key.get(Integer.class, Names.named(name2))));
}
public void testBindingWithValuedBindingAnnotation() {
final Integer testValue1 = 1024, testValue2 = 2048;
final String name = "foo";
Object instance = new Object() {
@Bind private Integer anInt = testValue1;
@Bind
@Named(name)
private Integer anotherInt = testValue2;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Integer.class));
assertEquals(
testValue2,
injector.getInstance(Key.get(Integer.class, Names.named(name))));
}
public void testBindingWithGenerics() {
final List<Integer> testIntList = Arrays.asList(new Integer[] {1, 2, 3});
final List<Boolean> testBoolList = Arrays.asList(new Boolean[] {true, true, false});
Object instance = new Object() {
@Bind private List<Integer> anIntList = testIntList;
@Bind private List<Boolean> aBoolList = testBoolList;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testIntList, injector.getInstance(new Key<List<Integer>>() {}));
assertEquals(testBoolList, injector.getInstance(new Key<List<Boolean>>() {}));
}
public void testBoundValueDoesntChange() {
Integer testValue = 1024;
FieldBindableClass instance = new FieldBindableClass(testValue);
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
instance.anInt++;
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testIncompatibleBindingType() {
final Integer testInt = 1024;
Object instance = new Object() {
@Bind(to = String.class) private Integer anInt = testInt;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(),
"Requested binding type \"java.lang.String\" is not assignable from field binding type "
+ "\"java.lang.Integer\"");
}
}
public void testFailureOnMultipleBindingAnnotations() {
final Integer testInt = 1024;
Object instance = new Object() {
@Bind
@Named("a")
@SomeBindingAnnotation
private Integer anInt = testInt;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(), "More than one annotation is specified for this binding.");
}
}
public void testBindingSuperTypeAndBindingAnnotation() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind(to = Number.class)
@Named("foo")
private Integer anInt = testValue;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Key.get(Number.class, Names.named("foo"))));
}
public void testBindingProvider() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private Provider<Integer> anInt = new Provider<Integer>() {
@Override public Integer get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBindingJavaxProvider() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private javax.inject.Provider<Integer> anInt = new javax.inject.Provider<Integer>() {
@Override public Integer get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBindingNonNullableNullField() {
Object instance = new Object() {
@Bind private Integer anInt = null;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(),
"Binding to null values is only allowed for fields that are annotated @Nullable.");
}
}
@Retention(RUNTIME)
private @interface Nullable {}
public void testBindingNullableNullField() {
Object instance = new Object() {
@Bind @Nullable private Integer anInt = null;
};
Injector injector = Guice.createInjector(BoundFieldModule.of(instance));
assertNull(injector.getInstance(Integer.class));
}
public void testBindingNullProvider() {
Object instance = new Object() {
@Bind private Provider<Integer> anIntProvider = null;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(),
"Binding to null is not allowed. Use Providers.of(null) if this is your intended "
+ "behavior.");
}
}
public void testBindingNullableNullProvider() {
Object instance = new Object() {
@Bind @Nullable private Provider<Integer> anIntProvider = null;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(),
"Binding to null is not allowed. Use Providers.of(null) if this is your intended "
+ "behavior.");
}
}
private static class IntegerProvider implements Provider<Integer> {
private final Integer value;
IntegerProvider(Integer value) {
this.value = value;
}
@Override public Integer get() {
return value;
}
}
public void testProviderSubclassesBindToTheProviderItself() {
final IntegerProvider integerProvider = new IntegerProvider(1024);
Object instance = new Object() {
@Bind private IntegerProvider anIntProvider = integerProvider;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(integerProvider, injector.getInstance(IntegerProvider.class));
}
public void testProviderSubclassesDoNotBindParameterizedType() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private IntegerProvider anIntProvider = new IntegerProvider(testValue);
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
try {
injector.getInstance(Integer.class);
fail();
} catch (ConfigurationException e) {
assertContains(e.getMessage(), "Could not find a suitable constructor in java.lang.Integer.");
}
}
public void testNullableProviderSubclassesAllowNull() {
Object instance = new Object() {
@Bind @Nullable private IntegerProvider anIntProvider = null;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertNull(injector.getInstance(IntegerProvider.class));
}
private static class ParameterizedObject<T> {
ParameterizedObject(T instance) {
this.instance = instance;
}
@Bind private T instance;
}
public void testBindParameterizedTypeFails() {
ParameterizedObject<Integer> instance = new ParameterizedObject<Integer>(0);
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(), "T cannot be used as a key; It is not fully specified.");
}
}
public void testBindSubclassOfParameterizedTypeSucceeds() {
final Integer testValue = 1024;
ParameterizedObject<Integer> instance = new ParameterizedObject<Integer>(testValue) {};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBindArray() {
final Integer[] testArray = new Integer[] { 1024, 2048 };
Object instance = new Object() {
@Bind private Integer[] anIntArray = testArray;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testArray, injector.getInstance(Integer[].class));
}
public void testRawProviderCannotBeBound() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private Provider anIntProvider = new Provider() {
@Override public Object get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(),
"Non parameterized Provider fields must have an "
+ "explicit binding class via @Bind(to = Foo.class)");
}
}
public void testExplicitlyBoundRawProviderCanBeBound() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind(to = Integer.class) private Provider anIntProvider = new Provider() {
@Override public Object get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testRawProviderCanBindToIncorrectType() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind(to = String.class) private Provider anIntProvider = new Provider() {
@Override public Object get() {
return testValue;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(String.class));
}
public void testMultipleErrorsAreAggregated() {
Object instance = new Object() {
@Bind private Provider aProvider;
@Bind(to = String.class) private Integer anInt;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertEquals(2, e.getErrorMessages().size());
}
}
public void testBindingProviderWithProviderSubclassValue() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private Provider<Integer> anIntProvider = new IntegerProvider(testValue);
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Integer.class));
}
public void testBoundFieldsCannotBeInjected() {
Object instance = new Object() {
@Bind
@Inject
Integer anInt = 0;
};
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(
e.getMessage(),
"Fields annotated with both @Bind and @Inject are illegal.");
}
}
public void testIncrementingProvider() {
final Integer testBaseValue = 1024;
Object instance = new Object() {
@Bind private Provider<Integer> anIntProvider = new Provider<Integer>() {
private int value = testBaseValue;
@Override public Integer get() {
return value++;
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testBaseValue, injector.getInstance(Integer.class));
assertEquals((Integer) (testBaseValue + 1), injector.getInstance(Integer.class));
assertEquals((Integer) (testBaseValue + 2), injector.getInstance(Integer.class));
}
public void testProviderDoesNotProvideDuringInjectorConstruction() {
Object instance = new Object() {
@Bind private Provider<Integer> myIntProvider = new Provider<Integer>() {
@Override public Integer get() {
throw new UnsupportedOperationException();
}
};
};
BoundFieldModule module = BoundFieldModule.of(instance);
Guice.createInjector(module);
// If we don't throw an exception, we succeeded.
}
private static class InvalidBindableClass {
@Bind(to = String.class) Integer anInt;
}
public void testIncompatibleBindingTypeStackTraceHasUserFrame() {
Object instance = new InvalidBindableClass();
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(e.getMessage(), "at " + InvalidBindableClass.class.getName() + " field anInt");
}
}
private static class InjectedNumberProvider implements Provider<Number> {
@Inject Integer anInt;
@Override public Number get() {
return anInt;
}
}
public void testBoundProvidersAreInjected() {
final Integer testValue = 1024;
Object instance = new Object() {
@Bind private Integer anInt = testValue;
@Bind private Provider<Number> aNumberProvider = new InjectedNumberProvider();
};
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue, injector.getInstance(Number.class));
}
public void testBoundInstancesAreInjected() {
final Integer testValue = 1024;
final InjectedNumberProvider testNumberProvider = new InjectedNumberProvider();
Object instance = new Object() {
@Bind private Integer anInt = testValue;
@Bind private InjectedNumberProvider aNumberProvider = testNumberProvider;
};
BoundFieldModule module = BoundFieldModule.of(instance);
Guice.createInjector(module);
assertEquals(testValue, testNumberProvider.anInt);
}
private static class InvalidBindableSubclass extends InvalidBindableClass {}
public void testClassIsPrintedInErrorsWhenCauseIsSuperclass() {
Object instance = new InvalidBindableSubclass();
BoundFieldModule module = BoundFieldModule.of(instance);
try {
Guice.createInjector(module);
fail();
} catch (CreationException e) {
assertContains(
e.getMessage(),
"Requested binding type \"java.lang.String\" is not assignable from field binding type "
+ "\"java.lang.Integer\"");
}
}
private static class FieldBindableSubclass2 extends FieldBindableClass {
@Bind Number aNumber;
FieldBindableSubclass2(Integer anInt, Number aNumber) {
super(anInt);
this.aNumber = aNumber;
}
}
public void testFieldsAreBoundFromFullClassHierarchy() {
final Integer testValue1 = 1024, testValue2 = 2048;
FieldBindableSubclass2 instance = new FieldBindableSubclass2(testValue1, testValue2);
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertEquals(testValue1, injector.getInstance(Integer.class));
assertEquals(testValue2, injector.getInstance(Number.class));
}
static final class LazyClass {
@Bind(lazy = true) Integer foo = 1;
}
public void testFieldBound_lazy() {
LazyClass asProvider = new LazyClass();
Injector injector = Guice.createInjector(BoundFieldModule.of(asProvider));
assertEquals(1, injector.getInstance(Integer.class).intValue());
asProvider.foo++;
assertEquals(2, injector.getInstance(Integer.class).intValue());
}
public void testNonNullableFieldBound_lazy_rejectNull() {
LazyClass asProvider = new LazyClass();
Injector injector = Guice.createInjector(BoundFieldModule.of(asProvider));
assertEquals(1, injector.getInstance(Integer.class).intValue());
asProvider.foo = null;
try {
injector.getInstance(Integer.class);
fail();
} catch (ProvisionException e) {
assertContains(e.getMessage(),
"Binding to null values is only allowed for fields that are annotated @Nullable.");
}
}
static final class LazyClassNullable {
@Bind(lazy = true) @Nullable Integer foo = 1;
}
public void testNullableFieldBound_lazy_allowNull() {
LazyClassNullable asProvider = new LazyClassNullable();
Injector injector = Guice.createInjector(BoundFieldModule.of(asProvider));
assertEquals(1, injector.getInstance(Integer.class).intValue());
asProvider.foo = null;
assertNull(injector.getInstance(Integer.class));
}
static final class LazyProviderClass {
@Bind(lazy = true) Provider<Integer> foo = Providers.of(null);
}
public void testFieldBoundAsProvider_lazy() {
LazyProviderClass asProvider = new LazyProviderClass();
Provider<Integer> provider =
Guice.createInjector(BoundFieldModule.of(asProvider)).getProvider(Integer.class);
assertNull(provider.get());
asProvider.foo = Providers.of(1);
assertEquals(1, provider.get().intValue());
asProvider.foo =
new Provider<Integer>() {
@Override
public Integer get() {
throw new RuntimeException("boom");
}
};
try {
provider.get();
fail();
} catch (ProvisionException e) {
assertContains(e.getMessage(), "boom");
}
}
private static final class LazyNonTransparentProvider {
@Bind(lazy = true)
@Nullable
private IntegerProvider anIntProvider = null;
}
public void testFieldBoundAsNonTransparentProvider_lazy() {
LazyNonTransparentProvider instance = new LazyNonTransparentProvider();
BoundFieldModule module = BoundFieldModule.of(instance);
Injector injector = Guice.createInjector(module);
assertNull(injector.getInstance(IntegerProvider.class));
instance.anIntProvider = new IntegerProvider(3);
assertEquals(3, injector.getInstance(IntegerProvider.class).get().intValue());
try {
injector.getInstance(Integer.class);
fail();
} catch (ConfigurationException expected) {
// expected because we don't interpret IntegerProvider as a Provider<Integer>
}
}
}
|
|
package com.actelion.research.chem.phesa;
import com.actelion.research.chem.Coordinates;
import com.actelion.research.chem.StereoMolecule;
import com.actelion.research.chem.conf.Conformer;
import com.actelion.research.chem.optimization.OptimizerLBFGS;
import com.actelion.research.chem.phesa.pharmacophore.PPGaussian;
import com.actelion.research.calc.Matrix;
import com.actelion.research.calc.SingularValueDecomposition;
import java.util.Arrays;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;
import java.util.ArrayList;
/**
* Provides functionality to calculate the volume overlap between two molecules.
*
* @version 1.0, February 2018
* @author J. Wahl
*/
public class PheSAAlignment {
private MolecularVolume refMolGauss;
private MolecularVolume molGauss;
private double ppWeight;
public enum axis {X,Y,Z};
public PheSAAlignment(StereoMolecule refMol, StereoMolecule mol,double ppWeight) {
this.ppWeight = ppWeight;
this.refMolGauss = new MolecularVolume(refMol);
this.molGauss = new MolecularVolume(mol);
}
public PheSAAlignment(StereoMolecule refMol, StereoMolecule mol) {
this(refMol,mol,0.5);
}
public PheSAAlignment(MolecularVolume refMolGauss, MolecularVolume molGauss) {
this(refMolGauss,molGauss,0.5);
}
public PheSAAlignment(MolecularVolume refMolGauss, MolecularVolume molGauss,double ppWeight) {
this.ppWeight = ppWeight;
this.refMolGauss= refMolGauss;
this.molGauss = molGauss;
}
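// Illustrative usage sketch (added, not part of the original source; assumes refMol and fitMol
// are StereoMolecules with 3D coordinates that have been pre-processed as required by the
// surrounding pipeline):
//   PheSAAlignment alignment = new PheSAAlignment(refMol, fitMol);
//   double[] result = alignment.findAlignment(PheSAAlignment.initialTransform(1));
//   double similarity = result[0]; // combined shape/pharmacophore similarity, see findAlignment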
public MolecularVolume getRefMolGauss() {
return refMolGauss;
}
public MolecularVolume getMolGauss() {
return molGauss;
}
/**
* Move the COM of the molecular volume to the origin of the lab frame and orient the molecule so that its
* principal moments of inertia coincide with the three axes of the coordinate system.
* @param conf conformer whose coordinates are translated and rotated in place
* @param molVol molecular volume associated with the conformer
* @return the rotation matrix that was applied
*/
public static Matrix preProcess(Conformer conf, MolecularVolume molVol) {
Coordinates COM = molVol.getCOM();
int nrOfAtoms = conf.getSize();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords1 = conf.getCoordinates(i);
coords1.sub(COM);
}
molVol.translateToCOM(COM);
return createCanonicalOrientation(conf,molVol);
}
public static Matrix createCanonicalOrientation(Conformer conf, MolecularVolume molGauss) {
Matrix m = PheSAAlignment.getCovarianceMatrix(molGauss);
SingularValueDecomposition svd = new SingularValueDecomposition(m.getArray(),null,null);
Matrix u = new Matrix(svd.getU());
double det = u.det();
if(det<0) {
u.set(0,1,-u.get(0, 1));
u.set(1,1,-u.get(1, 1));
u.set(2,1,-u.get(2, 1));
}
PheSAAlignment.rotateMol(conf,u);
molGauss.update(conf);
Matrix rotMat = u;
if(!isCanonicalOrientation(molGauss)) {
rotateMolAroundAxis180(conf,axis.X);
molGauss.update(conf);
if(isCanonicalOrientation(molGauss)) {
u.set(0,1,-u.get(0, 1));
u.set(1,1,-u.get(1, 1));
u.set(2,1,-u.get(2, 1));
u.set(0,2,-u.get(0, 2));
u.set(1,2,-u.get(1, 2));
u.set(2,2,-u.get(2, 2));
rotMat = u;
}
else {
rotateMolAroundAxis180(conf,axis.X); // rotate back
molGauss.update(conf);
rotateMolAroundAxis180(conf,axis.Y);
molGauss.update(conf);
if(isCanonicalOrientation(molGauss)) {
u.set(0,0,-u.get(0, 0));
u.set(1,0,-u.get(1, 0));
u.set(2,0,-u.get(2, 0));
u.set(0,2,-u.get(0, 2));
u.set(1,2,-u.get(1, 2));
u.set(2,2,-u.get(2, 2));
rotMat = u;
}
else {
rotateMolAroundAxis180(conf,axis.Y);
molGauss.update(conf);
rotateMolAroundAxis180(conf,axis.Z);
molGauss.update(conf);
if(isCanonicalOrientation(molGauss)) {
u.set(0,0,-u.get(0, 0));
u.set(1,0,-u.get(1, 0));
u.set(2,0,-u.get(2, 0));
u.set(0,1,-u.get(0, 1));
u.set(1,1,-u.get(1, 1));
u.set(2,1,-u.get(2, 1));
rotMat = u;
}
}
}
}
for(VolumeGaussian vg : molGauss.getVolumeGaussians())
vg.rotateShift(rotMat);
return rotMat;
}
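// Note (added): the determinant check above flips one column of U when det(U) < 0 so that the
// applied transform is a proper rotation rather than a reflection; the subsequent 180-degree
// flips resolve the remaining axis-sign ambiguity of the principal-axis frame, and U is adjusted
// in step so the returned matrix matches the rotation actually applied to the conformer.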
private static void rotateMolAroundAxis180(Conformer conf,axis a) {
if (a == axis.X) {
IntStream.range(0,conf.getSize()).forEach(i -> {
Coordinates coords = conf.getCoordinates(i);
coords.y = -coords.y;
coords.z = -coords.z;
});
}
else if (a == axis.Y) {
IntStream.range(0,conf.getSize()).forEach(i -> {
Coordinates coords = conf.getCoordinates(i);
coords.x = -coords.x;
coords.z = -coords.z;
});
}
else {
IntStream.range(0,conf.getSize()).forEach(i -> {
Coordinates coords = conf.getCoordinates(i);
coords.x = -coords.x;
coords.y = -coords.y;
});
}
}
private static Matrix getCovarianceMatrix(MolecularVolume molGauss) {
Matrix massMatrix = new Matrix(3,3);
double volume = 0.0;
for (AtomicGaussian ag : molGauss.getAtomicGaussians()){
volume += ag.getVolume();
double value = ag.getVolume()*ag.getCenter().x*ag.getCenter().x;
massMatrix.addToElement(0,0,value);
value = ag.getVolume()*ag.getCenter().x*ag.getCenter().y;
massMatrix.addToElement(0,1,value);
value = ag.getVolume()*ag.getCenter().x*ag.getCenter().z;
massMatrix.addToElement(0,2,value);
value = ag.getVolume()*ag.getCenter().y*ag.getCenter().y;
massMatrix.addToElement(1,1,value);
value = ag.getVolume()*ag.getCenter().y*ag.getCenter().z;
massMatrix.addToElement(1,2,value);
value = ag.getVolume()*ag.getCenter().z*ag.getCenter().z;
massMatrix.addToElement(2,2,value);
}
for (VolumeGaussian vg : molGauss.getVolumeGaussians()){
volume += vg.getRole()*vg.getVolume();
double value = vg.getRole()*vg.getVolume()*vg.getCenter().x*vg.getCenter().x;
massMatrix.addToElement(0,0,value);
value = vg.getRole()*vg.getVolume()*vg.getCenter().x*vg.getCenter().y;
massMatrix.addToElement(0,1,value);
value = vg.getRole()*vg.getVolume()*vg.getCenter().x*vg.getCenter().z;
massMatrix.addToElement(0,2,value);
value = vg.getRole()*vg.getVolume()*vg.getCenter().y*vg.getCenter().y;
massMatrix.addToElement(1,1,value);
value = vg.getRole()*vg.getVolume()*vg.getCenter().y*vg.getCenter().z;
massMatrix.addToElement(1,2,value);
value = vg.getRole()*vg.getVolume()*vg.getCenter().z*vg.getCenter().z;
massMatrix.addToElement(2,2,value);
}
massMatrix.set(0,0,massMatrix.get(0,0)/volume);
massMatrix.set(0,1,massMatrix.get(0,1)/volume);
massMatrix.set(0,2,massMatrix.get(0,2)/volume);
massMatrix.set(1,1,massMatrix.get(1,1)/volume);
massMatrix.set(1,2,massMatrix.get(1,2)/volume);
massMatrix.set(2,2,massMatrix.get(2,2)/volume);
massMatrix.set(1,0,massMatrix.get(0,1));
massMatrix.set(2,0,massMatrix.get(0,2));
massMatrix.set(2,1,massMatrix.get(1,2));
return massMatrix;
}
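// Note (added): this is the volume-weighted second-moment (covariance) matrix of the Gaussian
// centers; its singular vectors define the principal axes used by createCanonicalOrientation.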
private static boolean isCanonicalOrientation(MolecularVolume molGauss) {
double xxPos = 0;
double xxNeg = 0;
double yyPos = 0;
double yyNeg = 0;
int nXPos = 0;
int nXNeg = 0;
int nYPos = 0;
int nYNeg = 0;
for (AtomicGaussian ag : molGauss.getAtomicGaussians()){
double x = ag.center.x;
double y = ag.center.y;
if(x>0) {
xxPos += x*x;
nXPos++;
}
else {
xxNeg += x*x;
nXNeg++;
}
if(y>0) {
yyPos += y*y;
nYPos++;
}
else {
yyNeg += y*y;
nYNeg++;
}
}
xxPos/=nXPos;
yyPos/=nYPos;
xxNeg/=nXNeg;
yyNeg/=nYNeg;
if(xxPos>xxNeg && yyPos>yyNeg)
return true;
else
return false;
}
/**
* Generate initial orientations of the molecule:
* mode 1: 4 orientations: the initial orientation and a 180 degree rotation about each axis
* mode 2: mode 1 plus 90 degree rotations about each axis
* A transformation vector consists of 7 elements: the first 4 elements form a quaternion describing the rotation,
* the last three elements are the translation vector.
* @param mode initial-orientation mode (1 or 2; any other value returns only the identity transform)
* @return array of 7-element transform vectors
*/
public static double[][] initialTransform(int mode) {
double c = 0.707106781;
switch(mode){
case 1:
double[][] transforms1 = {{1.0,0.0,0.0,0.0,0.0,0.0,0.0},{0.0,1.0,0.0,0.0,0.0,0.0,0.0},{0.0,0.0,1.0,0.0,0.0,0.0,0.0},
{0.0,0.0,0.0,1.0,0.0,0.0,0.0}};
return transforms1;
case 2:
double[][] transforms2 = {{1,0,0,0,0,0,0},{0,1,0,0,0,0,0},{0,0,1,0,0,0,0},
{0,0,0,1,0,0,0},
{c,c,0,0,0,0,0},
{c,0,c,0,0,0,0},
{c,0,0,c,0,0,0},
{-0.5,0.5,0.5,-0.5,0,0,0},
{0.5,-0.5,0.5,-0.5,0,0,0},
{0.5,0.5,0.5,-0.5,0,0,0},
{0.5,-0.5,-0.5,-0.5,0,0,0},
{0.5,0.5,-0.5,-0.5,0,0,0}
};
return transforms2;
default:
double [][] transform = {{1.0,0.0,0.0,0.0,0.0,0.0,0.0}};
return transform;
}
}
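// Worked example (added): with c = 0.707106781 ~ cos(45 deg) = sin(45 deg), the row
// {c, c, 0, 0, 0, 0, 0} encodes the unit quaternion (cos(theta/2), sin(theta/2) * axis) for a
// 90-degree rotation about the x axis with zero translation, while {0, 1, 0, 0, 0, 0, 0} is a
// 180-degree rotation about x and {1, 0, 0, 0, 0, 0, 0} is the identity.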
/**
* calculate the overlap of the two molecular volumes as a function of a transform vector that is applied to the query molecule
* overlap Volume of two molecular Volumes formulated as a summed overlap of atomic Gaussians
* taken from Grant, Gallardo, Pickup, Journal of Computational Chemistry, 17, 1653-1666, 1996
* returns a double[2]: the first double is the total overlap, whereas the second value is the specific
* contribution of additional volume gaussians (inclusion, exclusion)
* @param transform
* @return
*/
public double[] getTotalAtomOverlap(double[] transform){
double[] result = new double[2];
Quaternion quat = new Quaternion(transform[0],transform[1],transform[2],transform[3]);
double Vtot = 0.0;
double Vvol = 0.0;
double[][] rotMatrix = quat.getRotMatrix().getArray();
ArrayList<AtomicGaussian> atomicGaussians = molGauss.getAtomicGaussians();
Coordinates[] fitCenterModCoords = new Coordinates[atomicGaussians.size()];
double normFactor = 1/(transform[0]*transform[0]+transform[1]*transform[1]+transform[2]*transform[2]+transform[3]*transform[3]);
for(int k=0;k<atomicGaussians.size();k++) {
fitCenterModCoords[k] = atomicGaussians.get(k).getRotatedCenter(rotMatrix, normFactor, new double[] {transform[4], transform[5], transform[6]}); //we operate on the transformed coordinates of the molecule to be fitted
}
for(AtomicGaussian refAt:refMolGauss.getAtomicGaussians()){
int index = 0;
for(AtomicGaussian fitAt:molGauss.getAtomicGaussians()){
Vtot += refAt.getVolumeOverlap(fitAt, fitCenterModCoords[index],Gaussian3D.DIST_CUTOFF);
index+=1;
}
}
for(VolumeGaussian refVol:refMolGauss.getVolumeGaussians()){
int index = 0;
for(AtomicGaussian fitAt:molGauss.getAtomicGaussians()){
double overlap = refVol.getRole()*refVol.getVolumeOverlap(fitAt, fitCenterModCoords[index],Gaussian3D.DIST_CUTOFF);
Vtot += overlap;
Vvol += overlap;
index+=1;
}
}
if(Vtot<0)
Vtot = 0.0;
result[0] = Vtot;
result[1] = Vvol;
return result;
}
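// Reference note (added): in the Grant/Gallardo/Pickup formulation cited above, the pairwise
// overlap of two spherical Gaussians p_i*exp(-a_i*r^2) and p_j*exp(-a_j*r^2) separated by R is
// commonly written as p_i * p_j * exp(-a_i*a_j*R^2 / (a_i + a_j)) * (PI / (a_i + a_j))^(3/2);
// the exact prefactors used here are defined by the AtomicGaussian/Gaussian3D classes.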
public double getTotalPPOverlap(double[] transform){
Quaternion quat = new Quaternion(transform[0],transform[1],transform[2],transform[3]);
double Vtot = 0.0;
double[][] rotMatrix = quat.getRotMatrix().getArray();
ArrayList<PPGaussian> ppGaussians = molGauss.getPPGaussians();
Coordinates[] fitCenterModCoords = new Coordinates[ppGaussians.size()];
Coordinates[] fitDirectionalityMod = new Coordinates[ppGaussians.size()];
double normFactor = 1/(transform[0]*transform[0]+transform[1]*transform[1]+transform[2]*transform[2]+transform[3]*transform[3]);
for(int k=0;k<ppGaussians.size();k++) {
fitCenterModCoords[k]= ppGaussians.get(k).getRotatedCenter(rotMatrix, normFactor, new double[] {transform[4],transform[5],transform[6]}); //we operate on the transformed coordinates of the molecule to be fitted
fitDirectionalityMod[k] = ppGaussians.get(k).getRotatedDirectionality(rotMatrix, normFactor);
}
for(PPGaussian refPP:refMolGauss.getPPGaussians()){
int index = 0;
for(PPGaussian fitPP:molGauss.getPPGaussians()){
Vtot+=refPP.getWeight()*refPP.getSimilarity(fitPP, fitDirectionalityMod[index])* refPP.getVolumeOverlap(fitPP, fitCenterModCoords[index],10.0);
index+=1;
}
}
return Vtot;
}
/**
* Calculate the self-overlap of the given molecular volume.
* @param molGauss molecular volume whose atomic and inclusion Gaussians are overlapped with themselves
* @return total self-overlap volume
*/
public double getSelfAtomOverlap(MolecularVolume molGauss){
double Vtot = 0.0;
for(AtomicGaussian at:molGauss.getAtomicGaussians()){
for(AtomicGaussian at2:molGauss.getAtomicGaussians()){
Vtot += at.getVolumeOverlap(at2);
}
for(VolumeGaussian vg : molGauss.getVolumeGaussians()) {
if(vg.getRole()!=VolumeGaussian.INCLUSION)
continue;
Vtot += vg.getRole()*at.getVolumeOverlap(vg);
}
}
for(VolumeGaussian vg:molGauss.getVolumeGaussians()){
if(vg.getRole()!=VolumeGaussian.INCLUSION)
continue;
for(VolumeGaussian vg2 : molGauss.getVolumeGaussians()) {
//only consider self-overlap of inclusion spheres
if(vg2.getRole()!=VolumeGaussian.INCLUSION)
continue;
Vtot += vg2.getVolumeOverlap(vg);
}
}
return Vtot;
}
public double getSelfPPOverlap(MolecularVolume molGauss){
double Vtot = 0.0;
for(PPGaussian pp:molGauss.getPPGaussians()){
for(PPGaussian pp2:molGauss.getPPGaussians()){
Vtot+=pp.getWeight()*pp.getSimilarity(pp2)* pp.getVolumeOverlap(pp2);
}
}
return Vtot;
}
public double getSelfAtomOverlapRef(){
return getSelfAtomOverlap(refMolGauss);
}
public double getSelfAtomOverlapFit(){
return getSelfAtomOverlap(molGauss);
}
public double getSelfPPOverlapRef(){
return getSelfPPOverlap(refMolGauss);
}
public double getSelfPPOverlapFit(){
return getSelfPPOverlap(molGauss);
}
public static void rotateMol(Conformer conf,Quaternion rotor, double[] transl) {
double normFactor = 1/rotor.normSquared();
int nrOfAtoms = conf.getSize();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords = conf.getCoordinates(i);
double[][] m = rotor.getRotMatrix().getArray();
coords.rotate(m);
coords.scale(normFactor);
coords.add(transl[0],transl[1],transl[2]);
}
}
public static void rotateMol(StereoMolecule mol,Quaternion rotor, double[] transl) {
double normFactor = 1/rotor.normSquared();
int nrOfAtoms = mol.getAllAtoms();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords = mol.getCoordinates(i);
double[][] m = rotor.getRotMatrix().getArray();
coords.rotate(m);
coords.scale(normFactor);
coords.add(transl[0],transl[1],transl[2]);
}
}
public static void rotateMol(StereoMolecule mol,double[][] m) {
int nrOfAtoms = mol.getAllAtoms();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords = mol.getCoordinates(i);
coords.rotate(m);
}
}
public static void translateMol(StereoMolecule mol,double[] translate) {
int nrOfAtoms = mol.getAllAtoms();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords = mol.getCoordinates(i);
coords.x += translate[0];
coords.y += translate[1];
coords.z += translate[2];
}
}
public static void multiplyMatrix(double[][] r, double[][] s, double[][] rs) {
rs[0][0] = r[0][0]*s[0][0] + r[0][1]*s[1][0] + r[0][2]*s[2][0];
rs[0][1] = r[0][0]*s[0][1] + r[0][1]*s[1][1] + r[0][2]*s[2][1];
rs[0][2] = r[0][0]*s[0][2] + r[0][1]*s[1][2] + r[0][2]*s[2][2];
rs[1][0] = r[1][0]*s[0][0] + r[1][1]*s[1][0] + r[1][2]*s[2][0];
rs[1][1] = r[1][0]*s[0][1] + r[1][1]*s[1][1] + r[1][2]*s[2][1];
rs[1][2] = r[1][0]*s[0][2] + r[1][1]*s[1][2] + r[1][2]*s[2][2];
rs[2][0] = r[2][0]*s[0][0] + r[2][1]*s[1][0] + r[2][2]*s[2][0];
rs[2][1] = r[2][0]*s[0][1] + r[2][1]*s[1][1] + r[2][2]*s[2][1];
rs[2][2] = r[2][0]*s[0][2] + r[2][1]*s[1][2] + r[2][2]*s[2][2];
}
public static void multiplyInverseMatrix(double[][] r, double[][] s, double[][] rs) {
rs[0][0] = r[0][0]*s[0][0] + r[0][1]*s[0][1] + r[0][2]*s[0][2];
rs[0][1] = r[0][0]*s[1][0] + r[0][1]*s[1][1] + r[0][2]*s[1][2];
rs[0][2] = r[0][0]*s[2][0] + r[0][1]*s[2][1] + r[0][2]*s[2][2];
rs[1][0] = r[1][0]*s[0][0] + r[1][1]*s[0][1] + r[1][2]*s[0][2];
rs[1][1] = r[1][0]*s[1][0] + r[1][1]*s[1][1] + r[1][2]*s[1][2];
rs[1][2] = r[1][0]*s[2][0] + r[1][1]*s[2][1] + r[1][2]*s[2][2];
rs[2][0] = r[2][0]*s[0][0] + r[2][1]*s[0][1] + r[2][2]*s[0][2];
rs[2][1] = r[2][0]*s[1][0] + r[2][1]*s[1][1] + r[2][2]*s[1][2];
rs[2][2] = r[2][0]*s[2][0] + r[2][1]*s[2][1] + r[2][2]*s[2][2];
}
public static void getRotationMatrix(double theta, Coordinates axis, double[][] r) {
double x = axis.x;
double y = axis.y;
double z = axis.z;
double c = Math.cos(theta);
double s = Math.sin(theta);
double t = 1-c;
r[0][0] = c+x*x*t;
r[1][0] = x*y*t-z*s;
r[2][0] = x*z*t+y*s;
r[0][1] = x*y*t+z*s;
r[1][1] = c+y*y*t;
r[2][1] = y*z*t-x*s;
r[0][2] = z*x*t-y*s;
r[1][2] = z*y*t+x*s;
r[2][2] = c+z*z*t;
}
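// Note (added): this fills r with the axis-angle (Rodrigues) rotation matrix for a rotation of
// theta about the given axis; the axis is assumed to be normalized to unit length.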
public double[] findAlignment(double[][] transforms) {
return findAlignment(transforms,true);
}
public double[] findAlignment(double[][] transforms, boolean optimize) {
double [] alignment = {1.0,0.0,0.0,0.0,0.0,0.0,0.0};
double Oaa = getSelfAtomOverlapRef();
double Obb = getSelfAtomOverlapFit();
double ppOaa = getSelfPPOverlapRef();
double ppObb = getSelfPPOverlapFit();
EvaluableOverlap eval = new EvaluableOverlap(this, new double[7],ppWeight);
OptimizerLBFGS opt = new OptimizerLBFGS(200,0.001);
double maxSimilarity = 0.0;
double maxPPSimilarity = 0.0;
double maxVolSimilarity = 0.0;
double maxShapeSimilarity = 0.0;
// Iterate over all initial alignments; this is necessary since the optimizer only finds the
// nearest local minimum, so we need several different initial guesses.
for(double [] transform:transforms) {
double ppSimilarity = 0.0;
double volSimilarity = 0.0;
eval.setState(transform);
double[] bestTransform;
if(optimize)
bestTransform = opt.optimize(eval);
else
bestTransform = transform;
double atomOverlap = 0.0;
double ppOverlap = 0.0;
double similarity = 0.0;
ppOverlap = getTotalPPOverlap(bestTransform);
if(getRefMolGauss().getPPGaussians().size()==0 && getMolGauss().getPPGaussians().size()==0 )
ppSimilarity = 1.0;
else ppSimilarity=(ppOverlap/(ppOaa+ppObb-ppOverlap));
double correctionFactor = refMolGauss.getPPGaussians().size()/refMolGauss.getPPGaussians().stream().mapToDouble(g -> g.getWeight()).sum();
ppSimilarity*=correctionFactor;
if(ppSimilarity>1.0) //can happen because of weights
ppSimilarity = 1.0f;
double[] result = getTotalAtomOverlap(bestTransform);
atomOverlap = result[0];
double additionalVolOverlap = result[1];
double atomSimilarity = atomOverlap/(Oaa+Obb-atomOverlap);
if(atomSimilarity>1.0) //can happen because of weights
atomSimilarity = 1.0f;
volSimilarity = (additionalVolOverlap/atomOverlap);
similarity = (1.0-ppWeight)*atomSimilarity + ppWeight*ppSimilarity;
if (similarity>maxSimilarity) {
maxSimilarity = similarity;
maxVolSimilarity = volSimilarity;
maxShapeSimilarity = atomSimilarity;
maxPPSimilarity = ppSimilarity;
alignment = bestTransform;
}
}
if(maxSimilarity>1.0) // can happen because of manually placed inclusion spheres
maxSimilarity = 1.0;
return DoubleStream.concat(Arrays.stream(new double[] {maxSimilarity,maxPPSimilarity,maxShapeSimilarity,maxVolSimilarity}), Arrays.stream(alignment)).toArray();
}
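// Note (added): the atom and pharmacophore similarities above are Tanimoto-style ratios,
// overlap / (selfOverlapRef + selfOverlapFit - overlap), combined as
// (1 - ppWeight) * shapeSimilarity + ppWeight * ppSimilarity. The returned array holds
// {similarity, ppSimilarity, shapeSimilarity, volSimilarity} followed by the best 7-element
// transform (quaternion + translation).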
public static void rotateMol(Conformer conf, Matrix rotMat) {
int nrOfAtoms = conf.getSize();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords1 = conf.getCoordinates(i);
coords1.rotate(rotMat.getArray());
}
}
public static void rotateMol(StereoMolecule mol, double[] transform) {
Quaternion rotor = new Quaternion(transform[0],transform[1], transform [2], transform[3]);
double[] translate = {transform[4], transform[5], transform[6]};
PheSAAlignment.rotateMol(mol, rotor, translate);
}
public static void rotateMol(StereoMolecule mol, Matrix rotMat) {
int nrOfAtoms = mol.getAllAtoms();
for (int i=0;i<nrOfAtoms;i++) {
Coordinates coords1 = mol.getCoordinates(i);
coords1.rotate(rotMat.getArray());
}
}
public static void rotateMol(Conformer conf, double[] transform) {
Quaternion rotor = new Quaternion(transform[0],transform[1], transform [2], transform[3]);
double[] translate = {transform[4], transform[5], transform[6]};
PheSAAlignment.rotateMol(conf, rotor, translate);
}
public static class PheSAResult {
private StereoMolecule refMol;
private StereoMolecule fitMol;
private double sim;
public PheSAResult(StereoMolecule refMol, StereoMolecule fitMol, double sim) {
this.refMol = refMol;
this.fitMol = fitMol;
this.sim = sim;
}
public StereoMolecule getRefMol() {
return refMol;
}
public StereoMolecule getFitMol() {
return fitMol;
}
public double getSim() {
return sim;
}
}
}
|
|
/**
* Copyright 2011, Big Switch Networks, Inc.
* Originally created by David Erickson, Stanford University
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
/**
*
*/
package net.floodlightcontroller.packet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
* @author David Erickson ([email protected])
*
*/
public class IPv4 extends BasePacket {
public static final byte PROTOCOL_ICMP = 0x1;
public static final byte PROTOCOL_TCP = 0x6;
public static final byte PROTOCOL_UDP = 0x11;
public static Map<Byte, Class<? extends IPacket>> protocolClassMap;
static {
protocolClassMap = new HashMap<Byte, Class<? extends IPacket>>();
protocolClassMap.put(PROTOCOL_ICMP, ICMP.class);
protocolClassMap.put(PROTOCOL_TCP, TCP.class);
protocolClassMap.put(PROTOCOL_UDP, UDP.class);
}
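// Illustrative usage sketch (added, not part of the original source):
//   IPv4 ip = new IPv4()
//       .setSourceAddress("192.168.0.1")
//       .setDestinationAddress("192.168.0.2")
//       .setTtl((byte) 64)
//       .setProtocol(IPv4.PROTOCOL_UDP);
//   byte[] wire = ip.serialize(); // header checksum is computed here because it is still 0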
protected byte version;
protected byte headerLength;
protected byte diffServ;
protected short totalLength;
protected short identification;
protected byte flags;
protected short fragmentOffset;
protected byte ttl;
protected byte protocol;
protected short checksum;
protected int sourceAddress;
protected int destinationAddress;
protected byte[] options;
protected boolean isTruncated;
/**
* Default constructor that sets the version to 4.
*/
public IPv4() {
super();
this.version = 4;
isTruncated = false;
}
/**
* @return the version
*/
public byte getVersion() {
return version;
}
/**
* @param version the version to set
*/
public IPv4 setVersion(byte version) {
this.version = version;
return this;
}
/**
* @return the headerLength
*/
public byte getHeaderLength() {
return headerLength;
}
/**
* @return the diffServ
*/
public byte getDiffServ() {
return diffServ;
}
/**
* @param diffServ the diffServ to set
*/
public IPv4 setDiffServ(byte diffServ) {
this.diffServ = diffServ;
return this;
}
/**
* @return the totalLength
*/
public short getTotalLength() {
return totalLength;
}
/**
* @return the identification
*/
public short getIdentification() {
return identification;
}
public boolean isTruncated() {
return isTruncated;
}
public void setTruncated(boolean isTruncated) {
this.isTruncated = isTruncated;
}
/**
* @param identification the identification to set
*/
public IPv4 setIdentification(short identification) {
this.identification = identification;
return this;
}
/**
* @return the flags
*/
public byte getFlags() {
return flags;
}
/**
* @param flags the flags to set
*/
public IPv4 setFlags(byte flags) {
this.flags = flags;
return this;
}
/**
* @return the fragmentOffset
*/
public short getFragmentOffset() {
return fragmentOffset;
}
/**
* @param fragmentOffset the fragmentOffset to set
*/
public IPv4 setFragmentOffset(short fragmentOffset) {
this.fragmentOffset = fragmentOffset;
return this;
}
/**
* @return the ttl
*/
public byte getTtl() {
return ttl;
}
/**
* @param ttl the ttl to set
*/
public IPv4 setTtl(byte ttl) {
this.ttl = ttl;
return this;
}
/**
* @return the protocol
*/
public byte getProtocol() {
return protocol;
}
/**
* @param protocol the protocol to set
*/
public IPv4 setProtocol(byte protocol) {
this.protocol = protocol;
return this;
}
/**
* @return the checksum
*/
public short getChecksum() {
return checksum;
}
/**
* @param checksum the checksum to set
*/
public IPv4 setChecksum(short checksum) {
this.checksum = checksum;
return this;
}
@Override
public void resetChecksum() {
this.checksum = 0;
super.resetChecksum();
}
/**
* @return the sourceAddress
*/
public int getSourceAddress() {
return sourceAddress;
}
/**
* @param sourceAddress the sourceAddress to set
*/
public IPv4 setSourceAddress(int sourceAddress) {
this.sourceAddress = sourceAddress;
return this;
}
/**
* @param sourceAddress the sourceAddress to set
*/
public IPv4 setSourceAddress(String sourceAddress) {
this.sourceAddress = IPv4.toIPv4Address(sourceAddress);
return this;
}
/**
* @return the destinationAddress
*/
public int getDestinationAddress() {
return destinationAddress;
}
/**
* @param destinationAddress the destinationAddress to set
*/
public IPv4 setDestinationAddress(int destinationAddress) {
this.destinationAddress = destinationAddress;
return this;
}
/**
* @param destinationAddress the destinationAddress to set
*/
public IPv4 setDestinationAddress(String destinationAddress) {
this.destinationAddress = IPv4.toIPv4Address(destinationAddress);
return this;
}
/**
* @return the options
*/
public byte[] getOptions() {
return options;
}
/**
* @param options the options to set
*/
public IPv4 setOptions(byte[] options) {
if (options != null && (options.length % 4) > 0)
throw new IllegalArgumentException(
"Options length must be a multiple of 4");
this.options = options;
return this;
}
/**
* Serializes the packet. headerLength and totalLength are always recomputed,
* and the checksum is recomputed only if it is 0 at the time serialize() is
* called.
*/
public byte[] serialize() {
byte[] payloadData = null;
if (payload != null) {
payload.setParent(this);
payloadData = payload.serialize();
}
int optionsLength = 0;
if (this.options != null)
optionsLength = this.options.length / 4;
this.headerLength = (byte) (5 + optionsLength);
this.totalLength = (short) (this.headerLength * 4 + ((payloadData == null) ? 0
: payloadData.length));
byte[] data = new byte[this.totalLength];
ByteBuffer bb = ByteBuffer.wrap(data);
bb.put((byte) (((this.version & 0xf) << 4) | (this.headerLength & 0xf)));
bb.put(this.diffServ);
bb.putShort(this.totalLength);
bb.putShort(this.identification);
bb.putShort((short) (((this.flags & 0x7) << 13) | (this.fragmentOffset & 0x1fff)));
bb.put(this.ttl);
bb.put(this.protocol);
bb.putShort(this.checksum);
bb.putInt(this.sourceAddress);
bb.putInt(this.destinationAddress);
if (this.options != null)
bb.put(this.options);
if (payloadData != null)
bb.put(payloadData);
// compute checksum if needed
if (this.checksum == 0) {
bb.rewind();
int accumulation = 0;
for (int i = 0; i < this.headerLength * 2; ++i) {
accumulation += 0xffff & bb.getShort();
}
accumulation = ((accumulation >> 16) & 0xffff)
+ (accumulation & 0xffff);
this.checksum = (short) (~accumulation & 0xffff);
bb.putShort(10, this.checksum);
}
return data;
}
@Override
public IPacket deserialize(byte[] data, int offset, int length) {
ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
short sscratch;
this.version = bb.get();
this.headerLength = (byte) (this.version & 0xf);
this.version = (byte) ((this.version >> 4) & 0xf);
this.diffServ = bb.get();
this.totalLength = bb.getShort();
this.identification = bb.getShort();
sscratch = bb.getShort();
this.flags = (byte) ((sscratch >> 13) & 0x7);
this.fragmentOffset = (short) (sscratch & 0x1fff);
this.ttl = bb.get();
this.protocol = bb.get();
this.checksum = bb.getShort();
this.sourceAddress = bb.getInt();
this.destinationAddress = bb.getInt();
if (this.headerLength > 5) {
int optionsLength = (this.headerLength - 5) * 4;
this.options = new byte[optionsLength];
bb.get(this.options);
}
IPacket payload;
if (IPv4.protocolClassMap.containsKey(this.protocol)) {
Class<? extends IPacket> clazz = IPv4.protocolClassMap.get(this.protocol);
try {
payload = clazz.newInstance();
} catch (Exception e) {
throw new RuntimeException("Error parsing payload for IPv4 packet", e);
}
} else {
payload = new Data();
}
this.payload = payload.deserialize(data, bb.position(), bb.limit()-bb.position());
this.payload.setParent(this);
if (this.totalLength != length)
this.isTruncated = true;
else
this.isTruncated = false;
return this;
}
/**
* Accepts an IPv4 address of the form xxx.xxx.xxx.xxx, ie 192.168.0.1 and
* returns the corresponding 32 bit integer.
* @param ipAddress
* @return
*/
public static int toIPv4Address(String ipAddress) {
if (ipAddress == null)
throw new IllegalArgumentException("Specified IPv4 address must " +
"contain 4 sets of numerical digits separated by periods");
String[] octets = ipAddress.split("\\.");
if (octets.length != 4)
throw new IllegalArgumentException("Specified IPv4 address must " +
"contain 4 sets of numerical digits separated by periods");
int result = 0;
for (int i = 0; i < 4; ++i) {
//String test = Integer.toBinaryString(Integer.valueOf(octets[i]));
//result = Integer.valueOf(octets[i]) << ((3-i)*8);
//String test2 = Integer.toBinaryString(result);
result |= Integer.valueOf(octets[i]) << ((3-i)*8);
}
return result;
}
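// Worked example (added): toIPv4Address("192.168.0.1") returns
// (192 << 24) | (168 << 16) | (0 << 8) | 1 = 0xC0A80001.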
/**
* Accepts an IPv4 address in a byte array and returns the corresponding
* 32-bit integer value.
* @param ipAddress
* @return
*/
public static int toIPv4Address(byte[] ipAddress) {
int ip = 0;
for (int i = 0; i < 4; i++) {
int t = (ipAddress[i] & 0xff) << ((3-i)*8);
ip |= t;
}
return ip;
}
/**
* Accepts an IPv4 address and returns a string of the form xxx.xxx.xxx.xxx,
* ie 192.168.0.1
*
* @param ipAddress
* @return
*/
public static String fromIPv4Address(int ipAddress) {
StringBuffer sb = new StringBuffer();
int result = 0;
for (int i = 0; i < 4; ++i) {
result = (ipAddress >> ((3-i)*8)) & 0xff;
sb.append(Integer.valueOf(result).toString());
if (i != 3)
sb.append(".");
}
return sb.toString();
}
/**
* Accepts a collection of IPv4 addresses as integers and returns a single
* String useful in toString() methods that contain collections of IP
* addresses.
*
* @param ipAddresses collection
* @return
*/
public static String fromIPv4AddressCollection(Collection<Integer> ipAddresses) {
if (ipAddresses == null)
return "null";
StringBuffer sb = new StringBuffer();
sb.append("[");
for (Integer ip : ipAddresses) {
sb.append(fromIPv4Address(ip));
sb.append(",");
}
sb.replace(sb.length()-1, sb.length(), "]");
return sb.toString();
}
/**
* Accepts an IPv4 address of the form xxx.xxx.xxx.xxx, ie 192.168.0.1 and
* returns the corresponding byte array.
* @param ipAddress The IP address in the form xxx.xxx.xxx.xxx.
* @return The IP address separated into bytes
*/
public static byte[] toIPv4AddressBytes(String ipAddress) {
String[] octets = ipAddress.split("\\.");
if (octets.length != 4)
throw new IllegalArgumentException("Specified IPv4 address must" +
"contain 4 sets of numerical digits separated by periods");
byte[] result = new byte[4];
for (int i = 0; i < 4; ++i) {
result[i] = Integer.valueOf(octets[i]).byteValue();
}
return result;
}
/**
* Accepts an IPv4 address in the form of an integer and
* returns the corresponding byte array.
* @param ipAddress The IP address as an integer.
* @return The IP address separated into bytes.
*/
public static byte[] toIPv4AddressBytes(int ipAddress) {
return new byte[] {
(byte)(ipAddress >>> 24),
(byte)(ipAddress >>> 16),
(byte)(ipAddress >>> 8),
(byte)ipAddress};
}
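/*
* Round-trip sketch for the static conversion helpers above (editor's illustration, not part
* of the original class; the sample address is an arbitrary assumption).
*
*   int packed = IPv4.toIPv4Address("192.168.0.1");      // 0xC0A80001
*   String dotted = IPv4.fromIPv4Address(packed);        // "192.168.0.1"
*   byte[] raw = IPv4.toIPv4AddressBytes(dotted);        // {(byte) 192, (byte) 168, 0, 1}
*   assert packed == IPv4.toIPv4Address(raw);
*/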
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 2521;
int result = super.hashCode();
result = prime * result + checksum;
result = prime * result + destinationAddress;
result = prime * result + diffServ;
result = prime * result + flags;
result = prime * result + fragmentOffset;
result = prime * result + headerLength;
result = prime * result + identification;
result = prime * result + Arrays.hashCode(options);
result = prime * result + protocol;
result = prime * result + sourceAddress;
result = prime * result + totalLength;
result = prime * result + ttl;
result = prime * result + version;
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (!(obj instanceof IPv4))
return false;
IPv4 other = (IPv4) obj;
if (checksum != other.checksum)
return false;
if (destinationAddress != other.destinationAddress)
return false;
if (diffServ != other.diffServ)
return false;
if (flags != other.flags)
return false;
if (fragmentOffset != other.fragmentOffset)
return false;
if (headerLength != other.headerLength)
return false;
if (identification != other.identification)
return false;
if (!Arrays.equals(options, other.options))
return false;
if (protocol != other.protocol)
return false;
if (sourceAddress != other.sourceAddress)
return false;
if (totalLength != other.totalLength)
return false;
if (ttl != other.ttl)
return false;
if (version != other.version)
return false;
return true;
}
}
|
|
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.messaging;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.messaging.exceptions.MessagingException;
import org.wso2.carbon.messaging.exceptions.NelException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
/**
* Data carrier between the components.
*/
public abstract class CarbonMessage {
private static final Logger LOG = LoggerFactory.getLogger(CarbonMessage.class);
protected Headers headers = new Headers();
protected Map<String, Object> properties = new HashMap<>();
protected BlockingQueue messageBody = new LinkedBlockingQueue<>();
protected Stack<FaultHandler> faultHandlerStack = new Stack<>();
protected MessageDataSource messageDataSource;
/**
* @deprecated This field will be replaced by {@link #messagingException}.
*/
@Deprecated
protected NelException nelException = null;
/**
* Exception related to fault CarbonMessage.
*/
protected MessagingException messagingException = null;
protected ByteBufferInputStream byteBufferInputStream;
private ByteBufferOutputStream byteBufferOutputStream;
protected Lock lock = new ReentrantLock();
protected boolean bufferContent = true;
protected AtomicBoolean alreadyRead = new AtomicBoolean(false);
private AtomicBoolean endOfMsgAdded = new AtomicBoolean(false);
private Writer writer;
private boolean isMessageBodyAdded;
public CarbonMessage() {
}
/**
* Lets callers avoid buffering content in the internal message queue.
* Use this constructor when creating a response message whose content should be written
* directly to the channel through the registered {@link Writer}.
*
* @param buffercontent if true, content is buffered in the internal message queue;
*                      if false, content is written directly to the channel via the registered writer
*/
public CarbonMessage(Boolean buffercontent) {
this.bufferContent = buffercontent;
}
public void setBufferContent(boolean bufferContent) {
if (isMessageBodyAdded) {
throw new IllegalStateException(
"CarbonMessage#setBufferContent cannot " + "be called after adding message body");
}
this.bufferContent = bufferContent;
}
public boolean isEndOfMsgAdded() {
return endOfMsgAdded.get();
}
public boolean isEmpty() {
return messageBody.isEmpty();
}
public ByteBuffer getMessageBody() {
try {
return (ByteBuffer) messageBody.take();
} catch (InterruptedException e) {
LOG.error("Error while retrieving chunk from queue.", e);
return null;
}
}
/**
* This call blocks until all of the message content has been received.
*
* @return Full message body as list of {@link ByteBuffer}
*/
public List<ByteBuffer> getFullMessageBody() {
List<ByteBuffer> byteBufferList = new ArrayList<>();
while (true) {
try {
if (endOfMsgAdded.get() && messageBody.isEmpty()) {
break;
}
byteBufferList.add((ByteBuffer) messageBody.take());
} catch (InterruptedException e) {
LOG.error("Error while getting full message body", e);
}
}
return byteBufferList;
}
public void addMessageBody(ByteBuffer msgBody) {
isMessageBodyAdded = true;
if (bufferContent) {
messageBody.add(msgBody);
} else {
if (writer != null) {
writer.write(msgBody);
} else {
LOG.error("Cannot write content no registered writer found");
}
}
}
/**
* Method to be used for resource cleanup after the Carbon message has been consumed.
*/
public void release() {
}
public Headers getHeaders() {
return headers;
}
public String getHeader(String key) {
return headers.get(key);
}
public void setHeader(String key, String value) {
headers.set(key, value);
}
public void setHeaders(Map<String, String> headerMap) {
headers.set(headerMap);
}
public void setHeaders(List<Header> headerList) {
headers.set(headerList);
}
public Object getProperty(String key) {
if (properties != null) {
return properties.get(key);
} else {
return null;
}
}
public Map<String, Object> getProperties() {
return properties;
}
public void setProperty(String key, Object value) {
properties.put(key, value);
}
public void removeHeader(String key) {
headers.remove(key);
}
public void removeProperty(String key) {
properties.remove(key);
}
public Stack<FaultHandler> getFaultHandlerStack() {
return faultHandlerStack;
}
public void setFaultHandlerStack(Stack<FaultHandler> faultHandlerStack) {
this.faultHandlerStack = faultHandlerStack;
}
public Lock getLock() {
return lock;
}
public int getFullMessageLength() {
List<ByteBuffer> fullMessageBody = getFullMessageBody();
int size = fullMessageBody.stream().mapToInt(ByteBuffer::limit).sum();
fullMessageBody.forEach(this::addMessageBody);
return size;
}
public List<ByteBuffer> getCopyOfFullMessageBody() {
List<ByteBuffer> fullMessageBody = getFullMessageBody();
List<ByteBuffer> newCopy = fullMessageBody.stream().map(byteBuffer -> MessageUtil.deepCopy(byteBuffer))
.collect(Collectors.toList());
fullMessageBody.forEach(byteBuffer -> addMessageBody(byteBuffer));
return newCopy;
}
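/*
* Editor's note: getFullMessageLength() and getCopyOfFullMessageBody() both drain the content
* queue via getFullMessageBody() and then re-add every buffer with addMessageBody(), so the
* message remains readable afterwards. Caller-side sketch ("msg" is an assumed concrete
* CarbonMessage instance):
*
*   int length = msg.getFullMessageLength();                 // content is preserved
*   List<ByteBuffer> copy = msg.getCopyOfFullMessageBody();  // deep copies; original preserved
*/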
public void setEndOfMsgAdded(boolean endOfMsgAdded) {
this.endOfMsgAdded.compareAndSet(false, endOfMsgAdded);
if (byteBufferOutputStream != null) {
try {
this.byteBufferOutputStream.flush();
} catch (IOException e) {
LOG.error("Exception occured while flushing the buffer", e);
byteBufferOutputStream.close();
}
}
if (writer != null) {
writer.writeLastContent(this);
}
}
public Writer getWriter() {
return writer;
}
public void setWriter(Writer writer) {
this.writer = writer;
}
public boolean isBufferContent() {
return bufferContent;
}
public MessageDataSource getMessageDataSource() {
return messageDataSource;
}
public void setMessageDataSource(MessageDataSource messageDataSource) {
this.messageDataSource = messageDataSource;
}
public boolean isAlreadyRead() {
return alreadyRead.get();
}
public void setAlreadyRead(boolean alreadyRead) {
this.alreadyRead.set(alreadyRead);
}
/**
* This is a blocking call that provides the full message as an InputStream;
* reading it removes the original content from the queue.
*
* @return InputStream Instance.
*/
public InputStream getInputStream() {
if (byteBufferInputStream == null) {
byteBufferInputStream = new ByteBufferInputStream();
}
return byteBufferInputStream;
}
/**
* Provides an OutputStream for writing a byte stream into the message content
* queue.
*
* @return OutputStream Instance.
*/
public OutputStream getOutputStream() {
if (byteBufferOutputStream == null) {
byteBufferOutputStream = new ByteBufferOutputStream();
}
return byteBufferOutputStream;
}
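/*
* Usage sketch (editor's illustration, not part of the original class): producing content
* through the OutputStream and consuming it through the InputStream. "msg" is an assumed
* concrete CarbonMessage instance.
*
*   OutputStream out = msg.getOutputStream();
*   out.write("hello".getBytes(StandardCharsets.UTF_8));
*   out.flush();
*   msg.setEndOfMsgAdded(true);
*
*   InputStream in = msg.getInputStream();
*   int b;
*   while ((b = in.read()) != -1) {
*       // consume bytes
*   }
*/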
/**
* An InputStream view over the queued ByteBuffers.
* Thread safety is not a concern here: the stream is used only once per message
* instance, from a single thread.
*/
protected class ByteBufferInputStream extends InputStream {
private int count;
private boolean chunkFinished = true;
private int limit;
private ByteBuffer byteBuffer;
@Override
public int read() throws IOException {
setAlreadyRead(true);
if (isEndOfMsgAdded() && isEmpty() && chunkFinished) {
return -1;
} else if (chunkFinished) {
byteBuffer = getMessageBody();
count = 0;
limit = byteBuffer.limit();
if (limit == 0) {
return -1;
}
chunkFinished = false;
}
count++;
if (count == limit) {
chunkFinished = true;
}
return byteBuffer.get() & 0xff;
}
}
/**
* An OutputStream that writes a byte stream into ByteBuffers and adds those
* ByteBuffers to the content queue.
* Thread safety is not a concern here: the stream is used by only one thread
* at any particular time.
*/
protected class ByteBufferOutputStream extends OutputStream {
private ByteBuffer buffer;
@Override
public void write(int b) throws IOException {
if (buffer == null) {
buffer = BufferFactory.getInstance().getBuffer();
}
if (buffer.hasRemaining()) {
buffer.put((byte) b);
} else {
buffer.flip();
addMessageBody(buffer);
buffer = BufferFactory.getInstance().getBuffer();
buffer.put((byte) b);
}
}
@Override
public void flush() throws IOException {
if (buffer != null && buffer.position() > 0) {
buffer.flip();
addMessageBody(buffer);
buffer = BufferFactory.getInstance().getBuffer();
}
}
@Override
public void close() {
try {
super.close();
} catch (IOException e) {
LOG.error("Error while closing output stream but underlying resources are reset", e);
} finally {
byteBufferOutputStream = null;
buffer = null;
}
}
}
/**
* Get NelException
*
* @return NelException instance.
* @deprecated Get NelException method will be replaced by {@link #getMessagingException()} method.
*/
@Deprecated
public NelException getNelException() {
return nelException;
}
/**
* Set NelException.
*
* @param nelException NelException instance related to faulty CarbonMessage.
* @deprecated Set NelException will be replaced by
* {@link #setMessagingException(MessagingException carbonMessageException)} method.
*/
@Deprecated
public void setNelException(NelException nelException) {
this.nelException = nelException;
}
/**
* Get the MessagingException.
*
* @return MessagingException instance related to a faulty CarbonMessage.
*/
public MessagingException getMessagingException() {
return messagingException;
}
/**
* Set the MessagingException.
*
* @param messagingException exception related to faulty CarbonMessage.
*/
public void setMessagingException(MessagingException messagingException) {
this.messagingException = messagingException;
}
public boolean isFaulty() {
// TODO: Remove {@link #nelException} reference.
return (this.messagingException != null || this.nelException != null);
}
}
|
|
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.bridge.model;
import javax.jms.ConnectionFactory;
import javax.naming.NamingException;
import javax.naming.Referenceable;
import javax.xml.bind.annotation.*;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import io.fabric8.bridge.internal.ConnectionFactoryAdapter;
import org.springframework.jms.support.destination.DestinationResolver;
import org.springframework.jms.support.destination.DynamicDestinationResolver;
/**
* @author Dhiraj Bokde
*
*/
@XmlRootElement(name="broker-config")
@XmlAccessorType(XmlAccessType.NONE)
public class BrokerConfig extends IdentifiedType {
public static final int DEFAULT_MAX_CONNECTIONS = 10;
@XmlAttribute
private String brokerUrl;
// number of connections for default connection factory created using brokerUrl
@XmlAttribute
private int maxConnections = DEFAULT_MAX_CONNECTIONS;
// use the bean name to lookup in BeanFactory
// represents the exported connection factory for this bridge
@XmlElement(name="exportedConnectionFactory")
@XmlJavaTypeAdapter(ConnectionFactoryAdapter.class)
@XmlMimeType("application/octet-stream")
private ConnectionFactory connectionFactory;
// place holder for Spring bean definition parser
@XmlAttribute
private String connectionFactoryRef;
@XmlAttribute
private String userName;
@XmlAttribute
private String password;
@XmlAttribute
private String clientId;
// use a bean name for marshaling to a remote broker
private DestinationResolver destinationResolver = new DynamicDestinationResolver();
@XmlAttribute
private String destinationResolverRef;
public final String getBrokerUrl() {
return brokerUrl;
}
public final void setBrokerUrl(String brokerUrl) {
this.brokerUrl = brokerUrl;
}
public final void setMaxConnections(int maxConnections) {
this.maxConnections = maxConnections;
}
public final int getMaxConnections() {
return maxConnections;
}
public final ConnectionFactory getConnectionFactory() {
return connectionFactory;
}
public final void setConnectionFactory(
ConnectionFactory connectionFactory) {
this.connectionFactory = connectionFactory;
}
public void setConnectionFactoryRef(String connectionFactoryRef) {
this.connectionFactoryRef = connectionFactoryRef;
}
public String getConnectionFactoryRef() {
return connectionFactoryRef;
}
public final String getUserName() {
return userName;
}
public final void setUserName(String userName) {
this.userName = userName;
}
public final String getPassword() {
return password;
}
public final void setPassword(String password) {
this.password = password;
}
public final String getClientId() {
return clientId;
}
public final void setClientId(String clientId) {
this.clientId = clientId;
}
public void setDestinationResolver(DestinationResolver destinationResolver) {
this.destinationResolver = destinationResolver;
}
public DestinationResolver getDestinationResolver() {
return destinationResolver;
}
public String getDestinationResolverRef() {
return destinationResolverRef;
}
public void setDestinationResolverRef(
String destinationResolverRef) {
this.destinationResolverRef = destinationResolverRef;
}
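/*
* Configuration sketch (editor's illustration, not part of the original class). The values
* below are assumptions chosen for the example.
*
*   BrokerConfig config = new BrokerConfig();
*   config.setBrokerUrl("tcp://localhost:61616");
*   config.setMaxConnections(BrokerConfig.DEFAULT_MAX_CONNECTIONS);
*   config.setUserName("admin");
*   config.setPassword("secret");
*   config.setClientId("bridge-1");
*/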
@Override
public String toString() {
return ReflectionToStringBuilder.toString(this,ToStringStyle.SHORT_PREFIX_STYLE);
}
@Override
public int hashCode() {
int val = 0;
val += (brokerUrl != null) ? brokerUrl.hashCode() : 0;
val += maxConnections;
val += (userName != null) ? userName.hashCode() : 0;
val += (password != null) ? password.hashCode() : 0;
val += (clientId != null) ? clientId.hashCode() : 0;
val += (connectionFactoryRef != null) ? connectionFactoryRef.hashCode() : 0;
val += (destinationResolverRef != null) ? destinationResolverRef.hashCode() : 0;
if (connectionFactory != null) {
try {
val += ((Referenceable)connectionFactory).getReference().hashCode();
} catch (NamingException e) {
throw new IllegalArgumentException("Could not get Reference from ConnectionFactory: "
+ e.getMessage(), e);
}
}
return val;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj instanceof BrokerConfig) {
BrokerConfig config = (BrokerConfig) obj;
// ignore destinationResolver
boolean retVal =
(this.brokerUrl != null ? this.brokerUrl.equals(config.brokerUrl) : config.brokerUrl == null)
&& this.maxConnections == config.maxConnections
&& (this.userName != null ? this.userName.equals(config.userName) : config.userName == null)
&& (this.password != null ? this.password.equals(config.password) : config.password == null)
&& (this.clientId != null ? this.clientId.equals(config.clientId) : config.clientId == null)
&& (this.connectionFactoryRef != null ? this.connectionFactoryRef.equals(config.connectionFactoryRef)
: config.connectionFactoryRef == null)
&& (this.destinationResolverRef != null ? this.destinationResolverRef.equals(config.destinationResolverRef)
: config.destinationResolverRef == null);
if (retVal && connectionFactory != null) {
if (config.connectionFactory == null) {
retVal = false;
} else {
try {
retVal = ((Referenceable) connectionFactory).getReference().equals(
((Referenceable) config.connectionFactory).getReference());
} catch (NamingException e) {
throw new IllegalArgumentException("Could not get Reference from ConnectionFactory: "
+ e.getMessage(), e);
}
}
} else if (retVal) {
retVal = (config.connectionFactory == null);
}
return retVal;
}
return false;
}
}
|
|
package net.glowstone.io.nbt;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import net.glowstone.GlowOfflinePlayer;
import net.glowstone.GlowServer;
import net.glowstone.entity.GlowPlayer;
import net.glowstone.i18n.ConsoleMessages;
import net.glowstone.i18n.GlowstoneMessages;
import net.glowstone.io.PlayerDataService;
import net.glowstone.io.entity.EntityStorage;
import net.glowstone.util.UuidUtils;
import net.glowstone.util.nbt.CompoundTag;
import net.glowstone.util.nbt.NbtInputStream;
import net.glowstone.util.nbt.NbtOutputStream;
import org.bukkit.Location;
import org.bukkit.OfflinePlayer;
import org.bukkit.World;
/**
* Standard NBT-based player data storage.
*/
public class NbtPlayerDataService implements PlayerDataService {
private final GlowServer server;
private final File playerDir;
public NbtPlayerDataService(GlowServer server, File playerDir) {
this.server = server;
this.playerDir = playerDir;
}
private File getPlayerFile(UUID uuid) {
if (!playerDir.isDirectory() && !playerDir.mkdirs()) {
ConsoleMessages.Warn.Io.MKDIR_FAILED.log(playerDir);
}
return new File(playerDir, UuidUtils.toString(uuid) + ".dat");
}
private void readDataImpl(GlowPlayer player, CompoundTag playerTag) {
EntityStorage.load(player, playerTag);
}
@Override
public CompletableFuture<Collection<OfflinePlayer>> getOfflinePlayers() {
// list files in directory
File[] files = playerDir.listFiles();
if (files == null) {
return CompletableFuture.completedFuture(Arrays.asList());
}
List<CompletableFuture<GlowOfflinePlayer>> futures = new ArrayList<>(files.length);
for (File file : files) {
// first, make sure it looks like a player file: a 36-character UUID plus ".dat" is 40 characters
String name = file.getName();
if (name.length() != 40 || !name.endsWith(".dat")) { // NON-NLS
continue;
}
// get the UUID
UUID uuid;
try {
uuid = UuidUtils.fromString(name.substring(0, 36));
} catch (IllegalArgumentException e) {
continue;
}
// creating the OfflinePlayer will read the data
futures.add(GlowOfflinePlayer.getOfflinePlayer(server, uuid));
}
CompletableFuture<Void> gotAll = CompletableFuture.allOf(futures.toArray(
new CompletableFuture[futures.size()]));
return gotAll.thenApplyAsync((v) ->
futures.stream().map((f) -> f.join()).collect(Collectors.toList()));
}
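/*
* Caller-side sketch (editor's illustration, not part of the original class): the returned
* future completes once every per-player future has completed, so callers can simply join it.
* "service" is an assumed PlayerDataService reference.
*
*   Collection<OfflinePlayer> players = service.getOfflinePlayers().join();
*/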
@Override
public PlayerReader beginReadingData(UUID uuid) {
return new NbtPlayerReader(getPlayerFile(uuid));
}
@Override
public void readData(GlowPlayer player) {
File playerFile = getPlayerFile(player.getUniqueId());
CompoundTag playerTag = new CompoundTag();
if (playerFile.exists()) {
try (NbtInputStream in = new NbtInputStream(new FileInputStream(playerFile))) {
playerTag = in.readCompound();
} catch (IOException e) {
player.kickPlayer(GlowstoneMessages.Kick.FILE_READ.get());
ConsoleMessages.Error.Io.PLAYER_READ.log(e, player.getName(), playerFile);
}
}
readDataImpl(player, playerTag);
}
@Override
public void writeData(GlowPlayer player) {
File playerFile = getPlayerFile(player.getUniqueId());
CompoundTag tag = new CompoundTag();
EntityStorage.save(player, tag);
try (NbtOutputStream out = new NbtOutputStream(new FileOutputStream(playerFile))) {
out.writeTag(tag);
} catch (IOException e) {
player.kickPlayer(GlowstoneMessages.Kick.FILE_WRITE.get());
ConsoleMessages.Error.Io.PLAYER_WRITE.log(e, player.getName(), playerFile);
}
}
@SuppressWarnings("HardCodedStringLiteral")
private class NbtPlayerReader implements PlayerReader {
private CompoundTag tag = new CompoundTag();
private boolean hasPlayed;
public NbtPlayerReader(File playerFile) {
if (playerFile.exists()) {
try (NbtInputStream in = new NbtInputStream(new FileInputStream(playerFile))) {
tag = in.readCompound();
hasPlayed = true;
} catch (IOException e) {
ConsoleMessages.Error.Io.PLAYER_READ_UNKNOWN.log(e, playerFile);
}
}
}
private void checkOpen() {
if (tag == null) {
throw new IllegalStateException("cannot access fields after close");
}
}
@Override
public boolean hasPlayedBefore() {
return hasPlayed;
}
@Override
public Location getLocation() {
checkOpen();
World world = NbtSerialization.readWorld(server, tag);
if (world != null) {
return NbtSerialization.listTagsToLocation(world, tag);
}
return null;
}
@Override
public Location getBedSpawnLocation() {
checkOpen();
// check that all fields are present
if (!tag.isString("SpawnWorld") || !tag.isInt("SpawnX") || !tag.isInt("SpawnY") || !tag
.isInt("SpawnZ")) {
return null;
}
// look up world
World world = server.getWorld(tag.getString("SpawnWorld"));
if (world == null) {
return null;
}
// return location
return new Location(world, tag.getInt("SpawnX"), tag.getInt("SpawnY"),
tag.getInt("SpawnZ"));
}
@Override
public long getFirstPlayed() {
checkOpen();
long[] out = {0};
tag.readCompound("bukkit", bukkit -> bukkit.readLong("firstPlayed", x -> out[0] = x));
return out[0];
}
@Override
public long getLastPlayed() {
checkOpen();
long[] out = {0};
tag.readCompound("bukkit", bukkit -> bukkit.readLong("lastPlayed", x -> out[0] = x));
return out[0];
}
@Override
public String getLastKnownName() {
checkOpen();
String[] out = {null};
tag.readCompound("bukkit",
bukkit -> bukkit.readString("lastKnownName", x -> out[0] = x));
return out[0];
}
@Override
public void readData(GlowPlayer player) {
checkOpen();
readDataImpl(player, tag);
}
@Override
public void close() {
tag = null;
}
}
}
|
|
/*
* Copyright (c) 1997, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.x509;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;
import java.security.cert.CertificateException;
import sun.security.util.*;
/**
* Represent the Policy Mappings Extension.
*
* This extension, if present, identifies the certificate policies considered
* identical between the issuing and the subject CA.
* <p>Extensions are addiitonal attributes which can be inserted in a X509
* v3 certificate. For example a "Driving License Certificate" could have
* the driving license number as a extension.
*
* <p>Extensions are represented as a sequence of the extension identifier
* (Object Identifier), a boolean flag stating whether the extension is to
* be treated as being critical and the extension value itself (this is again
* a DER encoding of the extension value).
*
* @author Amit Kapoor
* @author Hemma Prafullchandra
* @see Extension
* @see CertAttrSet
*/
public class PolicyMappingsExtension extends Extension
implements CertAttrSet<String> {
/**
* Identifier for this attribute, to be used with the
* get, set, delete methods of Certificate, x509 type.
*/
public static final String IDENT = "x509.info.extensions.PolicyMappings";
/**
* Attribute names.
*/
public static final String NAME = "PolicyMappings";
public static final String MAP = "map";
// Private data members
private List<CertificatePolicyMap> maps;
// Encode this extension value
private void encodeThis() throws IOException {
if (maps == null || maps.isEmpty()) {
this.extensionValue = null;
return;
}
DerOutputStream os = new DerOutputStream();
DerOutputStream tmp = new DerOutputStream();
for (CertificatePolicyMap map : maps) {
map.encode(tmp);
}
os.write(DerValue.tag_Sequence, tmp);
this.extensionValue = os.toByteArray();
}
/**
* Create a PolicyMappings with the List of CertificatePolicyMap.
*
* @param maps the List of CertificatePolicyMap.
*/
public PolicyMappingsExtension(List<CertificatePolicyMap> maps)
throws IOException {
this.maps = maps;
this.extensionId = PKIXExtensions.PolicyMappings_Id;
this.critical = false;
encodeThis();
}
/**
* Create a default PolicyMappingsExtension.
*/
public PolicyMappingsExtension() {
extensionId = PKIXExtensions.PolicyMappings_Id;
critical = false;
maps = new ArrayList<CertificatePolicyMap>();
}
/**
* Create the extension from the passed DER encoded value.
*
* @param critical true if the extension is to be treated as critical.
* @param value an array of DER encoded bytes of the actual value.
* @exception ClassCastException if value is not an array of bytes
* @exception IOException on error.
*/
public PolicyMappingsExtension(Boolean critical, Object value)
throws IOException {
this.extensionId = PKIXExtensions.PolicyMappings_Id;
this.critical = critical.booleanValue();
this.extensionValue = (byte[]) value;
DerValue val = new DerValue(this.extensionValue);
if (val.tag != DerValue.tag_Sequence) {
throw new IOException("Invalid encoding for " +
"PolicyMappingsExtension.");
}
maps = new ArrayList<CertificatePolicyMap>();
while (val.data.available() != 0) {
DerValue seq = val.data.getDerValue();
CertificatePolicyMap map = new CertificatePolicyMap(seq);
maps.add(map);
}
}
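/*
* Usage sketch (editor's illustration, not part of the original class): decoding a received
* extension value and re-encoding it. "encodedValue" is an assumed DER-encoded byte array.
*
*   PolicyMappingsExtension ext = new PolicyMappingsExtension(Boolean.FALSE, encodedValue);
*   List<CertificatePolicyMap> maps =
*       (List<CertificatePolicyMap>) ext.get(PolicyMappingsExtension.MAP);
*   ByteArrayOutputStream out = new ByteArrayOutputStream();
*   ext.encode(out);
*/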
/**
* Returns a printable representation of the policy map.
*/
public String toString() {
if (maps == null) return "";
String s = super.toString() + "PolicyMappings [\n"
+ maps.toString() + "]\n";
return (s);
}
/**
* Write the extension to the OutputStream.
*
* @param out the OutputStream to write the extension to.
* @exception IOException on encoding errors.
*/
public void encode(OutputStream out) throws IOException {
DerOutputStream tmp = new DerOutputStream();
if (extensionValue == null) {
extensionId = PKIXExtensions.PolicyMappings_Id;
critical = false;
encodeThis();
}
super.encode(tmp);
out.write(tmp.toByteArray());
}
/**
* Set the attribute value.
*/
public void set(String name, Object obj) throws IOException {
if (name.equalsIgnoreCase(MAP)) {
if (!(obj instanceof List)) {
throw new IOException("Attribute value should be of" +
" type List.");
}
maps = (List<CertificatePolicyMap>)obj;
} else {
throw new IOException("Attribute name not recognized by " +
"CertAttrSet:PolicyMappingsExtension.");
}
encodeThis();
}
/**
* Get the attribute value.
*/
public Object get(String name) throws IOException {
if (name.equalsIgnoreCase(MAP)) {
return (maps);
} else {
throw new IOException("Attribute name not recognized by " +
"CertAttrSet:PolicyMappingsExtension.");
}
}
/**
* Delete the attribute value.
*/
public void delete(String name) throws IOException {
if (name.equalsIgnoreCase(MAP)) {
maps = null;
} else {
throw new IOException("Attribute name not recognized by " +
"CertAttrSet:PolicyMappingsExtension.");
}
encodeThis();
}
/**
* Return an enumeration of names of attributes existing within this
* attribute.
*/
public Enumeration<String> getElements () {
AttributeNameEnumeration elements = new AttributeNameEnumeration();
elements.addElement(MAP);
return elements.elements();
}
/**
* Return the name of this attribute.
*/
public String getName () {
return (NAME);
}
}
|
|
/**
* Copyright 2015 Atos
* Contact: Seaclouds
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.seaclouds.platform.planner.aamwriter.modeldesigner;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.simple.JSONObject;
/**
* Classes in the aamwriter.modeldesigner package model the topology described in the UI.
*
* DNode class models a module in the topology, describing the name, tech properties,
* deployment properties and needed QoS.
*/
public class DNode {
public static final class Attributes {
public static final String NAME = "name";
public static final String TYPE = "type";
public static final String PROPERTIES = "properties";
}
public static final class Types {
public static final String WEB_APPLICATION = "WebApplication";
public static final String DATABASE = "Database";
}
public static final class Categories {
public static final String MYSQL = "database.mysql.MySqlNode";
public static final String MARIADB = "database.mariadb.MariaDbNode";
public static final String POSTGRESQL = "database.postgresql.PostgreSqlNode";
public static final String MONGODB = "nosql.mongodb.MongoDBServer";
public static final String REDIS = "nosql.redis.RedisStore";
}
public static final class Containers {
public static final String JBOSS6 = "webapp.jboss.JBoss6Server";
public static final String JBOSS7 = "webapp.jboss.JBoss7Server";
public static final String JETTY6 = "webapp.jetty.Jetty6Server";
public static final String TOMCAT = "webapp.tomcat.TomcatServer";
public static final String TOMCAT8 = "webapp.tomcat.Tomcat8Server";
}
public static final DNode NOT_FOUND = new DNode("[null]", "[null]");
@SuppressWarnings("unused")
private DGraph graph;
private String name;
private String type;
private Map<String, Object> properties;
private String language;
private String minVersion;
private String maxVersion;
private String category;
private String artifact;
private String infrastructure;
private String container;
private String numCpus;
private String diskSize;
private String benchmarkResponseTime;
private String benchmarkPlatform;
private List<Map<String, String>> qos;
public DNode(JSONObject jnode, DGraph graph) {
this.graph = graph;
this.name = (String) jnode.get(Attributes.NAME);
this.type = (String) jnode.get(Attributes.TYPE);
this.properties = new HashMap<String, Object>();
this.properties.putAll(readProperties(jnode));
}
private DNode(String name, String type) {
this.name = name;
this.type = type;
}
@SuppressWarnings("unchecked")
private Map<String, Object> readProperties(JSONObject jnode) {
Map<String, Object> map = (Map<String, Object>)jnode.get(Attributes.PROPERTIES);
language = extractStringFromMap("language", map);
minVersion = extractStringFromMap("min_version", map);
maxVersion = extractStringFromMap("max_version", map);
category = extractStringFromMap("category", map);
artifact = extractStringFromMap("artifact", map);
infrastructure = extractStringFromMap("infrastructure", map);
container = extractStringFromMap("container", map);
numCpus = extractStringFromMap("num_cpus", map);
diskSize = extractStringFromMap("disk_size", map);
benchmarkResponseTime = extractStringFromMap("benchmark_rt", map);
benchmarkPlatform = extractStringFromMap("benchmark_platform", map);
qos = (List) extractQosFromMap("qos", map);
return map;
}
private String extractStringFromMap(String key, Map<String, Object> map) {
String value = (String) map.remove(key);
return (value == null)? "" : value;
}
private List<Object> extractListFromMap(String key, Map<String, Object> map) {
List<Object> value = (List) map.remove(key);
return (value == null)? Collections.EMPTY_LIST : value;
}
private List<Map<String, String>> extractQosFromMap(String key, Map<String, Object> map) {
List<Map<String, Object>> qos = (List) map.remove(key);
if (qos != null) {
for (Map<String, Object> listitem : qos) {
for (String mapkey : listitem.keySet()) {
Object mapvalue = listitem.get(mapkey);
listitem.put(mapkey, mapvalue.toString());
}
}
}
return (qos == null)? Collections.EMPTY_LIST : qos;
}
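/*
* Editor's note: readProperties(...) above pulls the well-known keys out of the node's
* "properties" object and leaves any remaining entries in the generic properties map. An
* input shape it can handle looks like the following (all values are assumptions):
*
*   { "name": "web", "type": "WebApplication",
*     "properties": { "language": "JAVA", "container": "webapp.tomcat.TomcatServer",
*                     "num_cpus": "2", "qos": [ { "benchmark_rt": "200" } ],
*                     "custom_key": "kept in the generic properties map" } }
*/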
@Override
public String toString() {
return String.format("Node [name=%s, type=%s, properties=%s]",
name, type, properties);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
DNode other = (DNode) obj;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
public String getName() {
return name;
}
public String getType() {
return type;
}
public Map<String, Object> getOtherProperties() {
return Collections.unmodifiableMap(properties);
}
public String getLanguage() {
return language;
}
public String getMinVersion() {
return minVersion;
}
public String getMaxVersion() {
return maxVersion;
}
public String getCategory() {
return category;
}
public String getArtifact() {
return artifact;
}
public String getInfrastructure() {
return infrastructure;
}
public String getContainer() {
return container;
}
public String getNumCpus() {
return numCpus;
}
public String getDiskSize() {
return diskSize;
}
public String getBenchmarkResponseTime() {
return benchmarkResponseTime;
}
public String getBenchmarkPlatform() {
return benchmarkPlatform;
}
public List<Map<String, String>> getQos() {
return qos;
}
public boolean isFrontend() {
boolean result = false;
if (this.graph != null) {
result = (this.graph.getFrontendNode() == this);
}
return result;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.auth;
import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;
import java.util.function.Function;
import java.util.function.IntConsumer;
import java.util.function.IntSupplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import org.apache.cassandra.concurrent.DebuggableThreadPoolExecutor;
import org.apache.cassandra.utils.MBeanWrapper;
import static com.google.common.base.Preconditions.checkNotNull;
public class AuthCache<K, V> implements AuthCacheMBean
{
private static final Logger logger = LoggerFactory.getLogger(AuthCache.class);
public static final String MBEAN_NAME_BASE = "org.apache.cassandra.auth:type=";
/**
* Underlying cache. LoadingCache will call underlying load function on {@link #get} if key is not present
*/
protected volatile LoadingCache<K, V> cache;
private DebuggableThreadPoolExecutor cacheRefreshExecutor;
private String name;
private IntConsumer setValidityDelegate;
private IntSupplier getValidityDelegate;
private IntConsumer setUpdateIntervalDelegate;
private IntSupplier getUpdateIntervalDelegate;
private IntConsumer setMaxEntriesDelegate;
private IntSupplier getMaxEntriesDelegate;
private Function<K, V> loadFunction;
private BooleanSupplier enableCache;
/**
* @param name Used for MBean
* @param setValidityDelegate Used to set cache validity period. See {@link Policy#expireAfterWrite()}
* @param getValidityDelegate Getter for validity period
* @param setUpdateIntervalDelegate Used to set cache update interval. See {@link Policy#refreshAfterWrite()}
* @param getUpdateIntervalDelegate Getter for update interval
* @param setMaxEntriesDelegate Used to set max # entries in cache. See {@link com.github.benmanes.caffeine.cache.Policy.Eviction#setMaximum(long)}
* @param getMaxEntriesDelegate Getter for max entries.
* @param loadFunction Function to load the cache. Called on {@link #get(Object)}
* @param cacheEnabledDelegate Used to determine if cache is enabled.
*/
protected AuthCache(String name,
IntConsumer setValidityDelegate,
IntSupplier getValidityDelegate,
IntConsumer setUpdateIntervalDelegate,
IntSupplier getUpdateIntervalDelegate,
IntConsumer setMaxEntriesDelegate,
IntSupplier getMaxEntriesDelegate,
Function<K, V> loadFunction,
BooleanSupplier cacheEnabledDelegate)
{
this.name = checkNotNull(name);
this.setValidityDelegate = checkNotNull(setValidityDelegate);
this.getValidityDelegate = checkNotNull(getValidityDelegate);
this.setUpdateIntervalDelegate = checkNotNull(setUpdateIntervalDelegate);
this.getUpdateIntervalDelegate = checkNotNull(getUpdateIntervalDelegate);
this.setMaxEntriesDelegate = checkNotNull(setMaxEntriesDelegate);
this.getMaxEntriesDelegate = checkNotNull(getMaxEntriesDelegate);
this.loadFunction = checkNotNull(loadFunction);
this.enableCache = checkNotNull(cacheEnabledDelegate);
init();
}
/**
* Do setup for the cache and MBean.
*/
protected void init()
{
this.cacheRefreshExecutor = new DebuggableThreadPoolExecutor(name + "Refresh", Thread.NORM_PRIORITY);
cache = initCache(null);
MBeanWrapper.instance.registerMBean(this, getObjectName());
}
protected void unregisterMBean()
{
MBeanWrapper.instance.unregisterMBean(getObjectName(), MBeanWrapper.OnException.LOG);
}
protected String getObjectName()
{
return MBEAN_NAME_BASE + name;
}
/**
* Retrieve a value from the cache. Will call {@link LoadingCache#get(Object)} which will
* "load" the value if it's not present, thus populating the key.
* @param k
* @return The current value of {@code K} if cached or loaded.
*
* See {@link LoadingCache#get(Object)} for possible exceptions.
*/
public V get(K k)
{
if (cache == null)
return loadFunction.apply(k);
return cache.get(k);
}
/**
* Invalidate the entire cache.
*/
public void invalidate()
{
cache = initCache(null);
}
/**
* Invalidate a key
* @param k key to invalidate
*/
public void invalidate(K k)
{
if (cache != null)
cache.invalidate(k);
}
/**
* Set the time, in milliseconds, after which a value in the cache expires.
* @param validityPeriod in milliseconds
*/
public void setValidity(int validityPeriod)
{
if (Boolean.getBoolean("cassandra.disable_auth_caches_remote_configuration"))
throw new UnsupportedOperationException("Remote configuration of auth caches is disabled");
setValidityDelegate.accept(validityPeriod);
cache = initCache(cache);
}
public int getValidity()
{
return getValidityDelegate.getAsInt();
}
/**
* Set the time, in milliseconds, after which an entry in the cache is refreshed (its load function is called again).
* @param updateInterval in milliseconds
*/
public void setUpdateInterval(int updateInterval)
{
if (Boolean.getBoolean("cassandra.disable_auth_caches_remote_configuration"))
throw new UnsupportedOperationException("Remote configuration of auth caches is disabled");
setUpdateIntervalDelegate.accept(updateInterval);
cache = initCache(cache);
}
public int getUpdateInterval()
{
return getUpdateIntervalDelegate.getAsInt();
}
/**
* Set the maximum number of entries in the cache.
* @param maxEntries maximum number of entries to keep in the cache
*/
public void setMaxEntries(int maxEntries)
{
if (Boolean.getBoolean("cassandra.disable_auth_caches_remote_configuration"))
throw new UnsupportedOperationException("Remote configuration of auth caches is disabled");
setMaxEntriesDelegate.accept(maxEntries);
cache = initCache(cache);
}
public int getMaxEntries()
{
return getMaxEntriesDelegate.getAsInt();
}
/**
* (Re-)initialise the underlying cache. Will update validity, max entries, and update interval if
* any have changed. The underlying {@link LoadingCache} is initialized with the provided {@code loadFunction}.
* Note: if you need a cache setting that is not handled here, extend {@link AuthCache} and override this method.
* @param existing If not null, only the validity, max entries, and update interval of the existing cache are updated.
* @return New {@link LoadingCache} if existing was null, otherwise the existing {@code cache}
*/
protected LoadingCache<K, V> initCache(LoadingCache<K, V> existing)
{
if (!enableCache.getAsBoolean())
return null;
if (getValidity() <= 0)
return null;
logger.info("(Re)initializing {} (validity period/update interval/max entries) ({}/{}/{})",
name, getValidity(), getUpdateInterval(), getMaxEntries());
if (existing == null) {
return Caffeine.newBuilder()
.refreshAfterWrite(getUpdateInterval(), TimeUnit.MILLISECONDS)
.expireAfterWrite(getValidity(), TimeUnit.MILLISECONDS)
.maximumSize(getMaxEntries())
.executor(cacheRefreshExecutor)
.build(loadFunction::apply);
}
// Update the policies on the existing cache in place; these policies are always present
// because the cache above is built with refreshAfterWrite, expireAfterWrite and maximumSize.
cache.policy().refreshAfterWrite().ifPresent(policy ->
policy.setExpiresAfter(getUpdateInterval(), TimeUnit.MILLISECONDS));
cache.policy().expireAfterWrite().ifPresent(policy ->
policy.setExpiresAfter(getValidity(), TimeUnit.MILLISECONDS));
cache.policy().eviction().ifPresent(policy ->
policy.setMaximum(getMaxEntries()));
return cache;
}
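/*
* Sketch of a concrete cache built on this class (editor's illustration; "ExampleConfig" is a
* placeholder for whatever holds the validity/interval/size settings, not a real Cassandra type).
*
*   public class ExampleCache extends AuthCache<String, Integer>
*   {
*       ExampleCache()
*       {
*           super("ExampleCache",
*                 ExampleConfig::setValidity, ExampleConfig::getValidity,
*                 ExampleConfig::setUpdateInterval, ExampleConfig::getUpdateInterval,
*                 ExampleConfig::setMaxEntries, ExampleConfig::getMaxEntries,
*                 key -> key.length(),   // load function
*                 () -> true);           // cache enabled
*       }
*   }
*/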
}
|
|
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.exec;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.ActionContext;
import com.google.devtools.build.lib.actions.ActionInputPrefetcher;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.ExecutorInitException;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.SpawnStrategy;
import com.google.devtools.build.lib.util.RegexFilter;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
* Builder class to create an {@link Executor} instance. This class is part of the module API,
* which allows modules to affect how the executor is initialized.
*/
public class ExecutorBuilder {
private final SpawnActionContextMaps.Builder spawnActionContextMapsBuilder =
new SpawnActionContextMaps.Builder();
private final Set<ExecutorLifecycleListener> executorLifecycleListeners = new LinkedHashSet<>();
private ActionInputPrefetcher prefetcher;
public SpawnActionContextMaps getSpawnActionContextMaps() throws ExecutorInitException {
return spawnActionContextMapsBuilder.build();
}
/** Returns all executor lifecycle listeners registered with this builder so far. */
public ImmutableSet<ExecutorLifecycleListener> getExecutorLifecycleListeners() {
return ImmutableSet.copyOf(executorLifecycleListeners);
}
public ActionInputPrefetcher getActionInputPrefetcher() {
return prefetcher == null ? ActionInputPrefetcher.NONE : prefetcher;
}
/**
* Adds the specified action context to the executor, by wrapping it in a simple action context
* provider implementation.
*
* <p>If two action contexts are registered that share an identifying type and commandline
* identifier the last registered will take precedence.
*/
public <T extends ActionContext> ExecutorBuilder addActionContext(
Class<T> identifyingType, T context, String... commandlineIdentifiers) {
spawnActionContextMapsBuilder.addContext(identifyingType, context, commandlineIdentifiers);
return this;
}
/**
* Sets the strategy names for a given action mnemonic.
*
* <p>During execution, the {@link ProxySpawnActionContext} will ask each strategy whether it can
* execute a given Spawn. The first strategy in the list that says so will get the job.
*/
public ExecutorBuilder addStrategyByMnemonic(String mnemonic, List<String> strategies) {
spawnActionContextMapsBuilder.strategyByMnemonicMap().replaceValues(mnemonic, strategies);
return this;
}
/**
* Sets the strategy names to use in the remote branch of dynamic execution for a given action
* mnemonic.
*
* <p>During execution, each strategy is {@linkplain SpawnStrategy#canExec(Spawn,
* ActionContext.ActionContextRegistry) asked} whether it can execute a given Spawn. The first
* strategy in the list that says so will get the job.
*/
public ExecutorBuilder addDynamicRemoteStrategiesByMnemonic(
String mnemonic, List<String> strategies) {
spawnActionContextMapsBuilder
.remoteDynamicStrategyByMnemonicMap()
.replaceValues(mnemonic, strategies);
return this;
}
/**
* Sets the strategy names to use in the local branch of dynamic execution for a given action
* mnemonic.
*
* <p>During execution, each strategy is {@linkplain SpawnStrategy#canExec(Spawn,
* ActionContext.ActionContextRegistry) asked} whether it can execute a given Spawn. The first
* strategy in the list that says so will get the job.
*/
public ExecutorBuilder addDynamicLocalStrategiesByMnemonic(
String mnemonic, List<String> strategies) {
spawnActionContextMapsBuilder
.localDynamicStrategyByMnemonicMap()
.replaceValues(mnemonic, strategies);
return this;
}
/** Sets the strategy name to use if remote execution is not possible. */
public ExecutorBuilder setRemoteFallbackStrategy(String remoteLocalFallbackStrategy) {
spawnActionContextMapsBuilder.setRemoteFallbackStrategy(remoteLocalFallbackStrategy);
return this;
}
/**
* Adds an implementation with a specific strategy name.
*
* <p>Modules are free to provide different implementations of {@code ActionContext}. This can be
* used, for example, to implement sandboxed or distributed execution of {@code SpawnAction}s in
* different ways, while giving the user control over how exactly they are executed.
*
* <p>Example: a module requires {@code MyCustomActionContext} to be available, but doesn't
* associate it with any strategy. Call <code>
* addStrategyByContext(MyCustomActionContext.class, "")</code>.
*
* <p>Example: a module requires {@code MyLocalCustomActionContext} to be available, and wants it
* to always use the "local" strategy. Call <code>
* addStrategyByContext(MyCustomActionContext.class, "local")</code>.
*/
public ExecutorBuilder addStrategyByContext(
Class<? extends ActionContext> actionContext, String strategy) {
spawnActionContextMapsBuilder.strategyByContextMap().put(actionContext, strategy);
return this;
}
/**
* Similar to {@link #addStrategyByMnemonic}, but allows specifying a regex for the set of
* matching mnemonics, instead of an exact string.
*/
public ExecutorBuilder addStrategyByRegexp(RegexFilter regexFilter, List<String> strategy) {
spawnActionContextMapsBuilder.addStrategyByRegexp(regexFilter, strategy);
return this;
}
/**
* Sets the action input prefetcher. Only one module may set the prefetcher. If multiple modules
* set it, this method will throw an {@link IllegalStateException}.
*/
public ExecutorBuilder setActionInputPrefetcher(ActionInputPrefetcher prefetcher) {
Preconditions.checkState(this.prefetcher == null);
this.prefetcher = Preconditions.checkNotNull(prefetcher);
return this;
}
/**
* Registers an executor lifecycle listener which will receive notifications throughout the
* execution phase (if one occurs).
*
* @see ExecutorLifecycleListener for events that can be listened to
*/
public ExecutorBuilder addExecutorLifecycleListener(ExecutorLifecycleListener listener) {
executorLifecycleListeners.add(listener);
return this;
}
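/*
* Wiring sketch (editor's illustration, not part of the original class): how a module might
* register a strategy and route a mnemonic to it. "MyLocalStrategy" is an assumed
* SpawnStrategy implementation and "my-local" an assumed commandline identifier.
*
*   builder
*       .addActionContext(SpawnStrategy.class, new MyLocalStrategy(), "my-local")
*       .addStrategyByMnemonic("Javac", ImmutableList.of("my-local"))
*       .addStrategyByContext(SpawnStrategy.class, "my-local");
*/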
// TODO(katre): Use a fake implementation to allow for migration to the new API.
public ModuleActionContextRegistry.Builder asModuleActionContextRegistryBuilder() {
return new ModuleActionContextDelegate(this);
}
private static final class ModuleActionContextDelegate
implements ModuleActionContextRegistry.Builder {
private final ExecutorBuilder executorBuilder;
private ModuleActionContextDelegate(ExecutorBuilder executorBuilder) {
this.executorBuilder = executorBuilder;
}
@Override
public ModuleActionContextRegistry.Builder restrictTo(
Class<?> identifyingType, String restriction) {
Preconditions.checkArgument(ActionContext.class.isAssignableFrom(identifyingType));
@SuppressWarnings("unchecked")
Class<? extends ActionContext> castType = (Class<? extends ActionContext>) identifyingType;
this.executorBuilder.addStrategyByContext(castType, restriction);
return this;
}
@Override
public <T extends ActionContext> ModuleActionContextRegistry.Builder register(
Class<T> identifyingType, T context, String... commandLineIdentifiers) {
this.executorBuilder.addActionContext(identifyingType, context, commandLineIdentifiers);
return this;
}
@Override
public ModuleActionContextRegistry build() throws ExecutorInitException {
throw new UnsupportedOperationException("not a real builder");
}
}
// TODO(katre): Use a fake implementation to allow for migration to the new API.
public SpawnStrategyRegistry.Builder asSpawnStrategyRegistryBuilder() {
return new SpawnStrategyRegistryDelegate(this);
}
private static final class SpawnStrategyRegistryDelegate
implements SpawnStrategyRegistry.Builder {
private final ExecutorBuilder executorBuilder;
private SpawnStrategyRegistryDelegate(ExecutorBuilder executorBuilder) {
this.executorBuilder = executorBuilder;
}
@Override
public SpawnStrategyRegistry.Builder addDescriptionFilter(
RegexFilter filter, List<String> identifiers) {
this.executorBuilder.addStrategyByRegexp(filter, identifiers);
return this;
}
@Override
public SpawnStrategyRegistry.Builder addMnemonicFilter(
String mnemonic, List<String> identifiers) {
this.executorBuilder.addStrategyByMnemonic(mnemonic, identifiers);
return this;
}
@Override
public SpawnStrategyRegistry.Builder registerStrategy(
SpawnStrategy strategy, List<String> commandlineIdentifiers) {
this.executorBuilder.addActionContext(
SpawnStrategy.class, strategy, commandlineIdentifiers.toArray(new String[0]));
return this;
}
@Override
public SpawnStrategyRegistry.Builder useLegacyDescriptionFilterPrecedence() {
// Ignored.
return this;
}
@Override
public SpawnStrategyRegistry.Builder setDefaultStrategies(List<String> defaultStrategies) {
this.executorBuilder.addStrategyByMnemonic("", defaultStrategies);
return this;
}
@Override
public SpawnStrategyRegistry.Builder resetDefaultStrategies() {
this.executorBuilder.addStrategyByMnemonic("", ImmutableList.of(""));
return this;
}
@Override
public SpawnStrategyRegistry.Builder addDynamicRemoteStrategiesByMnemonic(
String mnemonic, List<String> strategies) {
this.executorBuilder.addDynamicRemoteStrategiesByMnemonic(mnemonic, strategies);
return this;
}
@Override
public SpawnStrategyRegistry.Builder addDynamicLocalStrategiesByMnemonic(
String mnemonic, List<String> strategies) {
this.executorBuilder.addDynamicLocalStrategiesByMnemonic(mnemonic, strategies);
return this;
}
@Override
public SpawnStrategyRegistry.Builder setRemoteLocalFallbackStrategyIdentifier(
String commandlineIdentifier) {
this.executorBuilder.setRemoteFallbackStrategy(commandlineIdentifier);
return this;
}
@Override
public SpawnStrategyRegistry build() throws ExecutorInitException {
throw new UnsupportedOperationException("not a real builder");
}
}
}
|
|
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package org.uma.jmetal.algorithm.multiobjective.smpso;
import org.uma.jmetal.operator.MutationOperator;
import org.uma.jmetal.operator.impl.mutation.PolynomialMutation;
import org.uma.jmetal.problem.DoubleProblem;
import org.uma.jmetal.solution.DoubleSolution;
import org.uma.jmetal.util.AlgorithmBuilder;
import org.uma.jmetal.util.archive.BoundedArchive;
import org.uma.jmetal.util.evaluator.SolutionListEvaluator;
import org.uma.jmetal.util.evaluator.impl.SequentialSolutionListEvaluator;
import org.uma.jmetal.util.pseudorandom.JMetalRandom;
import org.uma.jmetal.util.pseudorandom.PseudoRandomGenerator;
/**
* @author Antonio J. Nebro <[email protected]>
*/
public class SMPSOBuilder implements AlgorithmBuilder<SMPSO> {
public enum SMPSOVariant {SMPSO, Measures}
private DoubleProblem problem;
private double c1Max;
private double c1Min;
private double c2Max;
private double c2Min;
private double r1Max;
private double r1Min;
private double r2Max;
private double r2Min;
private double weightMax;
private double weightMin;
private double changeVelocity1;
private double changeVelocity2;
private int swarmSize;
private int maxIterations;
protected int archiveSize;
protected MutationOperator<DoubleSolution> mutationOperator;
private BoundedArchive<DoubleSolution> leaders;
private SolutionListEvaluator<DoubleSolution> evaluator;
private SMPSOVariant variant ;
public SMPSOBuilder(DoubleProblem problem, BoundedArchive<DoubleSolution> leaders) {
this.problem = problem;
this.leaders = leaders;
swarmSize = 100;
maxIterations = 250;
r1Max = 1.0;
r1Min = 0.0;
r2Max = 1.0;
r2Min = 0.0;
c1Max = 2.5;
c1Min = 1.5;
c2Max = 2.5;
c2Min = 1.5;
weightMax = 0.1;
weightMin = 0.1;
changeVelocity1 = -1;
changeVelocity2 = -1;
mutationOperator = new PolynomialMutation(1.0/problem.getNumberOfVariables(), 20.0) ;
evaluator = new SequentialSolutionListEvaluator<DoubleSolution>() ;
this.variant = SMPSOVariant.SMPSO ;
}
/* Getters */
public int getSwarmSize() {
return swarmSize;
}
public int getMaxIterations() {
return maxIterations;
}
public double getR1Max() {
return r1Max;
}
public double getR1Min() {
return r1Min;
}
public double getR2Max() {
return r2Max;
}
public double getR2Min() {
return r2Min;
}
public double getC1Max() {
return c1Max;
}
public double getC1Min() {
return c1Min;
}
public double getC2Max() {
return c2Max;
}
public double getC2Min() {
return c2Min;
}
public MutationOperator<DoubleSolution> getMutation() {
return mutationOperator;
}
public double getWeightMax() {
return weightMax;
}
public double getWeightMin() {
return weightMin;
}
public double getChangeVelocity1() {
return changeVelocity1;
}
public double getChangeVelocity2() {
return changeVelocity2;
}
/* Setters */
public SMPSOBuilder setSwarmSize(int swarmSize) {
this.swarmSize = swarmSize;
return this;
}
public SMPSOBuilder setMaxIterations(int maxIterations) {
this.maxIterations = maxIterations;
return this;
}
public SMPSOBuilder setMutation(MutationOperator<DoubleSolution> mutation) {
mutationOperator = mutation;
return this;
}
public SMPSOBuilder setC1Max(double c1Max) {
this.c1Max = c1Max;
return this;
}
public SMPSOBuilder setC1Min(double c1Min) {
this.c1Min = c1Min;
return this;
}
public SMPSOBuilder setC2Max(double c2Max) {
this.c2Max = c2Max;
return this;
}
public SMPSOBuilder setC2Min(double c2Min) {
this.c2Min = c2Min;
return this;
}
public SMPSOBuilder setR1Max(double r1Max) {
this.r1Max = r1Max;
return this;
}
public SMPSOBuilder setR1Min(double r1Min) {
this.r1Min = r1Min;
return this;
}
public SMPSOBuilder setR2Max(double r2Max) {
this.r2Max = r2Max;
return this;
}
public SMPSOBuilder setR2Min(double r2Min) {
this.r2Min = r2Min;
return this;
}
public SMPSOBuilder setWeightMax(double weightMax) {
this.weightMax = weightMax;
return this;
}
public SMPSOBuilder setWeightMin(double weightMin) {
this.weightMin = weightMin;
return this;
}
public SMPSOBuilder setChangeVelocity1(double changeVelocity1) {
this.changeVelocity1 = changeVelocity1;
return this;
}
public SMPSOBuilder setChangeVelocity2(double changeVelocity2) {
this.changeVelocity2 = changeVelocity2;
return this;
}
public SMPSOBuilder setRandomGenerator(PseudoRandomGenerator randomGenerator) {
JMetalRandom.getInstance().setRandomGenerator(randomGenerator);
return this;
}
public SMPSOBuilder setSolutionListEvaluator(SolutionListEvaluator<DoubleSolution> evaluator) {
this.evaluator = evaluator ;
return this ;
}
public SMPSOBuilder setVariant(SMPSOVariant variant) {
this.variant = variant;
return this;
}
public SMPSO build() {
if (variant.equals(SMPSOVariant.SMPSO)) {
return new SMPSO(problem, swarmSize, leaders, mutationOperator, maxIterations, r1Min, r1Max,
r2Min, r2Max, c1Min, c1Max, c2Min, c2Max, weightMin, weightMax, changeVelocity1,
changeVelocity2, evaluator);
} else {
return new SMPSOMeasures(problem, swarmSize, leaders, mutationOperator, maxIterations, r1Min, r1Max,
r2Min, r2Max, c1Min, c1Max, c2Min, c2Max, weightMin, weightMax, changeVelocity1,
changeVelocity2, evaluator);
}
}
/*
* Getters
*/
public DoubleProblem getProblem() {
return problem;
}
public int getArchiveSize() {
return archiveSize;
}
public MutationOperator<DoubleSolution> getMutationOperator() {
return mutationOperator;
}
public BoundedArchive<DoubleSolution> getLeaders() {
return leaders;
}
public SolutionListEvaluator<DoubleSolution> getEvaluator() {
return evaluator;
}
}
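/*
 * Editor's sketch, not part of the original jMetal sources: typical fluent usage of the
 * builder above. The concrete problem and archive classes named here (ZDT1,
 * CrowdingDistanceArchive) are assumptions about the surrounding jMetal distribution and are
 * shown only to illustrate the call pattern; build() and the setters are the ones defined above.
 *
 *   DoubleProblem problem = new ZDT1();
 *   BoundedArchive<DoubleSolution> archive = new CrowdingDistanceArchive<DoubleSolution>(100);
 *   SMPSO algorithm = new SMPSOBuilder(problem, archive)
 *       .setSwarmSize(100)
 *       .setMaxIterations(250)
 *       .setVariant(SMPSOBuilder.SMPSOVariant.SMPSO)
 *       .build();
 *   algorithm.run();   // assumes SMPSO follows jMetal's Algorithm#run() contract
 */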
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.prometheus.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Represents the input of a ListWorkspaces operation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/amp-2020-08-01/ListWorkspaces" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListWorkspacesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
* </p>
*/
private String alias;
/**
* <p>
* Maximum results to return in response (default=100, maximum=1000).
* </p>
*/
private Integer maxResults;
/**
* <p>
* Pagination token to request the next page in a paginated list. This token is obtained from the output of the
* previous ListWorkspaces request.
* </p>
*/
private String nextToken;
/**
* <p>
* Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
* </p>
*
* @param alias
* Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
*/
public void setAlias(String alias) {
this.alias = alias;
}
/**
* <p>
* Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
* </p>
*
* @return Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
*/
public String getAlias() {
return this.alias;
}
/**
* <p>
* Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
* </p>
*
* @param alias
* Optional filter for workspace alias. Only the workspaces with aliases that begin with this value will be
* returned.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListWorkspacesRequest withAlias(String alias) {
setAlias(alias);
return this;
}
/**
* <p>
* Maximum results to return in response (default=100, maximum=1000).
* </p>
*
* @param maxResults
* Maximum results to return in response (default=100, maximum=1000).
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* Maximum results to return in response (default=100, maximum=1000).
* </p>
*
* @return Maximum results to return in response (default=100, maximum=1000).
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* Maximum results to return in response (default=100, maximum=1000).
* </p>
*
* @param maxResults
* Maximum results to return in response (default=100, maximum=1000).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListWorkspacesRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* <p>
* Pagination token to request the next page in a paginated list. This token is obtained from the output of the
* previous ListWorkspaces request.
* </p>
*
* @param nextToken
* Pagination token to request the next page in a paginated list. This token is obtained from the output of
* the previous ListWorkspaces request.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* Pagination token to request the next page in a paginated list. This token is obtained from the output of the
* previous ListWorkspaces request.
* </p>
*
* @return Pagination token to request the next page in a paginated list. This token is obtained from the output of
* the previous ListWorkspaces request.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* Pagination token to request the next page in a paginated list. This token is obtained from the output of the
* previous ListWorkspaces request.
* </p>
*
* @param nextToken
* Pagination token to request the next page in a paginated list. This token is obtained from the output of
* the previous ListWorkspaces request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListWorkspacesRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAlias() != null)
sb.append("Alias: ").append(getAlias()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListWorkspacesRequest == false)
return false;
ListWorkspacesRequest other = (ListWorkspacesRequest) obj;
if (other.getAlias() == null ^ this.getAlias() == null)
return false;
if (other.getAlias() != null && other.getAlias().equals(this.getAlias()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAlias() == null) ? 0 : getAlias().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public ListWorkspacesRequest clone() {
return (ListWorkspacesRequest) super.clone();
}
}
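/*
 * Editor's sketch, not part of the generated SDK sources: a minimal, self-contained example of
 * the fluent with* accessors defined above. The class name is illustrative only; it relies
 * solely on the request model in this file and on its implicit no-argument constructor.
 */
class ListWorkspacesRequestUsageSketch {

    public static void main(String[] args) {
        // Build a request that filters on an alias prefix and caps the page size.
        ListWorkspacesRequest request = new ListWorkspacesRequest()
                .withAlias("prod-")
                .withMaxResults(100);

        // Prints the rendering produced by toString() above, with Alias and MaxResults
        // populated and NextToken omitted because it was never set.
        System.out.println(request);
    }
}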
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.sql.util;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Foldables;
import org.elasticsearch.xpack.ql.type.DataTypes;
import org.elasticsearch.xpack.sql.parser.ParsingException;
import org.elasticsearch.xpack.sql.proto.StringUtils;
import org.elasticsearch.xpack.sql.type.SqlDataTypeConverter;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.TemporalAccessor;
import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE;
import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME;
public final class DateUtils {
public static final ZoneId UTC = ZoneId.of("Z");
    // LocalDate.EPOCH is not available in Java 8; it was introduced in later Java versions
public static final LocalDate EPOCH = LocalDate.of(1970, 1, 1);
public static final long DAY_IN_MILLIS = 60 * 60 * 24 * 1000L;
private static final DateTimeFormatter ISO_LOCAL_TIME_OPTIONAL_TZ = new DateTimeFormatterBuilder()
.append(ISO_LOCAL_TIME)
.optionalStart()
.appendZoneOrOffsetId()
.toFormatter().withZone(UTC);
private static final DateTimeFormatter ISO_LOCAL_DATE_OPTIONAL_TIME_FORMATTER_WHITESPACE = new DateTimeFormatterBuilder()
.append(ISO_LOCAL_DATE)
.optionalStart()
.appendLiteral(' ')
.append(ISO_LOCAL_TIME_OPTIONAL_TZ)
.optionalEnd()
.toFormatter().withZone(UTC);
private static final DateTimeFormatter ISO_LOCAL_DATE_OPTIONAL_TIME_FORMATTER_T_LITERAL = new DateTimeFormatterBuilder()
.append(ISO_LOCAL_DATE)
.optionalStart()
.appendLiteral('T')
.append(ISO_LOCAL_TIME_OPTIONAL_TZ)
.optionalEnd()
.toFormatter().withZone(UTC);
private static final DateFormatter UTC_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time").withZone(UTC);
private static final int DEFAULT_PRECISION_FOR_CURRENT_FUNCTIONS = 3;
private DateUtils() {}
/**
     * Creates a date for the SQL DATE type from the millis since epoch.
*/
public static ZonedDateTime asDateOnly(long millis) {
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC).toLocalDate().atStartOfDay(UTC);
}
/**
     * Creates a time for the SQL TIME type from the millis since epoch, at UTC.
*/
public static OffsetTime asTimeOnly(long millis) {
return OffsetTime.ofInstant(Instant.ofEpochMilli(millis % DAY_IN_MILLIS), UTC);
}
/**
     * Creates a time for the SQL TIME type from the millis since epoch, at the given time zone.
*/
public static OffsetTime asTimeOnly(long millis, ZoneId zoneId) {
return OffsetTime.ofInstant(Instant.ofEpochMilli(millis % DAY_IN_MILLIS), zoneId);
}
public static OffsetTime asTimeAtZone(OffsetTime time, ZoneId zonedId) {
return time.atDate(DateUtils.EPOCH).atZoneSameInstant(zonedId).toOffsetDateTime().toOffsetTime();
}
/**
* Creates a datetime from the millis since epoch (thus the time-zone is UTC).
*/
public static ZonedDateTime asDateTimeWithMillis(long millis) {
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC);
}
/**
* Creates a datetime from the millis since epoch then translates the date into the given timezone.
*/
public static ZonedDateTime asDateTimeWithMillis(long millis, ZoneId id) {
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), id);
}
/**
* Parses the given string into a Date (SQL DATE type) using UTC as a default timezone.
*/
public static ZonedDateTime asDateOnly(String dateFormat) {
int separatorIdx = timeSeparatorIdx(dateFormat);
// Avoid index out of bounds - it will lead to DateTimeParseException anyways
if (separatorIdx >= dateFormat.length() || dateFormat.charAt(separatorIdx) == 'T') {
return LocalDate.parse(dateFormat, ISO_LOCAL_DATE_OPTIONAL_TIME_FORMATTER_T_LITERAL).atStartOfDay(UTC);
} else {
return LocalDate.parse(dateFormat, ISO_LOCAL_DATE_OPTIONAL_TIME_FORMATTER_WHITESPACE).atStartOfDay(UTC);
}
}
public static ZonedDateTime asDateOnly(ZonedDateTime zdt) {
return zdt.toLocalDate().atStartOfDay(zdt.getZone());
}
public static OffsetTime asTimeOnly(String timeFormat) {
return DateFormatters.from(ISO_LOCAL_TIME_OPTIONAL_TZ.parse(timeFormat)).toOffsetDateTime().toOffsetTime();
}
/**
* Parses the given string into a DateTime using UTC as a default timezone.
*/
public static ZonedDateTime asDateTimeWithNanos(String dateFormat) {
return DateFormatters.from(UTC_DATE_TIME_FORMATTER.parse(dateFormat)).withZoneSameInstant(UTC);
}
public static ZonedDateTime dateTimeOfEscapedLiteral(String dateFormat) {
int separatorIdx = timeSeparatorIdx(dateFormat);
// Avoid index out of bounds - it will lead to DateTimeParseException anyways
if (separatorIdx >= dateFormat.length() || dateFormat.charAt(separatorIdx) == 'T') {
return ZonedDateTime.parse(dateFormat, ISO_LOCAL_DATE_OPTIONAL_TIME_FORMATTER_T_LITERAL);
} else {
return ZonedDateTime.parse(dateFormat, ISO_LOCAL_DATE_OPTIONAL_TIME_FORMATTER_WHITESPACE);
}
}
public static String toString(ZonedDateTime dateTime) {
return StringUtils.toString(dateTime);
}
public static String toDateString(ZonedDateTime date) {
return date.format(ISO_LOCAL_DATE);
}
public static String toTimeString(OffsetTime time) {
return StringUtils.toString(time);
}
public static long minDayInterval(long l) {
if (l < DAY_IN_MILLIS ) {
return DAY_IN_MILLIS;
}
return l - (l % DAY_IN_MILLIS);
}
public static int getNanoPrecision(Expression precisionExpression, int nano) {
int precision = DEFAULT_PRECISION_FOR_CURRENT_FUNCTIONS;
if (precisionExpression != null) {
try {
precision = (Integer) SqlDataTypeConverter.convert(Foldables.valueOf(precisionExpression), DataTypes.INTEGER);
} catch (Exception e) {
throw new ParsingException(precisionExpression.source(), "invalid precision; " + e.getMessage());
}
}
if (precision < 0 || precision > 9) {
throw new ParsingException(precisionExpression.source(), "precision needs to be between [0-9], received [{}]",
precisionExpression.sourceText());
}
// remove the remainder
nano = nano - nano % (int) Math.pow(10, (9 - precision));
return nano;
}
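    // Worked example: with the default precision of 3 and nano = 123_456_789,
    // (int) Math.pow(10, 9 - 3) is 1_000_000, so the remainder 456_789 is dropped and
    // 123_000_000 is returned, i.e. the fraction is truncated to millisecond resolution.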
public static ZonedDateTime atTimeZone(LocalDate ld, ZoneId zoneId) {
return ld.atStartOfDay(zoneId);
}
public static ZonedDateTime atTimeZone(LocalDateTime ldt, ZoneId zoneId) {
return ZonedDateTime.ofInstant(ldt, zoneId.getRules().getValidOffsets(ldt).get(0), zoneId);
}
public static OffsetTime atTimeZone(OffsetTime ot, ZoneId zoneId) {
LocalDateTime ldt = ot.atDate(LocalDate.EPOCH).toLocalDateTime();
return ot.withOffsetSameInstant(zoneId.getRules().getValidOffsets(ldt).get(0));
}
public static OffsetTime atTimeZone(LocalTime lt, ZoneId zoneId) {
LocalDateTime ldt = lt.atDate(LocalDate.EPOCH);
return OffsetTime.of(lt, zoneId.getRules().getValidOffsets(ldt).get(0));
}
public static ZonedDateTime atTimeZone(ZonedDateTime zdt, ZoneId zoneId) {
return zdt.withZoneSameInstant(zoneId);
}
public static TemporalAccessor atTimeZone(TemporalAccessor ta, ZoneId zoneId) {
if (ta instanceof LocalDateTime) {
return atTimeZone((LocalDateTime) ta, zoneId);
} else if (ta instanceof ZonedDateTime){
return atTimeZone((ZonedDateTime)ta, zoneId);
} else if (ta instanceof OffsetTime) {
return atTimeZone((OffsetTime) ta, zoneId);
} else if (ta instanceof LocalTime) {
return atTimeZone((LocalTime) ta, zoneId);
} else if (ta instanceof LocalDate) {
return atTimeZone((LocalDate) ta, zoneId);
} else {
return ta;
}
}
private static int timeSeparatorIdx(String timestampStr) {
int separatorIdx = timestampStr.indexOf('-'); // Find the first `-` date separator
if (separatorIdx == 0) { // first char = `-` denotes a negative year
separatorIdx = timestampStr.indexOf('-', 1); // Find the first `-` date separator past the negative year
}
// Find the second `-` date separator and move 3 places past the dayOfYear to find the time separator
// e.g. 2020-06-01T10:20:30....
// ^
// +3 = ^
return timestampStr.indexOf('-', separatorIdx + 1) + 3;
}
}
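/*
 * Editor's sketch, not part of the original Elasticsearch sources: a minimal demonstration of
 * the parsing helpers above. The class name is illustrative only; it calls nothing but the
 * public static methods of DateUtils and assumes the rest of the SQL plugin is on the classpath.
 */
class DateUtilsUsageSketch {

    public static void main(String[] args) {
        // Date-only strings are accepted with either the 'T' or the whitespace time separator.
        System.out.println(DateUtils.asDateOnly("2020-06-01"));                     // midnight at UTC
        System.out.println(DateUtils.dateTimeOfEscapedLiteral("2020-06-01 10:20:30"));

        // Time-only strings default to UTC when no offset is present.
        System.out.println(DateUtils.asTimeOnly("10:20:30"));
    }
}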
|
|
// ============================================================================
// Copyright 2006-2012 Daniel W. Dyer
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ============================================================================
package squidpony.squidmath;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
 * Permutation generator for generating all permutations of sets of up to
 * 20 elements in size. While 20 may seem a low limit, bear in mind that
 * the number of permutations of a set of size n is n!. For a set of 21
* items, the number of permutations is bigger than can be stored in Java's
* 64-bit long integer data type. Therefore it seems unlikely that you
* could ever generate, let alone process, all of the permutations in a
* reasonable time frame. For this reason the implementation is optimised for
* sets of size 20 or less (this affords better performance by allowing primitive
* numeric types to be used for calculations rather than
* {@link java.math.BigInteger}).
* <br>
* Originally part of the <a href="http://maths.uncommons.org/">Uncommon Maths software package</a>.
* @param <T> The type of element that the permutation will consist of.
* @author Daniel Dyer (modified from the original version written by Michael
* Gilleland of Merriam Park Software -
* <a href="http://www.merriampark.com/perm.htm">http://www.merriampark.com/perm.htm</a>).
* @see CombinationGenerator
*/
public class PermutationGenerator<T> implements Iterable<List<T>>, Serializable
{
private static final long serialVersionUID = 514276118639629743L;
private final T[] elements;
private final int[] permutationIndices;
private long remainingPermutations;
private long totalPermutations;
/**
* Permutation generator that generates all possible orderings of
* the elements in the specified set.
* @param elements The elements to permute; will be modified, so this should be copied beforehand
*/
public PermutationGenerator(T[] elements)
{
if (elements.length > 20)
{
throw new IllegalArgumentException("Size must be less than or equal to 20.");
}
this.elements = elements;
permutationIndices = new int[elements.length];
totalPermutations = MathExtras.factorial(elements.length);
reset();
}
/**
* Permutation generator that generates all possible orderings of
* the elements in the specified set.
* @param elements The elements to permute.
* @param filler An array of T with the same length as elements; needed because GWT can't create a generic array.
*/
@SuppressWarnings("unchecked")
public PermutationGenerator(Collection<T> elements, T[] filler)
{
this(elements.toArray(filler));
}
/**
* Resets the generator state.
*/
public final void reset()
{
for (int i = 0; i < permutationIndices.length; i++)
{
permutationIndices[i] = i;
}
remainingPermutations = totalPermutations;
}
/**
* Returns the number of permutations not yet generated.
* @return The number of unique permutations still to be generated.
*/
public long getRemainingPermutations()
{
return remainingPermutations;
}
/**
* Returns the total number of unique permutations that can be
* generated for the given set of elements.
* @return The total number of permutations.
*/
public long getTotalPermutations()
{
return totalPermutations;
}
/**
* Returns the total number of unique permutations that can be
* generated for the given count of permute-able elements.
* Typically used with the static methods of this class that
* find permutation indices.
* @param count the number of elements (typically indices) you want to find a permutation of
* @return The total number of permutations.
*/
public static long getTotalPermutations(int count)
{
return MathExtras.factorial(count);
}
/**
* Returns the total number of unique permutations that can be
* generated for the given count of permute-able elements.
* Typically used with the static methods of this class that
* find permutation indices and involve BigInteger values.
* @param count the number of elements (typically indices) you want to find a permutation of
* @return The total number of permutations.
*/
public static BigInteger getBigTotalPermutations(int count)
{
return MathExtras.bigFactorial(count);
}
/**
* Are there more permutations that have not yet been returned?
* @return true if there are more permutations, false otherwise.
*/
public boolean hasMore()
{
return remainingPermutations > 0;
}
/**
* Generate the next permutation and return an array containing
* the elements in the appropriate order. This overloaded method
* allows the caller to provide an array that will be used and returned.
* The purpose of this is to improve performance when iterating over
* permutations. This method allows a single array instance to be reused.
* @param destination Provides an array to use to create the
* permutation. The specified array must be the same length as a
* permutation. This is the array that will be returned, once
* it has been filled with the elements in the appropriate order.
* @return The next permutation as an array.
*/
public T[] nextPermutationAsArray(T[] destination)
{
if (destination.length != elements.length)
{
throw new IllegalArgumentException("Destination array must be the same length as permutations.");
}
generateNextPermutationIndices();
// Generate actual permutation.
for (int i = 0; i < permutationIndices.length; i++)
{
destination[i] = elements[permutationIndices[i]];
}
return destination;
}
/**
* Generate the next permutation and return a list containing
* the elements in the appropriate order.
* @see #nextPermutationAsList(List)
* @return The next permutation as a list.
*/
public List<T> nextPermutationAsList()
{
List<T> permutation = new ArrayList<T>(elements.length);
return nextPermutationAsList(permutation);
}
/**
* Generate the next permutation and return a list containing
* the elements in the appropriate order. This overloaded method
* allows the caller to provide a list that will be used and returned.
* The purpose of this is to improve performance when iterating over
* permutations. If the {@link #nextPermutationAsList()} method is
* used it will create a new list every time. When iterating over
* permutations this will result in lots of short-lived objects that
* have to be garbage collected. This method allows a single list
* instance to be reused in such circumstances.
* @param destination Provides a list to use to create the
* permutation. This is the list that will be returned, once
* it has been filled with the elements in the appropriate order.
* @return The next permutation as a list.
*/
public List<T> nextPermutationAsList(List<T> destination)
{
generateNextPermutationIndices();
// Generate actual permutation.
destination.clear();
for (int i : permutationIndices)
{
destination.add(elements[i]);
}
return destination;
}
/**
* Generate the indices into the elements array for the next permutation. The
* algorithm is from Kenneth H. Rosen, Discrete Mathematics and its Applications,
     * 2nd edition (NY: McGraw-Hill, 1991), p. 284.
*/
private void generateNextPermutationIndices()
{
if (remainingPermutations == 0)
{
throw new IllegalStateException("There are no permutations remaining. " +
"Generator must be reset to continue using.");
}
else if (remainingPermutations < totalPermutations)
{
// Find largest index j with permutationIndices[j] < permutationIndices[j + 1]
int j = permutationIndices.length - 2;
while (permutationIndices[j] > permutationIndices[j + 1])
{
j--;
}
// Find index k such that permutationIndices[k] is smallest integer greater than
// permutationIndices[j] to the right of permutationIndices[j].
int k = permutationIndices.length - 1;
while (permutationIndices[j] > permutationIndices[k])
{
k--;
}
// Interchange permutation indices.
int temp = permutationIndices[k];
permutationIndices[k] = permutationIndices[j];
permutationIndices[j] = temp;
// Put tail end of permutation after jth position in increasing order.
int r = permutationIndices.length - 1;
int s = j + 1;
while (r > s)
{
temp = permutationIndices[s];
permutationIndices[s] = permutationIndices[r];
permutationIndices[r] = temp;
r--;
s++;
}
}
--remainingPermutations;
}
private int[] getPermutationShift(T[] perm) {
int[] sh = new int[perm.length];
boolean[] taken = new boolean[perm.length];
for (int i = 0; i < perm.length - 1; i++) {
int ctr = -1;
for (int j = 0; j < perm.length; j++) {
if (!taken[j])
ctr++;
if (perm[j] == elements[i]) {
taken[j] = true;
sh[i] = ctr;
break;
}
}
}
return sh;
}
private int[] getPermutationShift(List<T> perm) {
int length = perm.size();
int[] sh = new int[length];
boolean[] taken = new boolean[length];
for (int i = 0; i < length - 1; i++) {
int ctr = -1;
for (int j = 0; j < length; j++) {
if (!taken[j])
ctr++;
if (perm.get(j) == elements[i]) {
taken[j] = true;
sh[i] = ctr;
break;
}
}
}
return sh;
}
/**
* Given an array of T that constitutes a permutation of the elements this was constructed with, finds the specific
* index of the permutation given a factoradic numbering scheme (not used by the rest of this class, except the
* decodePermutation() method). The index can be passed to decodePermutation to reproduce the permutation passed to
* this, or modified and then passed to decodePermutation(). Determines equality by identity, not by .equals(), so
* that equivalent values that have different references/identities can appear in the permuted elements.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param perm an array of T that must be a valid permutation of this object's elements
* @return an encoded number that can be used to reconstruct the permutation when passed to decodePermutation()
*/
public long encodePermutation(T[] perm)
{
long e = 0;
if(perm == null || perm.length != elements.length)
return e;
int[] shift = getPermutationShift(perm);
for (int i = 1; i < shift.length; i++) {
e += shift[i] * MathExtras.factorialsStart[i];
}
return e;
}
/**
* Given a List of T that constitutes a permutation of the elements this was constructed with, finds the specific
* index of the permutation given a factoradic numbering scheme (not used by the rest of this class, except the
* decodePermutation() method). The index can be passed to decodePermutation to reproduce the permutation passed to
* this, or modified and then passed to decodePermutation(). Determines equality by identity, not by .equals(), so
* that equivalent values that have different references/identities can appear in the permuted elements.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param perm a List of T that must be a valid permutation of this object's elements
* @return an encoded number that can be used to reconstruct the permutation when passed to decodePermutation()
*/
public long encodePermutation(List<T> perm)
{
long e = 0;
if(perm == null || perm.size() != elements.length)
return e;
int[] shift = getPermutationShift(perm);
for (int i = 1; i < shift.length; i++) {
e += shift[i] * MathExtras.factorialsStart[i];
}
return e;
}
private int[] factoradicDecode(long e)
{
int[] sequence = new int[elements.length];
int base = 2;
for (int k = 1; k < elements.length; k++)
{
sequence[elements.length - 1 - k] = (int)(e % base);
e /= base;
base++;
}
return sequence;
}
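    // Worked example: for four elements and e = 5 the loop computes 5 % 2 = 1, then 2 % 3 = 2,
    // then 0 % 4 = 0, filling sequence[2], sequence[1] and sequence[0] in that order while
    // sequence[3] stays 0, so the factoradic digit sequence is {0, 2, 1, 0}; decodePermutation()
    // then reads each digit as "pick the n-th still-unused slot" when placing the elements.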
/**
* Given a long between 0 and the total number of permutations possible (see getTotalPermutations() for how to access
* this) and an array of T with the same length as the elements this was constructed with, fills the array with the
* permutation described by the long as a special (factoradic) index into the possible permutations. You can get an
* index for a specific permutation with encodePermutation() or by generating a random number between 0 and
* getTotalPermutations(), if you want it randomly.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a long
* @param destination an array of T that must have equivalent length to the elements this was constructed with
* @return the looked-up permutation, which is the same value destination will be assigned
*/
public T[] decodePermutation(long encoded, T[] destination)
{
if(destination == null)
return null;
encoded %= totalPermutations;
int[] sequence = factoradicDecode(encoded);
//char[] list = new char[] { 'a', 'b', 'c', 'd', 'e' }; //change for elements
//char[] permuted = new char[n]; //change for destination
boolean[] set = new boolean[elements.length];
for (int i = 0; i < elements.length; i++)
{
int s = sequence[i];
int remainingPosition = 0;
int index;
// Find the s'th position in the permuted list that has not been set yet.
for (index = 0; index < elements.length; index++)
{
if (!set[index])
{
if (remainingPosition == s)
break;
remainingPosition++;
}
}
destination[index] = elements[i];
set[index] = true;
}
return destination;
}
/**
* Given a long between 0 and the total number of permutations possible (see getTotalPermutations() for how to access
* this), creates a List filled with the permutation described by the long as a special (factoradic) index into the
* possible permutations. You can get an index for a specific permutation with encodePermutation() or by generating a
* random number between 0 and getTotalPermutations(), if you want it randomly.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a long
* @return a List of T that corresponds to the permutation at the encoded index
*/
public List<T> decodePermutation(long encoded)
{
ArrayList<T> list = new ArrayList<T>(elements.length);
Collections.addAll(list, elements);
return decodePermutation(encoded, list);
}
/**
* Given a long between 0 and the total number of permutations possible (see getTotalPermutations() for how to access
* this) and a List of T with the same length as the elements this was constructed with, fills the List with the
* permutation described by the long as a special (factoradic) index into the possible permutations. You can get an
* index for a specific permutation with encodePermutation() or by generating a random number between 0 and
* getTotalPermutations(), if you want it randomly.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a long
* @param destination a List of T that must have equivalent size to the elements this was constructed with
* @return the looked-up permutation, which is the same value destination will be assigned
*/
public List<T> decodePermutation(long encoded, List<T> destination)
{
if(destination == null)
return null;
encoded %= totalPermutations;
int[] sequence = factoradicDecode(encoded);
//char[] list = new char[] { 'a', 'b', 'c', 'd', 'e' }; //change for elements
//char[] permuted = new char[n]; //change for destination
boolean[] set = new boolean[elements.length];
for (int i = 0; i < elements.length; i++)
{
int s = sequence[i];
int remainingPosition = 0;
int index;
// Find the s'th position in the permuted list that has not been set yet.
for (index = 0; index < elements.length; index++)
{
if (!set[index])
{
if (remainingPosition == s)
break;
remainingPosition++;
}
}
destination.set(index, elements[i]);
set[index] = true;
}
return destination;
}
/**
* <p>Provides a read-only iterator for iterating over the permutations
* generated by this object. This method is the implementation of the
* {@link Iterable} interface that permits instances of this class to be
* used with the new-style for loop.</p>
* <p>For example:</p>
* <pre>
* List<Integer> elements = Arrays.asList(1, 2, 3);
     * PermutationGenerator<Integer> permutations = new PermutationGenerator<Integer>(elements, new Integer[3]);
* for (List<Integer> p : permutations)
* {
* // Do something with each permutation.
* }
* </pre>
* @return An iterator.
* @since 1.1
*/
public Iterator<List<T>> iterator()
{
return new Iterator<List<T>>()
{
public boolean hasNext()
{
return hasMore();
}
public List<T> next()
{
return nextPermutationAsList();
}
public void remove()
{
throw new UnsupportedOperationException("Iterator does not support removal.");
}
};
}
private static int[] getPermutationShift(int[] perm) {
int[] sh = new int[perm.length];
boolean[] taken = new boolean[perm.length];
for (int i = 0; i < perm.length - 1; i++) {
int ctr = -1;
for (int j = 0; j < perm.length; j++) {
if (!taken[j])
ctr++;
if (perm[j] == i) {
taken[j] = true;
sh[i] = ctr;
break;
}
}
}
return sh;
}
/**
* Given an array of int that constitutes a permutation of indices, where no element in perm is repeated and all
* ints are less than perm.length, finds the specific index of the permutation given a factoradic numbering scheme
* (not used by the rest of this class, except the decodePermutation() method). The index can be passed to
* decodePermutation to reproduce the index permutation passed to this, or modified and then passed to
* decodePermutation().
* <br>
* If perm is more than 20 items in length, you should use {@link #encodeBigPermutation} instead.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param perm an array of int that is a permutation of the range from 0 (inclusive) to perm.length (exclusive, must be no more than 20)
* @return an encoded number that can be used to reconstruct the permutation when passed to decodePermutation()
*/
public static long encodePermutation(int[] perm)
{
long e = 0;
if(perm == null || perm.length <= 0)
return e;
int[] shift = getPermutationShift(perm);
for (int i = 1; i < shift.length; i++) {
e += shift[i] * MathExtras.factorialsStart[i];
}
return e;
}
/**
* Given an array of int that constitutes a permutation of indices, where no element in perm is repeated and all
* ints are less than perm.length, finds the specific index of the permutation given a factoradic numbering scheme
* (not used by the rest of this class, except the decodePermutation() method). The index can be passed to
* decodePermutation to reproduce the index permutation passed to this, or modified and then passed to
* decodePermutation().
* <br>
* If perm is 20 items or less in length, you can use {@link #encodePermutation} instead to get a 64-bit encoding.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param perm an array of int that is a permutation of the range from 0 (inclusive) to perm.length (exclusive)
* @return an encoded number that can be used to reconstruct the permutation when passed to decodePermutation()
*/
public static BigInteger encodeBigPermutation(int[] perm)
{
BigInteger e = BigInteger.ZERO;
if(perm == null || perm.length <= 0)
return e;
int[] shift = getPermutationShift(perm);
for (int i = 1; i < shift.length; i++) {
e = e.add(MathExtras.bigFactorial(i).multiply(BigInteger.valueOf(shift[i])));
}
return e;
}
private static int[] factoradicDecode(long e, int count)
{
int[] sequence = new int[count];
int base = 2;
for (int k = 1; k < count; k++)
{
sequence[count - 1 - k] = (int)(e % base);
e /= base;
base++;
}
return sequence;
}
private static int[] factoradicDecode(BigInteger e, int count)
{
int[] sequence = new int[count];
BigInteger base = BigInteger.valueOf(2);
for (int k = 1; k < count; k++)
{
sequence[count - 1 - k] = e.mod(base).intValue();
e = e.divide(base);
base = base.add(BigInteger.ONE);
}
return sequence;
}
/**
* Given a long between 0 and the total number of permutations possible (see getTotalPermutations() for how to access
* this) and an int count of how many indices to find a permutation of, returns an array with the permutation
* of the indices described by the long as a special (factoradic) index into the possible permutations. You can get
* an index for a specific permutation with encodePermutation() or by generating a random number between 0 and
* getTotalPermutations(), if you want it randomly.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a long
* @param count an int between 1 and 20, inclusive, that will be the size of the returned array
* @return the looked-up permutation as an int array with length equal to count
*/
public static int[] decodePermutation(long encoded, int count)
{
if(count <= 0)
return new int[0];
encoded %= MathExtras.factorial(count);
int[] sequence = factoradicDecode(encoded, count), destination = new int[count];
//char[] list = new char[] { 'a', 'b', 'c', 'd', 'e' }; //change for elements
//char[] permuted = new char[n]; //change for destination
boolean[] set = new boolean[count];
for (int i = 0; i < count; i++)
{
int s = sequence[i];
int remainingPosition = 0;
int index;
// Find the s'th position in the permuted list that has not been set yet.
for (index = 0; index < count; index++)
{
if (!set[index])
{
if (remainingPosition == s)
break;
remainingPosition++;
}
}
destination[index] = i;
set[index] = true;
}
return destination;
}
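    // Worked example: decodePermutation(5L, 4) first reduces 5 modulo 4! = 24, decodes it to the
    // factoradic digits {0, 2, 1, 0}, and then drops 0, 1, 2, 3 into the still-unused slots those
    // digits select, yielding the index permutation {0, 3, 2, 1}.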
/**
* Given a long between 0 and the total number of permutations possible (see getBigTotalPermutations() for how to
* access this) and an int count of how many indices to find a permutation of, returns an array with the permutation
* of the indices described by the long as a special (factoradic) index into the possible permutations. You can get
* an index for a specific permutation with encodeBigPermutation() or by generating a random number between 0 and
* getBigTotalPermutations(), if you want it randomly.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a BigInteger
* @param count a positive int that will be the size of the returned array
* @return the looked-up permutation as an int array with length equal to count
*/
public static int[] decodePermutation(BigInteger encoded, int count)
{
if(count <= 0)
return new int[0];
BigInteger enc = encoded.mod(MathExtras.bigFactorial(count));
int[] sequence = factoradicDecode(enc, count), destination = new int[count];
//char[] list = new char[] { 'a', 'b', 'c', 'd', 'e' }; //change for elements
//char[] permuted = new char[n]; //change for destination
boolean[] set = new boolean[count];
for (int i = 0; i < count; i++)
{
int s = sequence[i];
int remainingPosition = 0;
int index;
// Find the s'th position in the permuted list that has not been set yet.
for (index = 0; index < count; index++)
{
if (!set[index])
{
if (remainingPosition == s)
break;
remainingPosition++;
}
}
destination[index] = i;
set[index] = true;
}
return destination;
}
/**
* Given a long between 0 and the total number of permutations possible (see getTotalPermutations() for how to access
* this) and an int count of how many indices to find a permutation of, returns an array with the permutation
* of the indices described by the long as a special (factoradic) index into the possible permutations. You can get
* an index for a specific permutation with encodePermutation() or by generating a random number between 0 and
* getTotalPermutations(), if you want it randomly. This variant adds an int to each item in the returned array,
* which may be useful if generating indices that don't start at 0.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a long
* @param count an int between 1 and 20, inclusive, that will be the size of the returned array
* @param add an int to add to each item of the permutation
* @return the looked-up permutation as an int array with length equal to count
*/
public static int[] decodePermutation(long encoded, int count, int add)
{
int[] p = decodePermutation(encoded, count);
for (int i = 0; i < p.length; i++) {
p[i] += add;
}
return p;
}
/**
* Given a long between 0 and the total number of permutations possible (see getBigTotalPermutations() for how to
* access this) and an int count of how many indices to find a permutation of, returns an array with the permutation
* of the indices described by the long as a special (factoradic) index into the possible permutations. You can get
* an index for a specific permutation with encodeBigPermutation() or by generating a random number between 0 and
* getBigTotalPermutations(), if you want it randomly. This variant adds an int to each item in the returned array,
* which may be useful if generating indices that don't start at 0.
* <br>
* Credit goes to user Joren on StackOverflow, http://stackoverflow.com/a/1506337
* @param encoded the index encoded as a BigInteger
* @param count a positive int that will be the size of the returned array
* @param add an int to add to each item of the permutation
* @return the looked-up permutation as an int array with length equal to count
*/
public static int[] decodePermutation(BigInteger encoded, int count, int add)
{
int[] p = decodePermutation(encoded, count);
for (int i = 0; i < p.length; i++) {
p[i] += add;
}
return p;
}
}
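/*
 * Editor's sketch, not part of the original SquidLib/Uncommons Maths sources: a minimal,
 * self-contained demonstration of exhausting the generator defined above. The class name is
 * illustrative only; it relies solely on the PermutationGenerator API in this file.
 */
class PermutationGeneratorUsageSketch
{
    public static void main(String[] args)
    {
        // The constructor documents that the supplied array is modified, so pass a copy.
        Integer[] items = {1, 2, 3};
        PermutationGenerator<Integer> generator = new PermutationGenerator<Integer>(items.clone());
        while (generator.hasMore())
        {
            // Prints the 3! = 6 orderings, starting with [1, 2, 3] and then [1, 3, 2], ...
            System.out.println(generator.nextPermutationAsList());
        }
    }
}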
|
|
package com.dbsys.rs.client.frame;
import com.dbsys.rs.client.UnitFrame;
import com.dbsys.rs.client.document.DocumentException;
import com.dbsys.rs.client.document.pdf.KartuPasienPdfView;
import com.dbsys.rs.client.document.pdf.PdfProcessor;
import com.dbsys.rs.client.tableModel.PendudukTableModel;
import com.dbsys.rs.connector.ServiceException;
import com.dbsys.rs.connector.TokenHolder;
import com.dbsys.rs.connector.service.PasienService;
import com.dbsys.rs.connector.service.PendudukService;
import com.dbsys.rs.connector.service.TokenService;
import com.dbsys.rs.client.DateUtil;
import com.dbsys.rs.client.Kelas;
import com.dbsys.rs.client.Penanggung;
import com.dbsys.rs.client.entity.Pasien;
import com.dbsys.rs.client.entity.Pasien.Pendaftaran;
import com.dbsys.rs.client.entity.Pelayanan;
import com.dbsys.rs.client.entity.Penduduk;
import com.dbsys.rs.client.entity.Tindakan;
import com.dbsys.rs.client.entity.Unit;
import com.dbsys.rs.connector.service.PelayananService;
import com.dbsys.rs.connector.service.TindakanService;
import java.awt.Color;
import java.sql.Date;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.JOptionPane;
/**
*
* @author Bramwell Kasaedja
* @author Deddy Christoper Kakunsi
*/
public final class LoketPendaftaran extends javax.swing.JFrame implements UnitFrame {
private final PendudukService pendudukService = PendudukService.getInstance();
private final PasienService pasienService = PasienService.getInstance();
    private final TokenService tokenService = TokenService.getInstance();
private Penduduk penduduk;
private Unit tujuan;
private Pasien pasien;
/**
     * Creates new form LoketPendaftaran
*/
public LoketPendaftaran() {
initComponents();
lblOperator.setText(TokenHolder.getNamaOperator());
resetForm();
}
@Override
public void setUnit(Unit unit) {
tujuan = unit;
txtPasienTujuan.setText(tujuan.getNama());
}
private void resetForm() {
penduduk = null;
tujuan = null;
txtPendudukKode.setText(Penduduk.createKode());
txtPendudukNik.setText(null);
txtPendudukNama.setText(null);
cbPendudukKelamin.setSelectedIndex(0);
txtPendudukUmur.setText(null);
txtPasienNomor.setText(Pasien.createKode());
txtPasienTujuan.setText(null);
cbPasienKelas.setSelectedIndex(0);
cbPasienTanggungan.setSelectedIndex(0);
Calendar now = Calendar.getInstance();
now.setTime(DateUtil.getDate());
txtPasienTanggalMasuk.setSelectedDate(now);
}
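    /**
     * Records the outpatient registration-ticket charge ("Karcis Rawat Jalan") for the newly
     * registered patient: the Tindakan is looked up via TindakanService and a single Pelayanan
     * entry (quantity 1, no extra cost) is saved against the operator's unit and today's date.
     */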
private void tambahTagihanKarcis(Pasien pasien) throws ServiceException {
final PelayananService pelayananService = PelayananService.getInstance();
final TindakanService tindakanService = TindakanService.getInstance();
Tindakan tindakan = tindakanService.get("Karcis Rawat Jalan", Kelas.NONE);
Pelayanan pelayanan = new Pelayanan();
pelayanan.setTindakan(tindakan);
pelayanan.setBiayaTambahan(0L);
pelayanan.setJumlah(1);
pelayanan.setKeterangan(null);
pelayanan.setPasien(pasien);
pelayanan.setUnit(TokenHolder.getUnit());
pelayanan.setTanggal(DateUtil.getDate());
pelayananService.simpan(pelayanan);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
pnlDetail = new javax.swing.JPanel();
jLabel4 = new javax.swing.JLabel();
jLabel5 = new javax.swing.JLabel();
jLabel6 = new javax.swing.JLabel();
jLabel7 = new javax.swing.JLabel();
jLabel9 = new javax.swing.JLabel();
txtPendudukKode = new javax.swing.JTextField();
txtPendudukNik = new javax.swing.JTextField();
txtPendudukNama = new javax.swing.JTextField();
cbPendudukKelamin = new javax.swing.JComboBox();
txtPendudukUmur = new javax.swing.JTextField();
btnPendudukUpdate = new javax.swing.JButton();
dateChooserCombo1 = new datechooser.beans.DateChooserCombo();
btnPendudukTambah = new javax.swing.JButton();
jScrollPane1 = new javax.swing.JScrollPane();
tblPenduduk = new javax.swing.JTable();
pnlCari = new javax.swing.JPanel();
jLabel13 = new javax.swing.JLabel();
txtKeyword = new javax.swing.JTextField();
pnlPendaftaran = new javax.swing.JPanel();
jLabel12 = new javax.swing.JLabel();
jLabel16 = new javax.swing.JLabel();
jLabel15 = new javax.swing.JLabel();
jLabel14 = new javax.swing.JLabel();
jLabel17 = new javax.swing.JLabel();
txtPasienNomor = new javax.swing.JTextField();
txtPasienTujuan = new javax.swing.JTextField();
cbPasienKelas = new javax.swing.JComboBox();
cbPasienTanggungan = new javax.swing.JComboBox();
btnPasienTambah = new javax.swing.JButton();
txtPasienTanggalMasuk = new datechooser.beans.DateChooserCombo();
btnCetakKartu = new javax.swing.JButton();
btnReset = new javax.swing.JButton();
jToolBar1 = new javax.swing.JToolBar();
jLabel1 = new javax.swing.JLabel();
lblOperator = new javax.swing.JLabel();
jSeparator1 = new javax.swing.JToolBar.Separator();
btnLogout = new javax.swing.JButton();
btnUbahPasien = new javax.swing.JButton();
btnCetakKartuTagihan = new javax.swing.JButton();
btnCetakPasien = new javax.swing.JButton();
Image = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DO_NOTHING_ON_CLOSE);
setTitle("Rumah Sakit Liun Kendage Tahuna");
setExtendedState(javax.swing.JFrame.MAXIMIZED_BOTH);
setUndecorated(true);
setResizable(false);
getContentPane().setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());
pnlDetail.setBackground(Utama.colorTransparentPanel);
pnlDetail.setBorder(javax.swing.BorderFactory.createTitledBorder(javax.swing.BorderFactory.createEtchedBorder(), "DATA REKAM MEDIK", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, new java.awt.Font("Tahoma", 1, 11))); // NOI18N
pnlDetail.setBackground(new Color(0,0,0,20));
pnlDetail.setLayout(null);
jLabel4.setText("NO. MEDREK");
pnlDetail.add(jLabel4);
jLabel4.setBounds(20, 30, 100, 25);
jLabel5.setText("NO. JAMINAN");
pnlDetail.add(jLabel5);
jLabel5.setBounds(20, 60, 100, 25);
jLabel6.setText("NAMA");
pnlDetail.add(jLabel6);
jLabel6.setBounds(20, 90, 100, 25);
jLabel7.setText("KELAMIN");
pnlDetail.add(jLabel7);
jLabel7.setBounds(20, 120, 100, 25);
jLabel9.setText("UMUR");
pnlDetail.add(jLabel9);
jLabel9.setBounds(20, 150, 100, 25);
txtPendudukKode.setBorder(javax.swing.BorderFactory.createEtchedBorder());
txtPendudukKode.setEnabled(false);
pnlDetail.add(txtPendudukKode);
txtPendudukKode.setBounds(130, 30, 250, 25);
txtPendudukNik.setBorder(javax.swing.BorderFactory.createEtchedBorder());
txtPendudukNik.setEnabled(false);
pnlDetail.add(txtPendudukNik);
txtPendudukNik.setBounds(130, 60, 250, 25);
txtPendudukNama.setBorder(javax.swing.BorderFactory.createEtchedBorder());
txtPendudukNama.setEnabled(false);
pnlDetail.add(txtPendudukNama);
txtPendudukNama.setBounds(130, 90, 250, 25);
cbPendudukKelamin.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "- Pilih -", "PRIA", "WANITA" }));
cbPendudukKelamin.setBorder(null);
cbPendudukKelamin.setEnabled(false);
pnlDetail.add(cbPendudukKelamin);
cbPendudukKelamin.setBounds(130, 120, 250, 25);
txtPendudukUmur.setBorder(javax.swing.BorderFactory.createEtchedBorder());
txtPendudukUmur.setEnabled(false);
pnlDetail.add(txtPendudukUmur);
txtPendudukUmur.setBounds(130, 150, 250, 25);
btnPendudukUpdate.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/dbsys/rs/client/images/btn_update.png"))); // NOI18N
btnPendudukUpdate.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnPendudukUpdateActionPerformed(evt);
}
});
pnlDetail.add(btnPendudukUpdate);
btnPendudukUpdate.setBounds(300, 180, 80, 30);
pnlDetail.add(dateChooserCombo1);
dateChooserCombo1.setBounds(70, 380, 155, 20);
btnPendudukTambah.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/dbsys/rs/client/images/btn_tambah.png"))); // NOI18N
btnPendudukTambah.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnPendudukTambahActionPerformed(evt);
}
});
pnlDetail.add(btnPendudukTambah);
btnPendudukTambah.setBounds(210, 180, 80, 30);
getContentPane().add(pnlDetail, new org.netbeans.lib.awtextra.AbsoluteConstraints(860, 160, 400, 220));
jScrollPane1.setBackground(Utama.colorTransparentPanel);
tblPenduduk.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Title 1", "Title 2", "Title 3", "Title 4"
}
));
tblPenduduk.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
tblPendudukMouseClicked(evt);
}
});
jScrollPane1.setViewportView(tblPenduduk);
getContentPane().add(jScrollPane1, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 230, 830, 520));
pnlCari.setBackground(Utama.colorTransparentPanel);
pnlCari.setBorder(javax.swing.BorderFactory.createTitledBorder(javax.swing.BorderFactory.createEtchedBorder(), "PENCARIAN REKAM MEDIK", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, new java.awt.Font("Tahoma", 1, 11))); // NOI18N
pnlCari.setLayout(null);
pnlCari.setBackground(new Color(0, 0, 0, 20));
jLabel13.setText("NAMA PASIEN");
pnlCari.add(jLabel13);
jLabel13.setBounds(20, 20, 110, 25);
txtKeyword.addFocusListener(new java.awt.event.FocusAdapter() {
public void focusLost(java.awt.event.FocusEvent evt) {
txtKeywordFocusLost(evt);
}
});
txtKeyword.addKeyListener(new java.awt.event.KeyAdapter() {
public void keyPressed(java.awt.event.KeyEvent evt) {
txtKeywordKeyPressed(evt);
}
});
pnlCari.add(txtKeyword);
txtKeyword.setBounds(140, 20, 670, 25);
getContentPane().add(pnlCari, new org.netbeans.lib.awtextra.AbsoluteConstraints(20, 160, 830, 60));
pnlPendaftaran.setBackground(Utama.colorTransparentPanel);
pnlPendaftaran.setBorder(javax.swing.BorderFactory.createTitledBorder(javax.swing.BorderFactory.createEtchedBorder(), "DATA PASIEN", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, new java.awt.Font("Tahoma", 1, 11))); // NOI18N
pnlPendaftaran.setLayout(null);
pnlPendaftaran.setBackground(new Color(0,0,0,20));
jLabel12.setText("UNIT TUJUAN");
pnlPendaftaran.add(jLabel12);
jLabel12.setBounds(20, 90, 100, 25);
jLabel16.setText("NO. PASIEN");
pnlPendaftaran.add(jLabel16);
jLabel16.setBounds(20, 30, 100, 25);
jLabel15.setText("TANGGAL MASUK");
pnlPendaftaran.add(jLabel15);
jLabel15.setBounds(20, 60, 100, 25);
jLabel14.setText("TANGGUNGAN");
pnlPendaftaran.add(jLabel14);
jLabel14.setBounds(20, 150, 100, 25);
jLabel17.setText("KELAS");
pnlPendaftaran.add(jLabel17);
jLabel17.setBounds(20, 120, 100, 25);
txtPasienNomor.setBorder(javax.swing.BorderFactory.createEtchedBorder());
pnlPendaftaran.add(txtPasienNomor);
txtPasienNomor.setBounds(130, 30, 250, 25);
txtPasienTujuan.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
txtPasienTujuanMouseClicked(evt);
}
});
pnlPendaftaran.add(txtPasienTujuan);
txtPasienTujuan.setBounds(130, 90, 250, 25);
cbPasienKelas.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "NONE", "VVIP", "VIP", "I", "II", "III" }));
pnlPendaftaran.add(cbPasienKelas);
cbPasienKelas.setBounds(130, 120, 250, 25);
cbPasienTanggungan.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "- Pilih -", "BPJS", "UMUM" }));
cbPasienTanggungan.setBorder(null);
pnlPendaftaran.add(cbPasienTanggungan);
cbPasienTanggungan.setBounds(130, 150, 250, 25);
btnPasienTambah.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/dbsys/rs/client/images/btn_simpan.png"))); // NOI18N
btnPasienTambah.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnPasienTambahActionPerformed(evt);
}
});
pnlPendaftaran.add(btnPasienTambah);
btnPasienTambah.setBounds(210, 180, 80, 30);
pnlPendaftaran.add(txtPasienTanggalMasuk);
txtPasienTanggalMasuk.setBounds(130, 60, 250, 25);
btnCetakKartu.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/dbsys/rs/client/images/btn_cetak.png"))); // NOI18N
btnCetakKartu.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnCetakKartuActionPerformed(evt);
}
});
pnlPendaftaran.add(btnCetakKartu);
btnCetakKartu.setBounds(300, 180, 80, 30);
getContentPane().add(pnlPendaftaran, new org.netbeans.lib.awtextra.AbsoluteConstraints(860, 390, 400, 220));
btnReset.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/dbsys/rs/client/images/btn_reset.png"))); // NOI18N
btnReset.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnResetActionPerformed(evt);
}
});
getContentPane().add(btnReset, new org.netbeans.lib.awtextra.AbsoluteConstraints(1170, 720, 80, 30));
jToolBar1.setBackground(java.awt.SystemColor.activeCaptionBorder);
jToolBar1.setRollover(true);
jToolBar1.setEnabled(false);
jLabel1.setText("ANDA LOGIN SEBEGAI :");
jToolBar1.add(jLabel1);
lblOperator.setText("jLabel1");
jToolBar1.add(lblOperator);
jSeparator1.setMaximumSize(new java.awt.Dimension(20, 32767));
jToolBar1.add(jSeparator1);
btnLogout.setText("LOGOUT");
btnLogout.setFocusable(false);
btnLogout.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
btnLogout.setMaximumSize(new java.awt.Dimension(80, 20));
btnLogout.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
btnLogout.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnLogoutActionPerformed(evt);
}
});
jToolBar1.add(btnLogout);
btnUbahPasien.setText("UBAH DATA PASIEN");
btnUbahPasien.setFocusable(false);
btnUbahPasien.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
btnUbahPasien.setMaximumSize(new java.awt.Dimension(120, 20));
btnUbahPasien.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
btnUbahPasien.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnUbahPasienActionPerformed(evt);
}
});
jToolBar1.add(btnUbahPasien);
btnCetakKartuTagihan.setText("CETAK KARTU TAGIHAN");
btnCetakKartuTagihan.setFocusable(false);
btnCetakKartuTagihan.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
btnCetakKartuTagihan.setMaximumSize(new java.awt.Dimension(123, 20));
btnCetakKartuTagihan.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
btnCetakKartuTagihan.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnCetakKartuTagihanActionPerformed(evt);
}
});
jToolBar1.add(btnCetakKartuTagihan);
btnCetakPasien.setText("DAFTAR PASIEN");
btnCetakPasien.setFocusable(false);
btnCetakPasien.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
btnCetakPasien.setMaximumSize(new java.awt.Dimension(120, 20));
btnCetakPasien.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
btnCetakPasien.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnCetakPasienActionPerformed(evt);
}
});
jToolBar1.add(btnCetakPasien);
getContentPane().add(jToolBar1, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 770, 1280, 30));
Image.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/dbsys/rs/client/images/bg_pendaftaran.png"))); // NOI18N
getContentPane().add(Image, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 0, 1280, 800));
pack();
}// </editor-fold>//GEN-END:initComponents
private void tblPendudukMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblPendudukMouseClicked
int index = tblPenduduk.getSelectedRow();
PendudukTableModel tableModel = (PendudukTableModel) tblPenduduk.getModel();
penduduk = tableModel.getPenduduk(index);
txtPendudukKode.setText(penduduk.getKode());
txtPendudukNik.setText(penduduk.getNik());
txtPendudukNama.setText(penduduk.getNama());
cbPendudukKelamin.setSelectedItem(penduduk.getKelamin().toString());
Date hariIni = DateUtil.getDate();
Date lahir = penduduk.getTanggalLahir();
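// Approximate age in whole years; assumes DateUtil.calculate returns the difference between the two dates in days.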
int umur = DateUtil.calculate(lahir, hariIni) / 365;
txtPendudukUmur.setText(Integer.toString(umur));
// Data Pasien
txtPasienNomor.setText(Pasien.createKode());
Calendar now = Calendar.getInstance();
now.setTime(DateUtil.getDate());
txtPasienTanggalMasuk.setSelectedDate(now);
}//GEN-LAST:event_tblPendudukMouseClicked
private void btnPendudukTambahActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnPendudukTambahActionPerformed
pasien = null;
TambahPasien tambahPasien = new TambahPasien();
tambahPasien.setVisible(true);
}//GEN-LAST:event_btnPendudukTambahActionPerformed
private void btnPasienTambahActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnPasienTambahActionPerformed
String kode = txtPasienNomor.getText();
String tanggungan = cbPasienTanggungan.getSelectedItem().toString();
String kelas = cbPasienKelas.getSelectedItem().toString();
Calendar calendar = txtPasienTanggalMasuk.getSelectedDate();
long lTime = calendar.getTimeInMillis();
try {
if (tanggungan == null || tanggungan.equals("- Pilih -"))
throw new ServiceException("Silahkan memilih tanggungan");
if (kelas == null || kelas.equals("- Pilih -"))
throw new ServiceException("Silahkan memilih kelas");
if (tujuan == null)
throw new ServiceException("Silahkan masukan unit tujuan");
pasien = pasienService.daftar(penduduk, Penanggung.valueOf(tanggungan), new Date(lTime), kode, Pendaftaran.LOKET, Kelas.valueOf(kelas), tujuan);
txtPasienNomor.setText(pasien.getKode());
// Otomatis tambah tagihan karcis pasien rawat jalan
tambahTagihanKarcis(pasien);
JOptionPane.showMessageDialog(this, "Berhasil menyimpan data pasien.");
} catch (ServiceException ex) {
JOptionPane.showMessageDialog(this, ex.getMessage());
}
}//GEN-LAST:event_btnPasienTambahActionPerformed
private void btnLogoutActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnLogoutActionPerformed
try {
tokenService.lock(TokenHolder.getKode());
new Utama().setVisible(true);
this.dispose();
} catch (ServiceException ex) {
JOptionPane.showMessageDialog(this, ex.getMessage());
}
}//GEN-LAST:event_btnLogoutActionPerformed
private void txtKeywordFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_txtKeywordFocusLost
String keyword = txtKeyword.getText();
if (keyword.equals(""))
return;
try {
List<Penduduk> list = pendudukService.cari(keyword);
PendudukTableModel tableModel = new PendudukTableModel(list);
tblPenduduk.setModel(tableModel);
} catch (ServiceException ex) {
JOptionPane.showMessageDialog(this, ex.getMessage());
}
}//GEN-LAST:event_txtKeywordFocusLost
private void txtPasienTujuanMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_txtPasienTujuanMouseClicked
Pencarian frameCari = new Pencarian(this, Unit.class);
frameCari.setVisible(true);
}//GEN-LAST:event_txtPasienTujuanMouseClicked
private void txtKeywordKeyPressed(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_txtKeywordKeyPressed
if (evt.getKeyCode() == java.awt.event.KeyEvent.VK_ENTER)
btnPendudukTambah.requestFocus();
}//GEN-LAST:event_txtKeywordKeyPressed
private void btnUbahPasienActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnUbahPasienActionPerformed
new DetailPasien().setVisible(true);
}//GEN-LAST:event_btnUbahPasienActionPerformed
private void btnCetakKartuActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCetakKartuActionPerformed
if (pasien == null) {
JOptionPane.showMessageDialog(this, "Silahkan mengisi data pasien terlebih dahulu");
return;
}
PdfProcessor pdfProcessor = new PdfProcessor();
KartuPasienPdfView pdfView = new KartuPasienPdfView();
try {
Map<String, Object> model = new HashMap<>();
model.put("pasien", pasien);
pdfProcessor.process(pdfView, model, String.format("pasien-%s.pdf", DateUtil.getTime().hashCode()));
} catch (DocumentException ex) {
JOptionPane.showMessageDialog(this, ex.getMessage());
}
}//GEN-LAST:event_btnCetakKartuActionPerformed
private void btnCetakKartuTagihanActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCetakKartuTagihanActionPerformed
String kodePasien = JOptionPane.showInputDialog(this, "Masukan Nomor Pasien");
try {
Pasien lPasien = pasienService.get(kodePasien);
PdfProcessor pdfProcessor = new PdfProcessor();
KartuPasienPdfView pdfView = new KartuPasienPdfView();
Map<String, Object> model = new HashMap<>();
model.put("pasien", lPasien);
pdfProcessor.process(pdfView, model, String.format("pasien-%s.pdf", DateUtil.getTime().hashCode()));
} catch (ServiceException | DocumentException ex) {
JOptionPane.showMessageDialog(this, ex.getMessage());
}
}//GEN-LAST:event_btnCetakKartuTagihanActionPerformed
private void btnCetakPasienActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCetakPasienActionPerformed
try {
RangeTanggal frame = new RangeTanggal(this, Pasien.class);
frame.setPendaftaran(Pasien.Pendaftaran.LOKET);
frame.setVisible(true);
} catch (ServiceException ex) {
JOptionPane.showMessageDialog(this, ex.getMessage());
}
}//GEN-LAST:event_btnCetakPasienActionPerformed
private void btnPendudukUpdateActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnPendudukUpdateActionPerformed
TambahPasien tambahPasien = new TambahPasien(penduduk);
tambahPasien.setVisible(true);
}//GEN-LAST:event_btnPendudukUpdateActionPerformed
private void btnResetActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnResetActionPerformed
resetForm();
}//GEN-LAST:event_btnResetActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel Image;
private javax.swing.JButton btnCetakKartu;
private javax.swing.JButton btnCetakKartuTagihan;
private javax.swing.JButton btnCetakPasien;
private javax.swing.JButton btnLogout;
private javax.swing.JButton btnPasienTambah;
private javax.swing.JButton btnPendudukTambah;
private javax.swing.JButton btnPendudukUpdate;
private javax.swing.JButton btnReset;
private javax.swing.JButton btnUbahPasien;
private javax.swing.JComboBox cbPasienKelas;
private javax.swing.JComboBox cbPasienTanggungan;
private javax.swing.JComboBox cbPendudukKelamin;
private datechooser.beans.DateChooserCombo dateChooserCombo1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel14;
private javax.swing.JLabel jLabel15;
private javax.swing.JLabel jLabel16;
private javax.swing.JLabel jLabel17;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel9;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JToolBar.Separator jSeparator1;
private javax.swing.JToolBar jToolBar1;
private javax.swing.JLabel lblOperator;
private javax.swing.JPanel pnlCari;
private javax.swing.JPanel pnlDetail;
private javax.swing.JPanel pnlPendaftaran;
private javax.swing.JTable tblPenduduk;
private javax.swing.JTextField txtKeyword;
private javax.swing.JTextField txtPasienNomor;
private datechooser.beans.DateChooserCombo txtPasienTanggalMasuk;
private javax.swing.JTextField txtPasienTujuan;
private javax.swing.JTextField txtPendudukKode;
private javax.swing.JTextField txtPendudukNama;
private javax.swing.JTextField txtPendudukNik;
private javax.swing.JTextField txtPendudukUmur;
// End of variables declaration//GEN-END:variables
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.tribes.group.interceptors;
import java.net.ConnectException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.util.Arrays;
import java.util.HashMap;
import org.apache.catalina.tribes.Channel;
import org.apache.catalina.tribes.ChannelException;
import org.apache.catalina.tribes.ChannelException.FaultyMember;
import org.apache.catalina.tribes.ChannelMessage;
import org.apache.catalina.tribes.Member;
import org.apache.catalina.tribes.RemoteProcessException;
import org.apache.catalina.tribes.group.ChannelInterceptorBase;
import org.apache.catalina.tribes.group.InterceptorPayload;
import org.apache.catalina.tribes.io.ChannelData;
import org.apache.catalina.tribes.io.XByteBuffer;
import org.apache.catalina.tribes.membership.MemberImpl;
import org.apache.catalina.tribes.membership.Membership;
import org.apache.catalina.tribes.membership.StaticMember;
/**
* <p>Title: A perfect failure detector </p>
*
* <p>Description: The TcpFailureDetector is a useful interceptor
* that adds reliability to the membership layer.</p>
* <p>
* If the network is busy, or the system is busy so that the membership receiver thread
* is not getting enough time to update its table, members can be "timed out".
* This failure detector will intercept the memberDisappeared message (unless it is a true shutdown message)
* and connect to the member using TCP.
* </p>
* <p>
* The TcpFailureDetector works in two ways. <br>
* 1. It intercepts memberDisappeared events<br>
* 2. It catches send errors
* </p>
*
* @author Filip Hanik
* @version 1.0
*/
public class TcpFailureDetector extends ChannelInterceptorBase {
private static org.apache.juli.logging.Log log = org.apache.juli.logging.LogFactory.getLog( TcpFailureDetector.class );
protected static byte[] TCP_FAIL_DETECT = new byte[] {
79, -89, 115, 72, 121, -126, 67, -55, -97, 111, -119, -128, -95, 91, 7, 20,
125, -39, 82, 91, -21, -15, 67, -102, -73, 126, -66, -113, -127, 103, 30, -74,
55, 21, -66, -121, 69, 126, 76, -88, -65, 10, 77, 19, 83, 56, 21, 50,
85, -10, -108, -73, 58, -6, 64, 120, -111, 4, 125, -41, 114, -124, -64, -43};
protected boolean performConnectTest = true;
protected long connectTimeout = 1000;//1 second default
protected boolean performSendTest = true;
protected boolean performReadTest = false;
protected long readTestTimeout = 5000;//5 seconds
protected Membership membership = null;
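// Suspect bookkeeping: removeSuspects holds members that were reported as disappeared but were
// still reachable when verified; addSuspects holds members that were reported as added (or are
// static members that dropped out) but could not be reached. Values are the time the suspicion
// was recorded, in milliseconds.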
protected HashMap removeSuspects = new HashMap();
protected HashMap addSuspects = new HashMap();
public void sendMessage(Member[] destination, ChannelMessage msg, InterceptorPayload payload) throws ChannelException {
try {
super.sendMessage(destination, msg, payload);
}catch ( ChannelException cx ) {
FaultyMember[] mbrs = cx.getFaultyMembers();
for ( int i=0; i<mbrs.length; i++ ) {
if ( mbrs[i].getCause()!=null &&
(!(mbrs[i].getCause() instanceof RemoteProcessException)) ) {//RemoteProcessException's are ok
this.memberDisappeared(mbrs[i].getMember());
}//end if
}//for
throw cx;
}
}
public void messageReceived(ChannelMessage msg) {
//catch incoming
boolean process = true;
if ( okToProcess(msg.getOptions()) ) {
//check to see if it is a testMessage, if so, process = false
process = ( (msg.getMessage().getLength() != TCP_FAIL_DETECT.length) ||
(!Arrays.equals(TCP_FAIL_DETECT,msg.getMessage().getBytes()) ) );
}//end if
//pass regular messages up the chain; failure detector probes are only logged below
if ( process ) super.messageReceived(msg);
else if ( log.isDebugEnabled() ) log.debug("Received a failure detector packet:"+msg);
}//messageReceived
public void memberAdded(Member member) {
if ( membership == null ) setupMembership();
boolean notify = false;
synchronized (membership) {
if (removeSuspects.containsKey(member)) {
//previously marked suspect, system below picked up the member again
removeSuspects.remove(member);
} else if (membership.getMember( (MemberImpl) member) == null){
//if we add it here, then add it upwards too
//check to see if it is alive
if (memberAlive(member)) {
membership.memberAlive( (MemberImpl) member);
notify = true;
} else {
addSuspects.put(member, new Long(System.currentTimeMillis()));
}
}
}
if ( notify ) super.memberAdded(member);
}
public void memberDisappeared(Member member) {
if ( membership == null ) setupMembership();
boolean notify = false;
boolean shutdown = Arrays.equals(member.getCommand(),Member.SHUTDOWN_PAYLOAD);
if ( !shutdown )
if(log.isInfoEnabled())
log.info("Received memberDisappeared["+member+"] message. Will verify.");
synchronized (membership) {
if (!membership.contains(member)) {
if(log.isInfoEnabled())
log.info("Verification complete. Member already disappeared["+member+"]");
return;
}
//check to see if the member really is gone
//if the payload is not a shutdown message
if (shutdown || !memberAlive(member)) {
//not correct, we need to maintain the map
membership.removeMember( (MemberImpl) member);
removeSuspects.remove(member);
if (member instanceof StaticMember) {
addSuspects.put(member, Long.valueOf(System.currentTimeMillis()));
}
notify = true;
} else {
//add the member as suspect
removeSuspects.put(member, new Long(System.currentTimeMillis()));
}
}
if ( notify ) {
if(log.isInfoEnabled())
log.info("Verification complete. Member disappeared["+member+"]");
super.memberDisappeared(member);
} else {
if(log.isInfoEnabled())
log.info("Verification complete. Member still alive["+member+"]");
}
}
public boolean hasMembers() {
if ( membership == null ) setupMembership();
return membership.hasMembers();
}
public Member[] getMembers() {
if ( membership == null ) setupMembership();
return membership.getMembers();
}
public Member getMember(Member mbr) {
if ( membership == null ) setupMembership();
return membership.getMember(mbr);
}
public Member getLocalMember(boolean incAlive) {
return super.getLocalMember(incAlive);
}
public void heartbeat() {
super.heartbeat();
checkMembers(false);
}
public void checkMembers(boolean checkAll) {
try {
if (membership == null) setupMembership();
synchronized (membership) {
if ( !checkAll ) performBasicCheck();
else performForcedCheck();
}
}catch ( Exception x ) {
log.warn("Unable to perform heartbeat on the TcpFailureDetector.",x);
} finally {
}
}
protected void performForcedCheck() {
//update all alive times
Member[] members = super.getMembers();
for (int i = 0; members != null && i < members.length; i++) {
if (memberAlive(members[i])) {
if (membership.memberAlive((MemberImpl)members[i])) super.memberAdded(members[i]);
addSuspects.remove(members[i]);
} else {
if (membership.getMember(members[i])!=null) {
membership.removeMember((MemberImpl)members[i]);
removeSuspects.remove(members[i]);
if (members[i] instanceof StaticMember) {
addSuspects.put(members[i],
Long.valueOf(System.currentTimeMillis()));
}
super.memberDisappeared(members[i]);
}
} //end if
} //for
}
protected void performBasicCheck() {
//update all alive times
Member[] members = super.getMembers();
for (int i = 0; members != null && i < members.length; i++) {
if (addSuspects.containsKey(members[i]) && membership.getMember(members[i]) == null) {
// avoid temporary adding member.
continue;
}
if (membership.memberAlive( (MemberImpl) members[i])) {
//we don't have this one in our membership, check to see if he/she is alive
if (memberAlive(members[i])) {
log.warn("Member added, even though we werent notified:" + members[i]);
super.memberAdded(members[i]);
} else {
membership.removeMember( (MemberImpl) members[i]);
} //end if
} //end if
} //for
//check suspect members if they are still alive,
//if not, simply issue the memberDisappeared message
MemberImpl[] keys = (MemberImpl[]) removeSuspects.keySet().toArray(new MemberImpl[removeSuspects.size()]);
for (int i = 0; i < keys.length; i++) {
MemberImpl m = (MemberImpl) keys[i];
if (membership.getMember(m) != null && (!memberAlive(m))) {
membership.removeMember(m);
super.memberDisappeared(m);
removeSuspects.remove(m);
if(log.isInfoEnabled())
log.info("Suspect member, confirmed dead.["+m+"]");
} //end if
}
//check add suspects members if they are alive now,
//if they are, simply issue the memberAdded message
keys = (MemberImpl[]) addSuspects.keySet().toArray(new MemberImpl[addSuspects.size()]);
for (int i = 0; i < keys.length; i++) {
MemberImpl m = (MemberImpl) keys[i];
if ( membership.getMember(m) == null && (memberAlive(m))) {
membership.memberAlive(m);
super.memberAdded(m);
addSuspects.remove(m);
if(log.isInfoEnabled())
log.info("Suspect member, confirmed alive.["+m+"]");
} //end if
}
}
protected synchronized void setupMembership() {
if ( membership == null ) {
membership = new Membership((MemberImpl)super.getLocalMember(true));
}
}
protected boolean memberAlive(Member mbr) {
return memberAlive(mbr,TCP_FAIL_DETECT,performSendTest,performReadTest,readTestTimeout,connectTimeout,getOptionFlag());
}
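/*
 * Connectivity probe used for verification: opens a TCP connection to the member's host/port
 * within conTimeout; if sendTest is set it also writes a TCP_FAIL_DETECT channel packet, and if
 * readTest is set it additionally waits up to readTimeout for a reply. Shutdown payloads,
 * connect failures and timeouts all count as "not alive".
 */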
protected static boolean memberAlive(Member mbr, byte[] msgData,
boolean sendTest, boolean readTest,
long readTimeout, long conTimeout,
int optionFlag) {
//could be a shutdown notification
if ( Arrays.equals(mbr.getCommand(),Member.SHUTDOWN_PAYLOAD) ) return false;
Socket socket = new Socket();
try {
InetAddress ia = InetAddress.getByAddress(mbr.getHost());
InetSocketAddress addr = new InetSocketAddress(ia, mbr.getPort());
socket.setSoTimeout((int)readTimeout);
socket.connect(addr, (int) conTimeout);
if ( sendTest ) {
ChannelData data = new ChannelData(true);
data.setAddress(mbr);
data.setMessage(new XByteBuffer(msgData,false));
data.setTimestamp(System.currentTimeMillis());
int options = optionFlag | Channel.SEND_OPTIONS_BYTE_MESSAGE;
if ( readTest ) options = (options | Channel.SEND_OPTIONS_USE_ACK);
else options = (options & (~Channel.SEND_OPTIONS_USE_ACK));
data.setOptions(options);
byte[] message = XByteBuffer.createDataPackage(data);
socket.getOutputStream().write(message);
if ( readTest ) {
int length = socket.getInputStream().read(message);
return length > 0;
}
}//end if
return true;
} catch ( SocketTimeoutException sx) {
//do nothing, we couldn't connect
} catch ( ConnectException cx) {
//do nothing, we couldn't connect
}catch (Exception x ) {
log.error("Unable to perform failure detection check, assuming member down.",x);
} finally {
try {socket.close(); } catch ( Exception ignore ){}
}
return false;
}
public boolean getPerformConnectTest() {
return performConnectTest;
}
public long getReadTestTimeout() {
return readTestTimeout;
}
public boolean getPerformSendTest() {
return performSendTest;
}
public boolean getPerformReadTest() {
return performReadTest;
}
public long getConnectTimeout() {
return connectTimeout;
}
public void setPerformConnectTest(boolean performConnectTest) {
this.performConnectTest = performConnectTest;
}
public void setPerformReadTest(boolean performReadTest) {
this.performReadTest = performReadTest;
}
public void setPerformSendTest(boolean performSendTest) {
this.performSendTest = performSendTest;
}
public void setReadTestTimeout(long readTestTimeout) {
this.readTestTimeout = readTestTimeout;
}
public void setConnectTimeout(long connectTimeout) {
this.connectTimeout = connectTimeout;
}
}
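/*
 * A minimal wiring sketch, assuming the standard Tribes GroupChannel API
 * (org.apache.catalina.tribes.group.GroupChannel); the timeout and flag values below are
 * arbitrary examples, not recommended settings.
 *
 *   GroupChannel channel = new GroupChannel();
 *   TcpFailureDetector detector = new TcpFailureDetector();
 *   detector.setConnectTimeout(2000);   // give the verification connect 2 seconds
 *   detector.setPerformSendTest(true);  // also push a TCP_FAIL_DETECT packet after connecting
 *   detector.setPerformReadTest(false); // do not wait for an ACK from the peer
 *   channel.addInterceptor(detector);
 *   channel.start(Channel.DEFAULT);     // may throw ChannelException
 */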
|
|
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.packages.ConstantRuleVisibility;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.Preprocessor;
import com.google.devtools.build.lib.packages.util.SubincludePreprocessor;
import com.google.devtools.build.lib.pkgcache.PackageCacheOptions;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils;
import com.google.devtools.build.lib.testutil.ManualClock;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.util.BlazeClock;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.Dirent;
import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import com.google.devtools.build.skyframe.ErrorInfo;
import com.google.devtools.build.skyframe.EvaluationResult;
import com.google.devtools.build.skyframe.RecordingDifferencer;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.common.options.Options;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nullable;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Unit tests of specific functionality of PackageFunction. Note that it's already tested
* indirectly in several other places.
*/
@RunWith(JUnit4.class)
public class PackageFunctionTest extends BuildViewTestCase {
private CustomInMemoryFs fs = new CustomInMemoryFs(new ManualClock());
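// Points the Skyframe package loader at the given roots with default options (public default
// visibility, loading progress shown, 7 globbing threads) and a fresh command UUID.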
private void preparePackageLoading(Path... roots) {
PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class);
packageCacheOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC;
packageCacheOptions.showLoadingProgress = true;
packageCacheOptions.globbingThreads = 7;
getSkyframeExecutor()
.preparePackageLoading(
new PathPackageLocator(outputBase, ImmutableList.copyOf(roots)),
packageCacheOptions,
"",
UUID.randomUUID(),
ImmutableMap.<String, String>of(),
new TimestampGranularityMonitor(BlazeClock.instance()));
}
@Override
protected Preprocessor.Factory.Supplier getPreprocessorFactorySupplier() {
return new SubincludePreprocessor.FactorySupplier();
}
@Override
protected FileSystem createFileSystem() {
return fs;
}
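// Evaluates the given package key with keepGoing=false, fails the test on any evaluation error,
// and asserts the loaded package itself contains no errors.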
private PackageValue validPackage(SkyKey skyKey) throws InterruptedException {
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
if (result.hasError()) {
fail(result.getError(skyKey).getException().getMessage());
}
PackageValue value = result.get(skyKey);
assertFalse(value.getPackage().containsErrors());
return value;
}
@Test
public void testValidPackage() throws Exception {
scratch.file("pkg/BUILD");
validPackage(PackageValue.key(PackageIdentifier.parse("@//pkg")));
}
@Test
public void testInconsistentNewPackage() throws Exception {
scratch.file("pkg/BUILD", "subinclude('//foo:sub')");
scratch.file("foo/sub");
preparePackageLoading(rootDirectory);
SkyKey pkgLookupKey = PackageLookupValue.key(new PathFragment("foo"));
EvaluationResult<PackageLookupValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), pkgLookupKey, /*keepGoing=*/false, reporter);
assertFalse(result.hasError());
assertFalse(result.get(pkgLookupKey).packageExists());
scratch.file("foo/BUILD");
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//pkg"));
result = SkyframeExecutorTestUtils.evaluate(getSkyframeExecutor(),
skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
Throwable exception = result.getError(skyKey).getException();
assertThat(exception.getMessage()).contains("Inconsistent filesystem operations");
assertThat(exception.getMessage()).contains("Unexpected package");
}
@Test
public void testInconsistentMissingPackage() throws Exception {
reporter.removeHandler(failFastHandler);
Path root1 = fs.getPath("/root1");
scratch.file("/root1/WORKSPACE");
scratch.file("/root1/foo/sub");
scratch.file("/root1/pkg/BUILD", "subinclude('//foo:sub')");
Path root2 = fs.getPath("/root2");
scratch.file("/root2/foo/BUILD");
scratch.file("/root2/foo/sub");
preparePackageLoading(root1, root2);
SkyKey pkgLookupKey = PackageLookupValue.key(PackageIdentifier.parse("@//foo"));
EvaluationResult<PackageLookupValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), pkgLookupKey, /*keepGoing=*/false, reporter);
assertFalse(result.hasError());
assertEquals(root2, result.get(pkgLookupKey).getRoot());
scratch.file("/root1/foo/BUILD");
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//pkg"));
result = SkyframeExecutorTestUtils.evaluate(getSkyframeExecutor(),
skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
Throwable exception = result.getError(skyKey).getException();
System.out.println("exception: " + exception.getMessage());
assertThat(exception.getMessage()).contains("Inconsistent filesystem operations");
assertThat(exception.getMessage()).contains("Inconsistent package location");
}
@Test
public void testPropagatesFilesystemInconsistencies() throws Exception {
reporter.removeHandler(failFastHandler);
RecordingDifferencer differencer = getSkyframeExecutor().getDifferencerForTesting();
Path pkgRoot = getSkyframeExecutor().getPathEntries().get(0);
Path fooBuildFile = scratch.file("foo/BUILD");
Path fooDir = fooBuildFile.getParentDirectory();
// Our custom filesystem says "foo/BUILD" exists but its parent "foo" is a file.
FileStatus inconsistentParentFileStatus = new FileStatus() {
@Override
public boolean isFile() {
return true;
}
@Override
public boolean isDirectory() {
return false;
}
@Override
public boolean isSymbolicLink() {
return false;
}
@Override
public boolean isSpecialFile() {
return false;
}
@Override
public long getSize() throws IOException {
return 0;
}
@Override
public long getLastModifiedTime() throws IOException {
return 0;
}
@Override
public long getLastChangeTime() throws IOException {
return 0;
}
@Override
public long getNodeId() throws IOException {
return 0;
}
};
fs.stubStat(fooDir, inconsistentParentFileStatus);
RootedPath pkgRootedPath = RootedPath.toRootedPath(pkgRoot, fooDir);
SkyValue fooDirValue = FileStateValue.create(pkgRootedPath, tsgm);
differencer.inject(ImmutableMap.of(FileStateValue.key(pkgRootedPath), fooDirValue));
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
String expectedMessage = "/workspace/foo/BUILD exists but its parent path /workspace/foo isn't "
+ "an existing directory";
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
String errorMessage = errorInfo.getException().getMessage();
assertThat(errorMessage).contains("Inconsistent filesystem operations");
assertThat(errorMessage).contains(expectedMessage);
}
@Test
public void testPropagatesFilesystemInconsistencies_Globbing() throws Exception {
reporter.removeHandler(failFastHandler);
RecordingDifferencer differencer = getSkyframeExecutor().getDifferencerForTesting();
Path pkgRoot = getSkyframeExecutor().getPathEntries().get(0);
scratch.file("foo/BUILD",
"subinclude('//a:a')",
"sh_library(name = 'foo', srcs = glob(['bar/**/baz.sh']))");
scratch.file("a/BUILD");
scratch.file("a/a");
Path bazFile = scratch.file("foo/bar/baz/baz.sh");
Path bazDir = bazFile.getParentDirectory();
Path barDir = bazDir.getParentDirectory();
long bazFileNodeId = bazFile.stat().getNodeId();
// Our custom filesystem says "foo/bar/baz" does not exist but it also says that "foo/bar"
// has a child directory "baz".
fs.stubStat(bazDir, null);
RootedPath barDirRootedPath = RootedPath.toRootedPath(pkgRoot, barDir);
FileStateValue barDirFileStateValue = FileStateValue.create(barDirRootedPath, tsgm);
FileValue barDirFileValue = FileValue.value(barDirRootedPath, barDirFileStateValue,
barDirRootedPath, barDirFileStateValue);
DirectoryListingValue barDirListing = DirectoryListingValue.value(barDirRootedPath,
barDirFileValue, DirectoryListingStateValue.create(ImmutableList.of(
new Dirent("baz", Dirent.Type.DIRECTORY))));
differencer.inject(ImmutableMap.of(DirectoryListingValue.key(barDirRootedPath), barDirListing));
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
String expectedMessage = "Some filesystem operations implied /workspace/foo/bar/baz/baz.sh was "
+ "a regular file with size of 0 and mtime of 0 and nodeId of " + bazFileNodeId + " and "
+ "mtime of 0 but others made us think it was a nonexistent path";
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
String errorMessage = errorInfo.getException().getMessage();
assertThat(errorMessage).contains("Inconsistent filesystem operations");
assertThat(errorMessage).contains(expectedMessage);
}
/** Regression test for unexpected exception type from PackageValue. */
@Test
public void testDiscrepancyBetweenLegacyAndSkyframePackageLoadingErrors() throws Exception {
reporter.removeHandler(failFastHandler);
Path fooBuildFile = scratch.file("foo/BUILD",
"sh_library(name = 'foo', srcs = glob(['bar/*.sh']))");
Path fooDir = fooBuildFile.getParentDirectory();
Path barDir = fooDir.getRelative("bar");
scratch.file("foo/bar/baz.sh");
fs.scheduleMakeUnreadableAfterReaddir(barDir);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
String expectedMessage = "Encountered error 'Directory is not readable'";
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
String errorMessage = errorInfo.getException().getMessage();
assertThat(errorMessage).contains("Inconsistent filesystem operations");
assertThat(errorMessage).contains(expectedMessage);
}
@Test
public void testMultipleSubincludesFromSamePackage() throws Exception {
scratch.file("foo/BUILD",
"subinclude('//bar:a')",
"subinclude('//bar:b')");
scratch.file("bar/BUILD",
"exports_files(['a', 'b'])");
scratch.file("bar/a");
scratch.file("bar/b");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
validPackage(skyKey);
}
@Test
public void testTransitiveSubincludesStoredInPackage() throws Exception {
scratch.file("foo/BUILD",
"subinclude('//bar:a')");
scratch.file("bar/BUILD",
"exports_files(['a'])");
scratch.file("bar/a",
"subinclude('//baz:b')");
scratch.file("baz/BUILD",
"exports_files(['b', 'c'])");
scratch.file("baz/b");
scratch.file("baz/c");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue value = validPackage(skyKey);
assertThat(value.getPackage().getSubincludeLabels()).containsExactly(
Label.parseAbsolute("//bar:a"), Label.parseAbsolute("//baz:b"));
scratch.overwriteFile("bar/a",
"subinclude('//baz:c')");
getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
ModifiedFileSet.builder().modify(new PathFragment("bar/a")).build(), rootDirectory);
value = validPackage(skyKey);
assertThat(value.getPackage().getSubincludeLabels()).containsExactly(
Label.parseAbsolute("//bar:a"), Label.parseAbsolute("//baz:c"));
}
@SuppressWarnings("unchecked") // Cast of srcs attribute to Iterable<Label>.
@Test
public void testGlobOrderStable() throws Exception {
scratch.file("foo/BUILD", "sh_library(name = 'foo', srcs = glob(['**/*.txt']))");
scratch.file("foo/b.txt");
scratch.file("foo/c/c.txt");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue value = validPackage(skyKey);
assertThat(
(Iterable<Label>)
value
.getPackage()
.getTarget("foo")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.containsExactly(
Label.parseAbsoluteUnchecked("//foo:b.txt"),
Label.parseAbsoluteUnchecked("//foo:c/c.txt"))
.inOrder();
scratch.file("foo/d.txt");
getSkyframeExecutor()
.invalidateFilesUnderPathForTesting(
reporter,
ModifiedFileSet.builder().modify(new PathFragment("foo/d.txt")).build(),
rootDirectory);
value = validPackage(skyKey);
assertThat(
(Iterable<Label>)
value
.getPackage()
.getTarget("foo")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.containsExactly(
Label.parseAbsoluteUnchecked("//foo:b.txt"),
Label.parseAbsoluteUnchecked("//foo:c/c.txt"),
Label.parseAbsoluteUnchecked("//foo:d.txt"))
.inOrder();
}
@SuppressWarnings("unchecked") // Cast of srcs attribute to Iterable<Label>.
@Test
public void testGlobOrderStableWithLegacyAndSkyframeComponents() throws Exception {
scratch.file("foo/BUILD", "sh_library(name = 'foo', srcs = glob(['*.txt']))");
scratch.file("foo/b.txt");
scratch.file("foo/a.config");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue value = validPackage(skyKey);
assertThat(
(Iterable<Label>)
value
.getPackage()
.getTarget("foo")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.containsExactly(Label.parseAbsoluteUnchecked("//foo:b.txt"));
scratch.overwriteFile(
"foo/BUILD", "sh_library(name = 'foo', srcs = glob(['*.txt', '*.config']))");
getSkyframeExecutor()
.invalidateFilesUnderPathForTesting(
reporter,
ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(),
rootDirectory);
value = validPackage(skyKey);
assertThat(
(Iterable<Label>)
value
.getPackage()
.getTarget("foo")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.containsExactly(
Label.parseAbsoluteUnchecked("//foo:a.config"),
Label.parseAbsoluteUnchecked("//foo:b.txt"))
.inOrder();
scratch.overwriteFile(
"foo/BUILD", "sh_library(name = 'foo', srcs = glob(['*.txt', '*.config'])) # comment");
getSkyframeExecutor()
.invalidateFilesUnderPathForTesting(
reporter,
ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(),
rootDirectory);
value = validPackage(skyKey);
assertThat(
(Iterable<Label>)
value
.getPackage()
.getTarget("foo")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.containsExactly(
Label.parseAbsoluteUnchecked("//foo:a.config"),
Label.parseAbsoluteUnchecked("//foo:b.txt"))
.inOrder();
getSkyframeExecutor().resetEvaluator();
PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class);
packageCacheOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC;
packageCacheOptions.showLoadingProgress = true;
packageCacheOptions.globbingThreads = 7;
getSkyframeExecutor()
.preparePackageLoading(
new PathPackageLocator(outputBase, ImmutableList.<Path>of(rootDirectory)),
packageCacheOptions,
"",
UUID.randomUUID(),
ImmutableMap.<String, String>of(),
tsgm);
value = validPackage(skyKey);
assertThat(
(Iterable<Label>)
value
.getPackage()
.getTarget("foo")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.containsExactly(
Label.parseAbsoluteUnchecked("//foo:a.config"),
Label.parseAbsoluteUnchecked("//foo:b.txt"))
.inOrder();
}
@Test
public void testIncludeInMainAndDefaultRepository() throws Exception {
scratch.file("foo/BUILD",
"subinclude('//baz:a')");
scratch.file("bar/BUILD",
"subinclude('@//baz:a')");
scratch.file("baz/BUILD",
"exports_files(['a'])");
scratch.file("baz/a");
preparePackageLoading(rootDirectory);
SkyKey fooKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue fooValue = validPackage(fooKey);
assertThat(fooValue.getPackage().getSubincludeLabels()).containsExactly(
Label.parseAbsolute("//baz:a"));
SkyKey barKey = PackageValue.key(PackageIdentifier.parse("@//bar"));
PackageValue barValue = validPackage(barKey);
assertThat(barValue.getPackage().getSubincludeLabels()).containsExactly(
Label.parseAbsolute("@//baz:a"));
}
@Test
public void testTransitiveSkylarkDepsStoredInPackage() throws Exception {
scratch.file("foo/BUILD",
"load('/bar/ext', 'a')");
scratch.file("bar/BUILD");
scratch.file("bar/ext.bzl",
"load('/baz/ext', 'b')",
"a = b");
scratch.file("baz/BUILD");
scratch.file("baz/ext.bzl",
"b = 1");
scratch.file("qux/BUILD");
scratch.file("qux/ext.bzl",
"c = 1");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue value = validPackage(skyKey);
assertThat(value.getPackage().getSkylarkFileDependencies()).containsExactly(
Label.parseAbsolute("//bar:ext.bzl"), Label.parseAbsolute("//baz:ext.bzl"));
scratch.overwriteFile("bar/ext.bzl",
"load('/qux/ext', 'c')",
"a = c");
getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
ModifiedFileSet.builder().modify(new PathFragment("bar/ext.bzl")).build(), rootDirectory);
value = validPackage(skyKey);
assertThat(value.getPackage().getSkylarkFileDependencies()).containsExactly(
Label.parseAbsolute("//bar:ext.bzl"), Label.parseAbsolute("//qux:ext.bzl"));
}
@Test
public void testNonExistingSkylarkExtension() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("test/skylark/BUILD",
"load('/test/skylark/bad_extension', 'some_symbol')",
"genrule(name = gr,",
" outs = ['out.txt'],",
" cmd = 'echo hello >@')");
invalidatePackages();
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark"));
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
String expectedMsg = "error loading package 'test/skylark': "
+ "Extension file not found. Unable to load file '//test/skylark:bad_extension.bzl': "
+ "file doesn't exist or isn't a file";
assertThat(errorInfo.getException())
.hasMessage(expectedMsg);
}
@Test
public void testNonExistingSkylarkExtensionWithPythonPreprocessing() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("foo/BUILD",
"exports_files(['a'])");
scratch.file("foo/a",
"load('/test/skylark/bad_extension', 'some_symbol')");
scratch.file("test/skylark/BUILD",
"subinclude('//foo:a')");
invalidatePackages();
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark"));
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
String expectedMsg = "error loading package 'test/skylark': "
+ "Extension file not found. Unable to load file '//test/skylark:bad_extension.bzl': "
+ "file doesn't exist or isn't a file";
assertThat(errorInfo.getException())
.hasMessage(expectedMsg);
}
@Test
public void testNonExistingSkylarkExtensionFromExtension() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("test/skylark/extension.bzl",
"load('/test/skylark/bad_extension', 'some_symbol')",
"a = 'a'");
scratch.file("test/skylark/BUILD",
"load('/test/skylark/extension', 'a')",
"genrule(name = gr,",
" outs = ['out.txt'],",
" cmd = 'echo hello >@')");
invalidatePackages();
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark"));
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
assertThat(errorInfo.getException())
.hasMessage("error loading package 'test/skylark': Extension file not found. "
+ "Unable to load file '//test/skylark:bad_extension.bzl': "
+ "file doesn't exist or isn't a file");
}
@Test
public void testSymlinkCycleWithSkylarkExtension() throws Exception {
reporter.removeHandler(failFastHandler);
Path extensionFilePath = scratch.resolve("/workspace/test/skylark/extension.bzl");
FileSystemUtils.ensureSymbolicLink(extensionFilePath, new PathFragment("extension.bzl"));
scratch.file("test/skylark/BUILD",
"load('/test/skylark/extension', 'a')",
"genrule(name = gr,",
" outs = ['out.txt'],",
" cmd = 'echo hello >@')");
invalidatePackages();
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//test/skylark"));
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
ErrorInfo errorInfo = result.getError(skyKey);
assertEquals(skyKey, errorInfo.getRootCauseOfException());
assertThat(errorInfo.getException())
.hasMessage(
"error loading package 'test/skylark': Encountered error while reading extension "
+ "file 'test/skylark/extension.bzl': Symlink cycle");
}
@Test
public void testIOErrorLookingForSubpackageForLabelIsHandled() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("foo/BUILD",
"sh_library(name = 'foo', srcs = ['bar/baz.sh'])");
Path barBuildFile = scratch.file("foo/bar/BUILD");
fs.stubStatError(barBuildFile, new IOException("nope"));
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
EvaluationResult<PackageValue> result = SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/false, reporter);
assertTrue(result.hasError());
assertContainsEvent("nope");
}
@Test
public void testLoadRelativePath() throws Exception {
scratch.file("pkg/BUILD", "load('ext', 'a')");
scratch.file("pkg/ext.bzl", "a = 1");
validPackage(PackageValue.key(PackageIdentifier.parse("@//pkg")));
}
@Test
public void testLoadAbsolutePath() throws Exception {
scratch.file("pkg1/BUILD");
scratch.file("pkg2/BUILD",
"load('/pkg1/ext', 'a')");
scratch.file("pkg1/ext.bzl", "a = 1");
validPackage(PackageValue.key(PackageIdentifier.parse("@//pkg2")));
}
@Test
public void testBadWorkspaceFile() throws Exception {
Path workspacePath = scratch.overwriteFile("WORKSPACE", "junk");
SkyKey skyKey = PackageValue.key(PackageIdentifier.createInMainRepo("external"));
getSkyframeExecutor()
.invalidate(
Predicates.equalTo(
FileStateValue.key(
RootedPath.toRootedPath(
workspacePath.getParentDirectory(),
new PathFragment(workspacePath.getBaseName())))));
reporter.removeHandler(failFastHandler);
EvaluationResult<PackageValue> result =
SkyframeExecutorTestUtils.evaluate(
getSkyframeExecutor(), skyKey, /*keepGoing=*/ false, reporter);
assertFalse(result.hasError());
assertTrue(result.get(skyKey).getPackage().containsErrors());
}
// Regression test for the two ugly consequences of a bug where GlobFunction incorrectly matched
// dangling symlinks.
@Test
public void testIncrementalSkyframeHybridGlobbingOnDanglingSymlink() throws Exception {
Path packageDirPath = scratch.file("foo/BUILD",
"exports_files(glob(['*.txt']))").getParentDirectory();
scratch.file("foo/existing.txt");
FileSystemUtils.ensureSymbolicLink(packageDirPath.getChild("dangling.txt"), "nope");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue value = validPackage(skyKey);
assertFalse(value.getPackage().containsErrors());
assertThat(value.getPackage().getTarget("existing.txt").getName()).isEqualTo("existing.txt");
try {
value.getPackage().getTarget("dangling.txt");
fail();
} catch (NoSuchTargetException expected) {
}
scratch.overwriteFile("foo/BUILD",
"exports_files(glob(['*.txt'])),",
"#some-irrelevant-comment");
getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(), rootDirectory);
value = validPackage(skyKey);
assertFalse(value.getPackage().containsErrors());
assertThat(value.getPackage().getTarget("existing.txt").getName()).isEqualTo("existing.txt");
try {
value.getPackage().getTarget("dangling.txt");
fail();
} catch (NoSuchTargetException expected) {
// One consequence of the bug was that dangling symlinks were matched by globs evaluated by
// Skyframe globbing, meaning there would incorrectly be corresponding targets in packages
// that had skyframe cache hits during skyframe hybrid globbing.
}
scratch.file("foo/nope");
getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
ModifiedFileSet.builder().modify(new PathFragment("foo/nope")).build(), rootDirectory);
PackageValue newValue = validPackage(skyKey);
assertFalse(newValue.getPackage().containsErrors());
assertThat(newValue.getPackage().getTarget("existing.txt").getName()).isEqualTo("existing.txt");
// Another consequence of the bug is that change pruning would incorrectly cut off changes that
// caused a dangling symlink potentially matched by a glob to come into existence.
assertThat(newValue.getPackage().getTarget("dangling.txt").getName()).isEqualTo("dangling.txt");
assertThat(newValue.getPackage()).isNotSameAs(value.getPackage());
}
// Regression test for Skyframe globbing incorrectly matching the package's directory path on
// 'glob(['**'], exclude_directories = 0)'. We test for this directly by triggering
// hybrid globbing (gives coverage for both legacy globbing and skyframe globbing).
@Test
public void testRecursiveGlobNeverMatchesPackageDirectory() throws Exception {
scratch.file("foo/BUILD",
"[sh_library(name = x + '-matched') for x in glob(['**'], exclude_directories = 0)]");
scratch.file("foo/bar");
preparePackageLoading(rootDirectory);
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
PackageValue value = validPackage(skyKey);
assertFalse(value.getPackage().containsErrors());
assertThat(value.getPackage().getTarget("bar-matched").getName()).isEqualTo("bar-matched");
try {
value.getPackage().getTarget("-matched");
fail();
} catch (NoSuchTargetException expected) {
}
scratch.overwriteFile("foo/BUILD",
"[sh_library(name = x + '-matched') for x in glob(['**'], exclude_directories = 0)]",
"#some-irrelevant-comment");
getSkyframeExecutor().invalidateFilesUnderPathForTesting(reporter,
ModifiedFileSet.builder().modify(new PathFragment("foo/BUILD")).build(), rootDirectory);
value = validPackage(skyKey);
assertFalse(value.getPackage().containsErrors());
assertThat(value.getPackage().getTarget("bar-matched").getName()).isEqualTo("bar-matched");
try {
value.getPackage().getTarget("-matched");
fail();
} catch (NoSuchTargetException expected) {
}
}
@Test
public void testGlobsHappenInParallel() throws Exception {
scratch.file(
"foo/BUILD",
"load('//foo:my_library.bzl', 'my_library')",
"[sh_library(name = x + '-matched') for x in glob(['bar/*'], exclude_directories = 0)]",
"cc_library(name = 'cc', srcs = glob(['cc/*']))",
"my_library(name = 'my', srcs = glob(['sh/*']))");
scratch.file(
"foo/my_library.bzl",
"def my_library(name = None, srcs = []):",
" native.sh_library(name = name, srcs = srcs, deps = native.glob(['inner/*']))");
scratch.file("foo/bar/1");
Path barPath = scratch.file("foo/bar/2").getParentDirectory();
Path ccPath = scratch.file("foo/cc/src.file").getParentDirectory();
Path shPath = scratch.dir("foo/sh");
Path innerPath = scratch.dir("foo/inner");
PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class);
packageCacheOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC;
packageCacheOptions.showLoadingProgress = true;
packageCacheOptions.globbingThreads = 7;
packageCacheOptions.maxDirectoriesToEagerlyVisitInGlobbing = 10;
getSkyframeExecutor()
.preparePackageLoading(
new PathPackageLocator(outputBase, ImmutableList.of(rootDirectory)),
packageCacheOptions,
"",
UUID.randomUUID(),
ImmutableMap.<String, String>of(),
new TimestampGranularityMonitor(BlazeClock.instance()));
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
final CountDownLatch allDirsRequested = new CountDownLatch(4);
Listener synchronizeListener =
new Listener() {
@Override
public Object accept(Path path, FileOp op, Order order) throws IOException {
if (op == FileOp.READDIR && order == Order.BEFORE) {
allDirsRequested.countDown();
try {
assertThat(
allDirsRequested.await(
TestUtils.WAIT_TIMEOUT_MILLISECONDS, TimeUnit.MILLISECONDS))
.isTrue();
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
return NO_RESULT_MARKER;
}
};
fs.setCustomOverride(barPath, synchronizeListener);
fs.setCustomOverride(ccPath, synchronizeListener);
fs.setCustomOverride(shPath, synchronizeListener);
fs.setCustomOverride(innerPath, synchronizeListener);
PackageValue value = validPackage(skyKey);
assertFalse(value.getPackage().containsErrors());
assertThat(value.getPackage().getTarget("bar/1-matched").getName()).isEqualTo("bar/1-matched");
assertThat(value.getPackage().getTarget("cc/src.file")).isNotNull();
assertThat(
(Iterable<?>)
value
.getPackage()
.getTarget("my")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.isEmpty();
}
@Test
public void testGlobsDontHappenInParallel() throws Exception {
scratch.file(
"foo/BUILD",
"load('//foo:my_library.bzl', 'my_library')",
"[sh_library(name = x + '-matched') for x in glob(['bar/*'], exclude_directories = 0)]",
"cc_library(name = 'cc', srcs = glob(['cc/*']))",
"my_library(name = 'my', srcs = glob(['sh/*']))");
scratch.file(
"foo/my_library.bzl",
"def my_library(name = None, srcs = []):",
" native.sh_library(name = name, srcs = srcs, deps = native.glob(['inner/*']))");
scratch.file("foo/bar/1");
Path barPath = scratch.file("foo/bar/2").getParentDirectory();
Path ccPath = scratch.file("foo/cc/src.file").getParentDirectory();
Path shPath = scratch.dir("foo/sh");
Path innerPath = scratch.dir("foo/inner");
PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class);
packageCacheOptions.defaultVisibility = ConstantRuleVisibility.PUBLIC;
packageCacheOptions.showLoadingProgress = true;
packageCacheOptions.globbingThreads = 7;
packageCacheOptions.maxDirectoriesToEagerlyVisitInGlobbing = -1;
getSkyframeExecutor()
.preparePackageLoading(
new PathPackageLocator(outputBase, ImmutableList.of(rootDirectory)),
packageCacheOptions,
"",
UUID.randomUUID(),
ImmutableMap.<String, String>of(),
new TimestampGranularityMonitor(BlazeClock.instance()));
SkyKey skyKey = PackageValue.key(PackageIdentifier.parse("@//foo"));
final AtomicBoolean atLeastOneUnfinishedRequest = new AtomicBoolean(false);
final CountDownLatch allDirsRequested = new CountDownLatch(4);
Listener synchronizeListener =
new Listener() {
@Override
public Object accept(Path path, FileOp op, Order order) throws IOException {
if (op == FileOp.READDIR && order == Order.BEFORE) {
allDirsRequested.countDown();
try {
if (!allDirsRequested.await(1, TimeUnit.SECONDS)) {
atLeastOneUnfinishedRequest.set(true);
}
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
return NO_RESULT_MARKER;
}
};
fs.setCustomOverride(barPath, synchronizeListener);
fs.setCustomOverride(ccPath, synchronizeListener);
fs.setCustomOverride(shPath, synchronizeListener);
fs.setCustomOverride(innerPath, synchronizeListener);
PackageValue value = validPackage(skyKey);
assertFalse(value.getPackage().containsErrors());
assertThat(value.getPackage().getTarget("bar/1-matched").getName()).isEqualTo("bar/1-matched");
assertThat(value.getPackage().getTarget("cc/src.file")).isNotNull();
assertThat(
(Iterable<?>)
value
.getPackage()
.getTarget("my")
.getAssociatedRule()
.getAttributeContainer()
.getAttr("srcs"))
.isEmpty();
assertThat(atLeastOneUnfinishedRequest.get()).isTrue();
}
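/**
 * In-memory filesystem whose stat() and readdir() calls can be intercepted per path. A registered
 * Listener is invoked BEFORE delegation (and may short-circuit the call by returning a stubbed
 * result or throwing an IOException) and again AFTER delegation; returning NO_RESULT_MARKER means
 * "fall through to the real result".
 */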
private static class CustomInMemoryFs extends InMemoryFileSystem {
private final Map<Path, Listener> customOverrides = Maps.newHashMap();
public CustomInMemoryFs(ManualClock manualClock) {
super(manualClock);
}
public void stubStat(final Path targetPath, @Nullable final FileStatus stubbedResult) {
setCustomOverride(
targetPath,
new Listener() {
@Override
public Object accept(Path path, FileOp op, Order order) {
if (targetPath.equals(path) && op == FileOp.STAT && order == Order.BEFORE) {
return stubbedResult;
} else {
return NO_RESULT_MARKER;
}
}
});
}
public void stubStatError(final Path targetPath, final IOException stubbedResult) {
setCustomOverride(
targetPath,
new Listener() {
@Override
public Object accept(Path path, FileOp op, Order order) throws IOException {
if (targetPath.equals(path) && op == FileOp.STAT && order == Order.BEFORE) {
throw stubbedResult;
} else {
return NO_RESULT_MARKER;
}
}
});
}
void setCustomOverride(Path path, Listener listener) {
customOverrides.put(path, listener);
}
@Override
public FileStatus stat(Path path, boolean followSymlinks) throws IOException {
Listener listener = customOverrides.get(path);
if (listener != null) {
Object status = listener.accept(path, FileOp.STAT, Order.BEFORE);
if (status != NO_RESULT_MARKER) {
return (FileStatus) status;
}
}
FileStatus fileStatus = super.stat(path, followSymlinks);
if (listener != null) {
Object status = listener.accept(path, FileOp.STAT, Order.AFTER);
if (status != NO_RESULT_MARKER) {
return (FileStatus) status;
}
}
return fileStatus;
}
public void scheduleMakeUnreadableAfterReaddir(final Path targetPath) {
setCustomOverride(
targetPath,
new Listener() {
@Override
public Object accept(Path path, FileOp op, Order order) throws IOException {
if (targetPath.equals(path) && op == FileOp.READDIR && order == Order.AFTER) {
targetPath.setReadable(false);
}
return NO_RESULT_MARKER;
}
});
}
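// Illustrative sketch only (not used by the tests above): stubbing stat() so that a path appears
// inaccessible, in the same style as stubStatError(). The method name is hypothetical.
public void stubStatAsInaccessible(Path targetPath) {
  stubStatError(targetPath, new IOException("stubbed: " + targetPath + " is inaccessible"));
}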
@SuppressWarnings("unchecked")
@Override
public Collection<Dirent> readdir(Path path, boolean followSymlinks) throws IOException {
Listener listener = customOverrides.get(path);
if (listener != null) {
Object status = listener.accept(path, FileOp.READDIR, Order.BEFORE);
if (status != NO_RESULT_MARKER) {
return (Collection<Dirent>) status;
}
}
Collection<Dirent> result = super.readdir(path, followSymlinks);
if (listener != null) {
Object status = listener.accept(path, FileOp.READDIR, Order.AFTER);
if (status != NO_RESULT_MARKER) {
return (Collection<Dirent>) status;
}
}
return result;
}
}
private static final Object NO_RESULT_MARKER = new Object();
private enum Order {
BEFORE,
AFTER
}
private enum FileOp {
STAT,
READDIR
}
private interface Listener {
Object accept(Path path, FileOp op, Order order) throws IOException;
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.InternalHiveSplit.InternalHiveBlock;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.stats.CounterStat;
import io.airlift.units.DataSize;
import org.testng.annotations.Test;
import java.util.List;
import java.util.OptionalInt;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.hive.HiveTestUtils.SESSION;
import static com.facebook.presto.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.testing.Assertions.assertContains;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.lang.Math.toIntExact;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestHiveSplitSource
{
@Test
public void testOutstandingSplitCount()
{
HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce(
SESSION,
"database",
"table",
TupleDomain.all(),
10,
10,
new DataSize(1, MEGABYTE),
new TestingHiveSplitLoader(),
Executors.newFixedThreadPool(5),
new CounterStat());
// add 10 splits
for (int i = 0; i < 10; i++) {
hiveSplitSource.addToQueue(new TestSplit(i));
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1);
}
// remove 1 split
assertEquals(getSplits(hiveSplitSource, 1).size(), 1);
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 9);
// remove 4 splits
assertEquals(getSplits(hiveSplitSource, 4).size(), 4);
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 5);
// try to remove 20 splits, and verify we only got 5
assertEquals(getSplits(hiveSplitSource, 20).size(), 5);
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 0);
}
@Test
public void testFail()
{
HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce(
SESSION,
"database",
"table",
TupleDomain.all(),
10,
10,
new DataSize(1, MEGABYTE),
new TestingHiveSplitLoader(),
Executors.newFixedThreadPool(5),
new CounterStat());
// add some splits
for (int i = 0; i < 5; i++) {
hiveSplitSource.addToQueue(new TestSplit(i));
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1);
}
// remove a split and verify
assertEquals(getSplits(hiveSplitSource, 1).size(), 1);
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4);
// fail source
hiveSplitSource.fail(new RuntimeException("test"));
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4);
// try to remove a split and verify we got the expected exception
try {
getSplits(hiveSplitSource, 1);
fail("expected RuntimeException");
}
catch (RuntimeException e) {
assertEquals(e.getMessage(), "test");
}
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // 3 splits + poison
// attempt to add another split and verify it does not work
hiveSplitSource.addToQueue(new TestSplit(99));
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // 3 splits + poison
// fail source again
hiveSplitSource.fail(new RuntimeException("another failure"));
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // 3 splits + poison
// try to remove a split and verify we got the first exception
try {
getSplits(hiveSplitSource, 1);
fail("expected RuntimeException");
}
catch (RuntimeException e) {
assertEquals(e.getMessage(), "test");
}
}
@Test
public void testReaderWaitsForSplits()
throws Exception
{
final HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce(
SESSION,
"database",
"table",
TupleDomain.all(),
10,
10,
new DataSize(1, MEGABYTE),
new TestingHiveSplitLoader(),
Executors.newFixedThreadPool(5),
new CounterStat());
final SettableFuture<ConnectorSplit> splits = SettableFuture.create();
// create a thread that will get a split
final CountDownLatch started = new CountDownLatch(1);
Thread getterThread = new Thread(new Runnable()
{
@Override
public void run()
{
try {
started.countDown();
List<ConnectorSplit> batch = getSplits(hiveSplitSource, 1);
assertEquals(batch.size(), 1);
splits.set(batch.get(0));
}
catch (Throwable e) {
splits.setException(e);
}
}
});
getterThread.start();
try {
// wait for the thread to be started
assertTrue(started.await(1, TimeUnit.SECONDS));
// sleep for a bit, and ensure the thread is still blocked
TimeUnit.MILLISECONDS.sleep(200);
assertTrue(!splits.isDone());
// add a split
hiveSplitSource.addToQueue(new TestSplit(33));
// wait for thread to get the split
ConnectorSplit split = splits.get(800, TimeUnit.MILLISECONDS);
assertEquals(((HiveSplit) split).getSchema().getProperty("id"), "33");
}
finally {
// make sure the thread exits
getterThread.interrupt();
}
}
@Test(enabled = false)
public void testOutstandingSplitSize()
{
DataSize maxOutstandingSplitsSize = new DataSize(1, MEGABYTE);
HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce(
SESSION,
"database",
"table",
TupleDomain.all(),
10,
10000,
maxOutstandingSplitsSize,
new TestingHiveSplitLoader(),
Executors.newFixedThreadPool(5),
new CounterStat());
InternalHiveSplit testSplit = new InternalHiveSplit(
"partition-name",
"path",
0,
100,
100,
new Properties(),
ImmutableList.of(new HivePartitionKey("pk_col", "pk_value")),
ImmutableList.of(new InternalHiveBlock(0, 100, ImmutableList.of(HostAddress.fromString("localhost")))),
OptionalInt.empty(),
true,
false,
ImmutableMap.of());
int testSplitSizeInBytes = testSplit.getEstimatedSizeInBytes();
int maxSplitCount = toIntExact(maxOutstandingSplitsSize.toBytes()) / testSplitSizeInBytes;
for (int i = 0; i < maxSplitCount; i++) {
hiveSplitSource.addToQueue(testSplit);
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1);
}
assertEquals(getSplits(hiveSplitSource, maxSplitCount).size(), maxSplitCount);
for (int i = 0; i < maxSplitCount; i++) {
hiveSplitSource.addToQueue(testSplit);
assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1);
}
try {
hiveSplitSource.addToQueue(testSplit);
fail("expect failure");
}
catch (PrestoException e) {
assertContains(e.getMessage(), "Split buffering for database.table exceeded memory limit");
}
}
private static List<ConnectorSplit> getSplits(ConnectorSplitSource source, int maxSize)
{
return getFutureValue(source.getNextBatch(NOT_PARTITIONED, maxSize)).getSplits();
}
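// Illustrative sketch only (not used by the tests above): draining a split source in fixed-size
// batches until the source reports it is finished. With TestingHiveSplitLoader the source never
// finishes, so this is purely an example of the getNextBatch/isFinished contract; the method name
// is hypothetical.
private static List<ConnectorSplit> drainSplits(ConnectorSplitSource source, int batchSize)
{
    ImmutableList.Builder<ConnectorSplit> allSplits = ImmutableList.builder();
    while (!source.isFinished()) {
        allSplits.addAll(getSplits(source, batchSize));
    }
    return allSplits.build();
}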
private static class TestingHiveSplitLoader
implements HiveSplitLoader
{
@Override
public void start(HiveSplitSource splitSource)
{
}
@Override
public void stop()
{
}
}
private static class TestSplit
extends InternalHiveSplit
{
private TestSplit(int id)
{
super(
"partition-name",
"path",
0,
100,
100,
properties("id", String.valueOf(id)),
ImmutableList.of(),
ImmutableList.of(new InternalHiveBlock(0, 100, ImmutableList.of())),
OptionalInt.empty(),
true,
false,
ImmutableMap.of());
}
private static Properties properties(String key, String value)
{
Properties properties = new Properties();
properties.put(key, value);
return properties;
}
}
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.metainject;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.injection.bean.BeanInjectionInfo;
import org.pentaho.di.core.injection.bean.BeanInjector;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.trans.RowProducer;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.TransStoppedListener;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.RowAdapter;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInjectionMetaEntry;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInjectionInterface;
import org.pentaho.di.trans.step.StepMetaInterface;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Injects metadata into a template transformation and then executes the modified transformation,
* optionally streaming rows from a source step into it.
*
* @author Matt
* @since 2007-07-05
*/
public class MetaInject extends BaseStep implements StepInterface {
private static Class<?> PKG = MetaInject.class; // for i18n purposes, needed by Translator2!!
//Added for PDI-17530
private static final Lock repoSaveLock = new ReentrantLock();
private MetaInjectMeta meta;
private MetaInjectData data;
public MetaInject(
StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ) {
super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
}
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
meta = (MetaInjectMeta) smi;
data = (MetaInjectData) sdi;
// Read the data from all input steps and keep it in memory...
// Skip the step from which we stream data. Keep that available for runtime action.
//
data.rowMap = new HashMap<String, List<RowMetaAndData>>();
for ( String prevStepName : getTransMeta().getPrevStepNames( getStepMeta() ) ) {
// Don't read from the streaming source step
//
if ( !data.streaming || !prevStepName.equalsIgnoreCase( data.streamingSourceStepname ) ) {
List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();
RowSet rowSet = findInputRowSet( prevStepName );
Object[] row = getRowFrom( rowSet );
while ( row != null ) {
RowMetaAndData rd = new RowMetaAndData();
rd.setRowMeta( rowSet.getRowMeta() );
rd.setData( row );
list.add( rd );
row = getRowFrom( rowSet );
}
if ( !list.isEmpty() ) {
data.rowMap.put( prevStepName, list );
}
}
}
List<StepMeta> steps = data.transMeta.getSteps();
for ( Map.Entry<String, StepMetaInterface> en : data.stepInjectionMetasMap.entrySet() ) {
newInjection( en.getKey(), en.getValue() );
}
/*
* constants injection should be executed after steps, because if constant should be inserted into target with array
* in path, constants should be inserted into all arrays items
*/
for ( Map.Entry<String, StepMetaInterface> en : data.stepInjectionMetasMap.entrySet() ) {
newInjectionConstants( en.getKey(), en.getValue() );
}
for ( Map.Entry<String, StepMetaInterface> en : data.stepInjectionMetasMap.entrySet() ) {
en.getValue().searchInfoAndTargetSteps( steps );
}
for ( String targetStepName : data.stepInjectionMap.keySet() ) {
if ( !data.stepInjectionMetasMap.containsKey( targetStepName ) ) {
oldInjection( targetStepName );
StepMeta targetStep = StepMeta.findStep( steps, targetStepName );
if ( targetStep != null ) {
targetStep.getStepMetaInterface().searchInfoAndTargetSteps( steps );
}
}
}
if ( !meta.isNoExecution() ) {
// Now we can execute this modified transformation metadata.
//
final Trans injectTrans = createInjectTrans();
injectTrans.setParentTrans( getTrans() );
injectTrans.setMetaStore( getMetaStore() );
if ( getTrans().getParentJob() != null ) {
injectTrans.setParentJob( getTrans().getParentJob() ); // See PDI-13224
}
getTrans().addTransStoppedListener( new TransStoppedListener() {
public void transStopped( Trans parentTrans ) {
injectTrans.stopAll();
}
} );
injectTrans.prepareExecution( null );
// See if we need to stream some data over...
//
RowProducer rowProducer = null;
if ( data.streaming ) {
rowProducer = injectTrans.addRowProducer( data.streamingTargetStepname, 0 );
}
// Finally, add the mapping transformation to the active sub-transformations
// map in the parent transformation
//
getTrans().addActiveSubTransformation( getStepname(), injectTrans );
if ( !Utils.isEmpty( meta.getSourceStepName() ) ) {
StepInterface stepInterface = injectTrans.getStepInterface( meta.getSourceStepName(), 0 );
if ( stepInterface == null ) {
throw new KettleException( "Unable to find step '" + meta.getSourceStepName() + "' to read from." );
}
stepInterface.addRowListener( new RowAdapter() {
@Override
public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
// Just pass along the data as output of this step...
//
MetaInject.this.putRow( rowMeta, row );
}
} );
}
injectTrans.startThreads();
if ( data.streaming ) {
// Drain all the rows from the parent transformation into the modified transformation
//
RowSet rowSet = findInputRowSet( data.streamingSourceStepname );
if ( rowSet == null ) {
throw new KettleException( "Unable to find step '" + data.streamingSourceStepname + "' to stream data from" );
}
Object[] row = getRowFrom( rowSet );
while ( row != null && !isStopped() ) {
rowProducer.putRow( rowSet.getRowMeta(), row );
row = getRowFrom( rowSet );
}
rowProducer.finished();
}
// Wait until the child transformation finished processing...
//
while ( !injectTrans.isFinished() && !injectTrans.isStopped() && !isStopped() ) {
copyResult( injectTrans );
// Wait a little bit.
try {
Thread.sleep( 50 );
} catch ( Exception e ) {
// Ignore errors
}
}
copyResult( injectTrans );
waitUntilFinished( injectTrans );
}
// let the transformation complete its execution to allow for any customizations to MDI to happen in the init methods of steps
if ( log.isDetailed() ) {
logDetailed( "XML of transformation after injection: " + data.transMeta.getXML() );
}
String targetFile = environmentSubstitute( meta.getTargetFile() );
if ( !Utils.isEmpty( targetFile ) ) {
writeInjectedKtr( targetFile );
}
// All done!
setOutputDone();
return false;
}
void waitUntilFinished( Trans injectTrans ) {
injectTrans.waitUntilFinished();
}
Trans createInjectTrans() {
return new Trans( data.transMeta, this );
}
private void writeInjectedKtr( String targetFilePath ) throws KettleException {
if ( getRepository() == null ) {
writeInjectedKtrToFs( targetFilePath );
} else {
writeInjectedKtrToRepo( targetFilePath );
}
}
/**
* Writes the generated meta injection transformation to the file system.
* @param targetFilePath the filesystem path to which to save the generated injection ktr
* @throws KettleException
*/
private void writeInjectedKtrToFs( String targetFilePath ) throws KettleException {
OutputStream os = null;
try {
os = KettleVFS.getOutputStream( targetFilePath, false );
os.write( XMLHandler.getXMLHeader().getBytes( Const.XML_ENCODING ) );
os.write( data.transMeta.getXML().getBytes( Const.XML_ENCODING ) );
} catch ( IOException e ) {
throw new KettleException( "Unable to write target file (ktr after injection) to file '"
+ targetFilePath + "'", e );
} finally {
if ( os != null ) {
try {
os.close();
} catch ( Exception e ) {
throw new KettleException( e );
}
}
}
}
/**
* Writes the generated meta injection transformation to the repository. It is assumed that the repository
* exists (user is connected).
* @param targetFilePath the repo path to which to save the generated injection ktr
* @throws KettleException
*/
private void writeInjectedKtrToRepo( final String targetFilePath ) throws KettleException {
try {
repoSaveLock.lock();
// clone the transMeta associated with the data, this is the generated meta injection transformation
final TransMeta generatedTrans = (TransMeta) data.transMeta.clone();
// the targetFilePath holds the absolute repo path that is the requested destination of this generated
// transformation, extract the file name (no extension) and the containing directory and adjust the generated
// transformation properties accordingly
List<String> targetPath = new ArrayList( Arrays.asList( Const.splitPath( targetFilePath,
RepositoryDirectory.DIRECTORY_SEPARATOR ) ) );
final String fileName = targetPath.get( targetPath.size() - 1 ).replace( ".ktr", "" );
generatedTrans.setName( fileName );
// remove the last targetPath element, so we're left with the target directory path
targetPath.remove( targetPath.size() - 1 );
if ( targetPath.size() > 0 ) {
final String dirPath = String.join( RepositoryDirectory.DIRECTORY_SEPARATOR, targetPath );
RepositoryDirectoryInterface directory = getRepository().findDirectory( dirPath );
// if the directory does not exist, try to create it
if ( directory == null ) {
directory = getRepository().createRepositoryDirectory( new RepositoryDirectory( null, "/" ), dirPath );
}
generatedTrans.setRepositoryDirectory( directory );
} else {
// if the directory is null, set it to the directory of the cloned template ktr
if ( log.isDebug() ) {
log.logDebug( "The target injection ktr file path provided by the user is not a valid fully qualified "
+ "repository path - will store the generated ktr in the same directory as the template ktr: ",
data.transMeta.getRepositoryDirectory() );
}
generatedTrans.setRepositoryDirectory( data.transMeta.getRepositoryDirectory() );
}
// set the objectId, in case the injected transformation already exists in the repo, so that it is updated in
// the repository - the objectId will remain null if the transformation is being generated for the first time,
// in which case a new ktr will be created in the repo
generatedTrans.setObjectId( getRepository().getTransformationID( fileName, generatedTrans.getRepositoryDirectory() ) );
getRepository().save( generatedTrans, null, null, true );
} finally {
repoSaveLock.unlock();
}
}
/**
* Inject values from steps.
*/
private void newInjection( String targetStep, StepMetaInterface targetStepMeta ) throws KettleException {
if ( log.isDetailed() ) {
logDetailed( "Handing step '" + targetStep + "' injection!" );
}
BeanInjectionInfo injectionInfo = new BeanInjectionInfo( targetStepMeta.getClass() );
BeanInjector injector = new BeanInjector( injectionInfo );
// Collect all the metadata for this target step...
//
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
boolean wasInjection = false;
for ( TargetStepAttribute target : targetMap.keySet() ) {
SourceStepField source = targetMap.get( target );
if ( target.getStepname().equalsIgnoreCase( targetStep ) ) {
// This is the step to collect data for...
// We also know which step to read the data from. (source)
//
if ( source.getStepname() != null ) {
// from the specified step
List<RowMetaAndData> rows = data.rowMap.get( source.getStepname() );
if ( rows != null && !rows.isEmpty() ) {
// Which metadata key is this referencing? Find the attribute key in the metadata entries...
//
if ( injector.hasProperty( targetStepMeta, target.getAttributeKey() ) ) {
// target step has specified key
boolean skip = false;
for ( RowMetaAndData r : rows ) {
if ( r.getRowMeta().indexOfValue( source.getField() ) < 0 ) {
logError( BaseMessages.getString( PKG, "MetaInject.SourceFieldIsNotDefined.Message", source
.getField(), getTransMeta().getName() ) );
// source step doesn't contain specified field
skip = true;
}
}
if ( !skip ) {
// specified field exist - need to inject
injector.setProperty( targetStepMeta, target.getAttributeKey(), rows, source.getField() );
wasInjection = true;
}
} else {
// target step doesn't have specified key - just report but don't fail like in 6.0 (BACKLOG-6753)
logError( BaseMessages.getString( PKG, "MetaInject.TargetKeyIsNotDefined.Message", target
.getAttributeKey(), getTransMeta().getName() ) );
}
}
}
}
}
if ( wasInjection ) {
injector.runPostInjectionProcessing( targetStepMeta );
}
}
/**
* Inject constant values.
*/
private void newInjectionConstants( String targetStep, StepMetaInterface targetStepMeta ) throws KettleException {
if ( log.isDetailed() ) {
logDetailed( "Handing step '" + targetStep + "' constants injection!" );
}
BeanInjectionInfo injectionInfo = new BeanInjectionInfo( targetStepMeta.getClass() );
BeanInjector injector = new BeanInjector( injectionInfo );
// Collect all the metadata for this target step...
//
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
for ( TargetStepAttribute target : targetMap.keySet() ) {
SourceStepField source = targetMap.get( target );
if ( target.getStepname().equalsIgnoreCase( targetStep ) ) {
// This is the step to collect data for...
// We also know which step to read the data from. (source)
//
if ( source.getStepname() == null ) {
// inject constant
if ( injector.hasProperty( targetStepMeta, target.getAttributeKey() ) ) {
// target step has specified key
injector.setProperty( targetStepMeta, target.getAttributeKey(), null, source.getField() );
} else {
// target step doesn't have specified key - just report but don't fail like in 6.0 (BACKLOG-6753)
logError( BaseMessages.getString( PKG, "MetaInject.TargetKeyIsNotDefined.Message", target.getAttributeKey(),
getTransMeta().getName() ) );
}
}
}
}
}
private void oldInjection( String targetStep ) throws KettleException {
if ( log.isDetailed() ) {
logDetailed( "Handing step '" + targetStep + "' injection!" );
}
// This is the injection interface:
//
StepMetaInjectionInterface injectionInterface = data.stepInjectionMap.get( targetStep );
// This is the injection description:
//
List<StepInjectionMetaEntry> metadataEntries = injectionInterface.getStepInjectionMetadataEntries();
// Create a new list of metadata injection entries...
//
List<StepInjectionMetaEntry> inject = new ArrayList<StepInjectionMetaEntry>();
// Collect all the metadata for this target step...
//
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
for ( TargetStepAttribute target : targetMap.keySet() ) {
SourceStepField source = targetMap.get( target );
if ( target.getStepname().equalsIgnoreCase( targetStep ) ) {
// This is the step to collect data for...
// We also know which step to read the data from. (source)
//
List<RowMetaAndData> rows = data.rowMap.get( source.getStepname() );
if ( rows != null && rows.size() > 0 ) {
// Which metadata key is this referencing? Find the attribute key in the metadata entries...
//
StepInjectionMetaEntry entry = findMetaEntry( metadataEntries, target.getAttributeKey() );
if ( entry != null ) {
if ( !target.isDetail() ) {
setEntryValueIfFieldExists( entry, rows.get( 0 ), source );
inject.add( entry );
} else {
// We are going to pass this entry N times for N target mappings
// As such, we have to see if it's already in the injection list...
//
StepInjectionMetaEntry metaEntries = findMetaEntry( inject, entry.getKey() );
if ( metaEntries == null ) {
StepInjectionMetaEntry rootEntry = findDetailRootEntry( metadataEntries, entry );
// Inject an empty copy
//
metaEntries = rootEntry.clone();
metaEntries.setDetails( new ArrayList<StepInjectionMetaEntry>() );
inject.add( metaEntries );
// We also need to pre-populate the whole grid: X rows by Y attributes
//
StepInjectionMetaEntry metaEntry = rootEntry.getDetails().get( 0 );
for ( int i = 0; i < rows.size(); i++ ) {
StepInjectionMetaEntry metaCopy = metaEntry.clone();
metaEntries.getDetails().add( metaCopy );
metaCopy.setDetails( new ArrayList<StepInjectionMetaEntry>() );
for ( StepInjectionMetaEntry me : metaEntry.getDetails() ) {
StepInjectionMetaEntry meCopy = me.clone();
metaCopy.getDetails().add( meCopy );
}
}
// From now on we can simply refer to the correct X,Y coordinate.
} else {
StepInjectionMetaEntry rootEntry = findDetailRootEntry( inject, metaEntries );
metaEntries = rootEntry;
}
for ( int i = 0; i < rows.size(); i++ ) {
RowMetaAndData row = rows.get( i );
try {
List<StepInjectionMetaEntry> rowEntries = metaEntries.getDetails().get( i ).getDetails();
for ( StepInjectionMetaEntry rowEntry : rowEntries ) {
// We have to look up the sources for these targets again in the target-2-source mapping
// That is because we only want to handle this as few times as possible...
//
SourceStepField detailSource = findDetailSource( targetMap, targetStep, rowEntry.getKey() );
if ( detailSource != null ) {
setEntryValueIfFieldExists( rowEntry, row, detailSource );
} else {
if ( log.isDetailed() ) {
logDetailed( "No detail source found for key: " + rowEntry.getKey() + " and target step: "
+ targetStep );
}
}
}
} catch ( Exception e ) {
throw new KettleException( "Unexpected error occurred while injecting metadata", e );
}
}
if ( log.isDetailed() ) {
logDetailed( "injected entry: " + entry );
}
}
// End of TopLevel/Detail if block
} else {
if ( log.isDetailed() ) {
logDetailed( "entry not found: " + target.getAttributeKey() );
}
}
} else {
if ( log.isDetailed() ) {
logDetailed( "No rows found for source step: " + source.getStepname() );
}
}
}
}
// Inject the metadata into the step!
//
injectionInterface.injectStepMetadataEntries( inject );
}
private void copyResult( Trans trans ) {
Result result = trans.getResult();
setLinesInput( result.getNrLinesInput() );
setLinesOutput( result.getNrLinesOutput() );
setLinesRead( result.getNrLinesRead() );
setLinesWritten( result.getNrLinesWritten() );
setLinesUpdated( result.getNrLinesUpdated() );
setLinesRejected( result.getNrLinesRejected() );
setErrors( result.getNrErrors() );
}
private StepInjectionMetaEntry findDetailRootEntry( List<StepInjectionMetaEntry> metadataEntries,
StepInjectionMetaEntry entry ) {
for ( StepInjectionMetaEntry rowsEntry : metadataEntries ) {
for ( StepInjectionMetaEntry rowEntry : rowsEntry.getDetails() ) {
for ( StepInjectionMetaEntry detailEntry : rowEntry.getDetails() ) {
if ( detailEntry.equals( entry ) ) {
return rowsEntry;
}
}
}
}
return null;
}
private SourceStepField findDetailSource( Map<TargetStepAttribute, SourceStepField> targetMap, String targetStep,
String key ) {
return targetMap.get( new TargetStepAttribute( targetStep, key, true ) );
}
private StepInjectionMetaEntry findMetaEntry( List<StepInjectionMetaEntry> metadataEntries, String attributeKey ) {
for ( StepInjectionMetaEntry entry : metadataEntries ) {
if ( entry.getKey().equals( attributeKey ) ) {
return entry;
}
entry = findMetaEntry( entry.getDetails(), attributeKey );
if ( entry != null ) {
return entry;
}
}
return null;
}
/**
* package-local visibility for testing purposes
*/
void setEntryValueIfFieldExists( StepInjectionMetaEntry entry, RowMetaAndData row, SourceStepField source )
throws KettleValueException {
RowMetaInterface rowMeta = row.getRowMeta();
if ( rowMeta.indexOfValue( source.getField() ) < 0 ) {
return;
}
setEntryValue( entry, row, source );
}
/**
* package-local visibility for testing purposes
*/
static void setEntryValue( StepInjectionMetaEntry entry, RowMetaAndData row, SourceStepField source )
throws KettleValueException {
// A standard attribute, a single row of data...
//
Object value = null;
switch ( entry.getValueType() ) {
case ValueMetaInterface.TYPE_STRING:
value = row.getString( source.getField(), null );
break;
case ValueMetaInterface.TYPE_BOOLEAN:
value = row.getBoolean( source.getField(), false );
break;
case ValueMetaInterface.TYPE_INTEGER:
value = row.getInteger( source.getField(), 0L );
break;
case ValueMetaInterface.TYPE_NUMBER:
value = row.getNumber( source.getField(), 0.0D );
break;
case ValueMetaInterface.TYPE_DATE:
value = row.getDate( source.getField(), null );
break;
case ValueMetaInterface.TYPE_BIGNUMBER:
value = row.getBigNumber( source.getField(), null );
break;
default:
break;
}
entry.setValue( value );
}
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
meta = (MetaInjectMeta) smi;
data = (MetaInjectData) sdi;
if ( super.init( smi, sdi ) ) {
try {
meta.actualizeMetaInjectMapping();
data.transMeta = loadTransformationMeta();
data.transMeta.copyVariablesFrom( this );
data.transMeta.mergeParametersWith( this.getTrans(), true );
checkSoureStepsAvailability();
checkTargetStepsAvailability();
// Get a mapping between the step name and the injection...
//
// Get new injection info
data.stepInjectionMetasMap = new HashMap<String, StepMetaInterface>();
for ( StepMeta stepMeta : data.transMeta.getUsedSteps() ) {
StepMetaInterface meta = stepMeta.getStepMetaInterface();
if ( BeanInjectionInfo.isInjectionSupported( meta.getClass() ) ) {
data.stepInjectionMetasMap.put( stepMeta.getName(), meta );
}
}
// Get old injection info
data.stepInjectionMap = new HashMap<String, StepMetaInjectionInterface>();
for ( StepMeta stepMeta : data.transMeta.getUsedSteps() ) {
StepMetaInjectionInterface injectionInterface =
stepMeta.getStepMetaInterface().getStepMetaInjectionInterface();
if ( injectionInterface != null ) {
data.stepInjectionMap.put( stepMeta.getName(), injectionInterface );
}
}
// See if we need to stream data from a specific step into the template
//
if ( meta.getStreamSourceStep() != null && !Utils.isEmpty( meta.getStreamTargetStepname() ) ) {
data.streaming = true;
data.streamingSourceStepname = meta.getStreamSourceStep().getName();
data.streamingTargetStepname = meta.getStreamTargetStepname();
}
return true;
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "MetaInject.BadEncoding.Message" ), e );
return false;
}
}
return false;
}
private void checkTargetStepsAvailability() {
Set<String> existedStepNames = convertToUpperCaseSet( data.transMeta.getStepNames() );
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
Set<TargetStepAttribute> unavailableTargetSteps = getUnavailableTargetSteps( targetMap, data.transMeta );
Set<String> alreadyMarkedSteps = new HashSet<String>();
for ( TargetStepAttribute currentTarget : unavailableTargetSteps ) {
if ( alreadyMarkedSteps.contains( currentTarget.getStepname() ) ) {
continue;
}
alreadyMarkedSteps.add( currentTarget.getStepname() );
if ( existedStepNames.contains( currentTarget.getStepname().toUpperCase() ) ) {
logError( BaseMessages.getString( PKG, "MetaInject.TargetStepIsNotUsed.Message", currentTarget.getStepname(),
data.transMeta.getName() ) );
} else {
logError( BaseMessages.getString( PKG, "MetaInject.TargetStepIsNotDefined.Message", currentTarget.getStepname(),
data.transMeta.getName() ) );
}
}
// alreadyMarkedSteps contains the invalid steps. Spoon can report the error without failing the transformation [BACKLOG-6753]
}
public static void removeUnavailableStepsFromMapping( Map<TargetStepAttribute, SourceStepField> targetMap,
Set<SourceStepField> unavailableSourceSteps, Set<TargetStepAttribute> unavailableTargetSteps ) {
Iterator<Entry<TargetStepAttribute, SourceStepField>> targetMapIterator = targetMap.entrySet().iterator();
while ( targetMapIterator.hasNext() ) {
Entry<TargetStepAttribute, SourceStepField> entry = targetMapIterator.next();
SourceStepField currentSourceStepField = entry.getValue();
TargetStepAttribute currentTargetStepAttribute = entry.getKey();
if ( unavailableSourceSteps.contains( currentSourceStepField ) || unavailableTargetSteps.contains(
currentTargetStepAttribute ) ) {
targetMapIterator.remove();
}
}
}
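/**
 * Illustrative sketch only (not called by this step): prunes a mapping against both the source
 * transformation and the injected transformation in one call, composed from the public helpers
 * above. The method name is hypothetical.
 */
public static void pruneUnavailableMappings( Map<TargetStepAttribute, SourceStepField> targetMap,
  TransMeta sourceTransMeta, StepMeta stepMeta, TransMeta injectedTransMeta ) {
  Set<SourceStepField> unavailableSourceSteps = getUnavailableSourceSteps( targetMap, sourceTransMeta, stepMeta );
  Set<TargetStepAttribute> unavailableTargetSteps = getUnavailableTargetSteps( targetMap, injectedTransMeta );
  removeUnavailableStepsFromMapping( targetMap, unavailableSourceSteps, unavailableTargetSteps );
}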
public static Set<TargetStepAttribute> getUnavailableTargetSteps( Map<TargetStepAttribute, SourceStepField> targetMap,
TransMeta injectedTransMeta ) {
Set<String> usedStepNames = getUsedStepsForReferencendTransformation( injectedTransMeta );
Set<TargetStepAttribute> unavailableTargetSteps = new HashSet<TargetStepAttribute>();
for ( TargetStepAttribute currentTarget : targetMap.keySet() ) {
if ( !usedStepNames.contains( currentTarget.getStepname().toUpperCase() ) ) {
unavailableTargetSteps.add( currentTarget );
}
}
return Collections.unmodifiableSet( unavailableTargetSteps );
}
public static Set<TargetStepAttribute> getUnavailableTargetKeys( Map<TargetStepAttribute, SourceStepField> targetMap,
TransMeta injectedTransMeta, Set<TargetStepAttribute> unavailableTargetSteps ) {
Set<TargetStepAttribute> missingKeys = new HashSet<>();
Map<String, BeanInjectionInfo> beanInfos = getUsedStepBeanInfos( injectedTransMeta );
for ( TargetStepAttribute key : targetMap.keySet() ) {
if ( !unavailableTargetSteps.contains( key ) ) {
BeanInjectionInfo info = beanInfos.get( key.getStepname().toUpperCase() );
if ( info != null && !info.getProperties().containsKey( key.getAttributeKey() ) ) {
missingKeys.add( key );
}
}
}
return missingKeys;
}
private static Map<String, BeanInjectionInfo> getUsedStepBeanInfos( TransMeta transMeta ) {
Map<String, BeanInjectionInfo> res = new HashMap<>();
for ( StepMeta step : transMeta.getUsedSteps() ) {
Class<? extends StepMetaInterface> stepMetaClass = step.getStepMetaInterface().getClass();
if ( BeanInjectionInfo.isInjectionSupported( stepMetaClass ) ) {
res.put( step.getName().toUpperCase(), new BeanInjectionInfo( stepMetaClass ) );
}
}
return res;
}
private static Set<String> getUsedStepsForReferencendTransformation( TransMeta transMeta ) {
Set<String> usedStepNames = new HashSet<String>();
for ( StepMeta currentStep : transMeta.getUsedSteps() ) {
usedStepNames.add( currentStep.getName().toUpperCase() );
}
return usedStepNames;
}
public static Set<SourceStepField> getUnavailableSourceSteps( Map<TargetStepAttribute, SourceStepField> targetMap,
TransMeta sourceTransMeta, StepMeta stepMeta ) {
String[] stepNamesArray = sourceTransMeta.getPrevStepNames( stepMeta );
Set<String> existedStepNames = convertToUpperCaseSet( stepNamesArray );
Set<SourceStepField> unavailableSourceSteps = new HashSet<SourceStepField>();
for ( SourceStepField currentSource : targetMap.values() ) {
if ( currentSource.getStepname() != null ) {
if ( !existedStepNames.contains( currentSource.getStepname().toUpperCase() ) ) {
unavailableSourceSteps.add( currentSource );
}
}
}
return Collections.unmodifiableSet( unavailableSourceSteps );
}
private void checkSoureStepsAvailability() {
Map<TargetStepAttribute, SourceStepField> targetMap = meta.getTargetSourceMapping();
Set<SourceStepField> unavailableSourceSteps =
getUnavailableSourceSteps( targetMap, getTransMeta(), getStepMeta() );
Set<String> alreadyMarkedSteps = new HashSet<String>();
for ( SourceStepField currentSource : unavailableSourceSteps ) {
if ( alreadyMarkedSteps.contains( currentSource.getStepname() ) ) {
continue;
}
alreadyMarkedSteps.add( currentSource.getStepname() );
logError( BaseMessages.getString( PKG, "MetaInject.SourceStepIsNotAvailable.Message", currentSource.getStepname(),
getTransMeta().getName() ) );
}
// alreadyMarkedSteps contains the invalid steps. Spoon can report the error without failing the transformation [BACKLOG-6753]
}
/**
* package-local visibility for testing purposes
*/
static Set<String> convertToUpperCaseSet( String[] array ) {
if ( array == null ) {
return Collections.emptySet();
}
Set<String> strings = new HashSet<String>();
for ( String currentString : array ) {
strings.add( currentString.toUpperCase() );
}
return strings;
}
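/**
 * Illustrative sketch only (not called by this step): case-insensitive membership test built on
 * convertToUpperCaseSet(), mirroring how step names are matched in the availability checks above.
 * The method name is hypothetical.
 */
static boolean containsIgnoreCase( String[] names, String candidate ) {
  return candidate != null && convertToUpperCaseSet( names ).contains( candidate.toUpperCase() );
}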
/**
* package-local visibility for testing purposes
*/
TransMeta loadTransformationMeta() throws KettleException {
return MetaInjectMeta.loadTransformationMeta( meta, getTrans().getRepository(), getTrans().getMetaStore(), this );
}
}
|
|
package net.violet.platform.message.application.factories;
import java.util.HashMap;
import java.util.Map;
import net.violet.platform.applications.TraficHandler;
import net.violet.platform.daemons.schedulers.AbstractScheduler.MessageProcessUnit;
import net.violet.platform.datamodel.Files;
import net.violet.platform.datamodel.Lang;
import net.violet.platform.datamodel.MockTestBase;
import net.violet.platform.datamodel.SchedulingType;
import net.violet.platform.datamodel.Source;
import net.violet.platform.datamodel.Subscription;
import net.violet.platform.datamodel.ConfigFiles.SERVICES;
import net.violet.platform.datamodel.factories.Factories;
import net.violet.platform.datamodel.mock.SourceMock;
import net.violet.platform.datamodel.mock.SubscriptionMock;
import net.violet.platform.dataobjects.SubscriptionData;
import net.violet.platform.dataobjects.SubscriptionSchedulingData;
import net.violet.platform.dataobjects.SubscriptionSchedulingSettingsData;
import net.violet.platform.dataobjects.MessageData.Palette;
import net.violet.platform.message.MessageDraft;
import net.violet.platform.message.MessageSignature;
import net.violet.platform.message.application.factories.AbstractMessageFactory.Message2Send;
import net.violet.platform.schedulers.AmbiantHandler;
import net.violet.platform.util.CCalendar;
import net.violet.platform.util.Constantes;
import net.violet.platform.xmpp.JabberMessageFactory;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Test;
public class TraficMessageFactoryTest extends MockTestBase {
private static final Logger LOGGER = Logger.getLogger(TraficMessageFactoryTest.class);
@Test
public void getMessageWithoutContentTest() {
final Subscription theSubscription = new SubscriptionMock(1, Factories.APPLICATION.findByName("net.violet.trafic"), getKowalskyObject());
final CCalendar theDeliveryDate = new CCalendar(false);
theDeliveryDate.addMillis(3600000L);
final TraficMessageFactory factory = (TraficMessageFactory) AbstractMessageFactory.getFactoryByApplication(theSubscription.getApplication());
try {
final SubscriptionData theSubscriptionData = SubscriptionData.getData(theSubscription);
final SubscriptionSchedulingData theScheduling = SubscriptionSchedulingData.create(theSubscriptionData, SchedulingType.SCHEDULING_TYPE.Ambiant);
final Message2Send message = factory.getMessage(new MessageProcessUnit(theScheduling, theDeliveryDate, null) {
@Override
public void runWhenSuccessful() {
}
}).get(0);
Assert.assertEquals(Constantes.QUEUE_TTL_SERVICE, message.getTTL());
Assert.assertEquals(TraficMessageFactory.TITLE, message.getTitle());
Assert.assertEquals(Palette.RANDOM, message.getColorPal());
Assert.assertEquals(theSubscriptionData, message.getSubscription());
Assert.assertEquals(MessageSignature.TRAFFIC_SIGNATURE, message.getSignature());
Assert.assertEquals(theSubscription.getObject(), message.getRecipient());
Assert.assertEquals(JabberMessageFactory.IDLE_MODE, message.getMode());
Assert.assertNull(message.getBody());
} catch (final Exception e) {
TraficMessageFactoryTest.LOGGER.fatal(e, e);
Assert.fail(e.getMessage());
}
}
@Test
public void getMessageWithSourceTest() {
final Subscription theSubscription = new SubscriptionMock(1, Factories.APPLICATION.findByName("net.violet.trafic"), getKowalskyObject());
new SourceMock(8205, "trafic.chapelle.bagnolet", 3);
new SourceMock(8206, "trafic.chapelle.bagnolet.time", 21);
final Map<String, Object> theSettings = new HashMap<String, Object>();
// Creates the settings
theSettings.put(TraficHandler.END, "bagnolet");
theSettings.put(TraficHandler.START, "chapelle");
theSettings.put(TraficHandler.SOURCE, "trafic.chapelle.bagnolet");
theSubscription.setSettings(theSettings);
final CCalendar theDeliveryDate = new CCalendar(false);
theDeliveryDate.setTimeMYSQL("08:00:00");
final TraficMessageFactory factory = (TraficMessageFactory) AbstractMessageFactory.getFactoryByApplication(theSubscription.getApplication());
try {
final SubscriptionData theSubscriptionData = SubscriptionData.getData(theSubscription);
final SubscriptionSchedulingData theScheduling = SubscriptionSchedulingData.create(theSubscriptionData, SchedulingType.SCHEDULING_TYPE.Ambiant);
final Message2Send message = factory.getMessage(new MessageProcessUnit(theScheduling, theDeliveryDate, null) {
@Override
public void runWhenSuccessful() {
}
}).get(0);
Assert.assertEquals(Constantes.QUEUE_TTL_SERVICE, message.getTTL());
Assert.assertEquals(TraficMessageFactory.TITLE, message.getTitle());
Assert.assertEquals(Palette.RANDOM, message.getColorPal());
Assert.assertEquals(theSubscriptionData, message.getSubscription());
Assert.assertEquals(MessageSignature.TRAFFIC_SIGNATURE, message.getSignature());
Assert.assertEquals(theSubscription.getObject(), message.getRecipient());
Assert.assertEquals(JabberMessageFactory.IDLE_MODE, message.getMode());
Assert.assertEquals(theDeliveryDate, message.getDeliveryDate());
Assert.assertFalse(message.isStream());
final Files[] theFiles = message.getBody();
Assert.assertEquals(1, theFiles.length);
final Lang theLang = theSubscription.getObject().getPreferences().getLangPreferences();
Assert.assertEquals(Factories.CONFIG_FILES.findAllByServiceAndLang(SERVICES.TRAFFIC, theLang).get("3").get(0).getFiles().getPath(), theFiles[0].getPath());
} catch (final Exception e) {
TraficMessageFactoryTest.LOGGER.fatal(e, e);
Assert.fail(e.toString());
}
}
@Test
public void getMessageWithoutDeliveryTest() {
final Subscription theSubscription = new SubscriptionMock(1, Factories.APPLICATION.findByName("net.violet.trafic"), getKowalskyObject());
new SourceMock(8205, "trafic.chapelle.bagnolet", 3);
new SourceMock(8206, "trafic.chapelle.bagnolet.time", 21);
final Map<String, Object> theSettings = new HashMap<String, Object>();
// Creates the settings
theSettings.put(TraficHandler.END, "bagnolet");
theSettings.put(TraficHandler.START, "chapelle");
theSettings.put(TraficHandler.SOURCE, "trafic.chapelle.bagnolet");
theSubscription.setSettings(theSettings);
final TraficMessageFactory factory = (TraficMessageFactory) AbstractMessageFactory.getFactoryByApplication(theSubscription.getApplication());
try {
final SubscriptionData theSubscriptionData = SubscriptionData.getData(theSubscription);
final SubscriptionSchedulingData theScheduling = SubscriptionSchedulingData.create(theSubscriptionData, SchedulingType.SCHEDULING_TYPE.Ambiant);
final Message2Send message = factory.getMessage(new MessageProcessUnit(theScheduling, null, null) {
@Override
public void runWhenSuccessful() {
}
}).get(0);
Assert.assertEquals(Constantes.QUEUE_TTL_SERVICE, message.getTTL());
Assert.assertEquals(TraficMessageFactory.TITLE, message.getTitle());
Assert.assertEquals(Palette.RANDOM, message.getColorPal());
Assert.assertEquals(theSubscriptionData, message.getSubscription());
Assert.assertEquals(MessageSignature.TRAFFIC_SIGNATURE, message.getSignature());
Assert.assertEquals(theSubscription.getObject(), message.getRecipient());
Assert.assertEquals(JabberMessageFactory.IDLE_MODE, message.getMode());
Assert.assertFalse(message.isStream());
final Files[] theFiles = message.getBody();
Assert.assertEquals(1, theFiles.length);
final Lang theLang = theSubscription.getObject().getPreferences().getLangPreferences();
Assert.assertEquals(Factories.CONFIG_FILES.findAllByServiceAndLang(SERVICES.TRAFFIC, theLang).get("3").get(0).getFiles().getPath(), theFiles[0].getPath());
Assert.assertNull(message.getDeliveryDate());
} catch (final Exception e) {
TraficMessageFactoryTest.LOGGER.fatal(e, e);
Assert.fail(e.toString());
}
}
@Test
public void getSourceMessageNoLastTimeTest() {
final Source source = new SourceMock(8205, "trafic.chapelle.bagnolet", 3, System.currentTimeMillis());
final Subscription theSubscription = new SubscriptionMock(1, Factories.APPLICATION.findByName("net.violet.trafic"), getKowalskyObject());
final Map<String, Object> theSettings = new HashMap<String, Object>();
// Creates the settings
theSettings.put(TraficHandler.END, "bagnolet");
theSettings.put(TraficHandler.START, "chapelle");
theSettings.put(TraficHandler.SOURCE, "trafic.chapelle.bagnolet");
theSubscription.setSettings(theSettings);
final SubscriptionData theSubscriptionData = SubscriptionData.getData(theSubscription);
final SubscriptionSchedulingData theScheduling = SubscriptionSchedulingData.create(theSubscriptionData, SchedulingType.SCHEDULING_TYPE.Ambiant);
final TraficMessageFactory factory = (TraficMessageFactory) AbstractMessageFactory.getFactoryByApplication(theSubscription.getApplication());
final long theLastTime = System.currentTimeMillis();
final MessageDraft theMessage = factory.getSourceMessage(theScheduling, theLastTime);
Assert.assertNotNull(theMessage);
Assert.assertTrue(theMessage.isSourceModeUpdate());
Assert.assertEquals(new Integer((int) source.getSource_val()), theMessage.getSources().get(Integer.toString(factory.getSource().getId())));
Assert.assertEquals(Constantes.QUEUE_TTL_SOURCES, theMessage.getTTLInSecond());
final SubscriptionSchedulingSettingsData theLastTimeSetting = SubscriptionSchedulingSettingsData.findBySubscriptionSchedulingAndKey(theScheduling, AmbiantHandler.LAST_TIME);
Assert.assertNotNull(theLastTimeSetting);
Assert.assertEquals(theLastTime, Long.parseLong(theLastTimeSetting.getValue()));
source.delete();
}
@Test
public void getSourceMessageWithLastTimeNUTest() {
final long sourceUpdateTime = System.currentTimeMillis() / 1000;
final Source source = new SourceMock(3, "trafic.chapelle.bagnolet", 3, sourceUpdateTime);
final long theLastTime = sourceUpdateTime * 1000;
final Subscription theSubscription = new SubscriptionMock(1, Factories.APPLICATION.findByName("net.violet.trafic"), getKowalskyObject());
final Map<String, Object> theSettings = new HashMap<String, Object>();
// Creates the settings
theSettings.put(TraficHandler.END, "bagnolet");
theSettings.put(TraficHandler.START, "chapelle");
theSettings.put(TraficHandler.SOURCE, "trafic.chapelle.bagnolet");
theSubscription.setSettings(theSettings);
final SubscriptionData theSubscriptionData = SubscriptionData.getData(theSubscription);
final SubscriptionSchedulingData theScheduling = SubscriptionSchedulingData.create(theSubscriptionData, SchedulingType.SCHEDULING_TYPE.Ambiant);
theScheduling.createSetting(AmbiantHandler.LAST_TIME, Long.toString(theLastTime));
final TraficMessageFactory factory = (TraficMessageFactory) AbstractMessageFactory.getFactoryByApplication(theSubscription.getApplication());
final MessageDraft theMessage = factory.getSourceMessage(theScheduling, theLastTime);
Assert.assertNull(theMessage);
source.delete();
}
@Test
public void getSourceMessageWithLastTimeUTest() {
final long sourceUpdateTime = System.currentTimeMillis() / 1000;
final Source source = new SourceMock(8205, "trafic.chapelle.bagnolet", 3, sourceUpdateTime + 60);
final long theLastTime = sourceUpdateTime * 1000;
final Subscription theSubscription = new SubscriptionMock(1, Factories.APPLICATION.findByName("net.violet.trafic"), getKowalskyObject());
final Map<String, Object> theSettings = new HashMap<String, Object>();
// Creates the settings
theSettings.put(TraficHandler.END, "bagnolet");
theSettings.put(TraficHandler.START, "chapelle");
theSettings.put(TraficHandler.SOURCE, "trafic.chapelle.bagnolet");
theSubscription.setSettings(theSettings);
final SubscriptionData theSubscriptionData = SubscriptionData.getData(theSubscription);
final SubscriptionSchedulingData theScheduling = SubscriptionSchedulingData.create(theSubscriptionData, SchedulingType.SCHEDULING_TYPE.Ambiant);
theScheduling.createSetting(AmbiantHandler.LAST_TIME, Long.toString(theLastTime));
final TraficMessageFactory factory = (TraficMessageFactory) AbstractMessageFactory.getFactoryByApplication(theSubscription.getApplication());
final MessageDraft theMessage = factory.getSourceMessage(theScheduling, theLastTime);
Assert.assertNotNull(theMessage);
Assert.assertTrue(theMessage.isSourceModeUpdate());
Assert.assertEquals(new Integer((int) source.getSource_val()), theMessage.getSources().get(Integer.toString(factory.getSource().getId())));
Assert.assertEquals(Constantes.QUEUE_TTL_SOURCES, theMessage.getTTLInSecond());
final SubscriptionSchedulingSettingsData theLastTimeSetting = SubscriptionSchedulingSettingsData.findBySubscriptionSchedulingAndKey(theScheduling, AmbiantHandler.LAST_TIME);
Assert.assertNotNull(theLastTimeSetting);
Assert.assertEquals(theLastTime, Long.parseLong(theLastTimeSetting.getValue()));
source.delete();
}
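// Illustrative sketch only (not called by the tests above): the settings block repeated in each
// test could be built by a helper like this. The method name is hypothetical.
private static Map<String, Object> createTraficSettings(String start, String end, String sourceName) {
    final Map<String, Object> settings = new HashMap<String, Object>();
    settings.put(TraficHandler.START, start);
    settings.put(TraficHandler.END, end);
    settings.put(TraficHandler.SOURCE, sourceName);
    return settings;
}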
}
|
|
/*
* This project has received funding from the European Unions Seventh
* Framework Programme for research, technological development and
* demonstration under grant agreement no FP7-601138 PERICLES.
*
* Copyright 2015 Anna Eggers, State- and Univeristy Library Goettingen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package algorithm;
import static main.Configuration.RESTORED_DIRECTORY;
import static model.Criterion.CARRIER_PROCESSABILITY;
import static model.Criterion.CARRIER_RESTORABILITY;
import static model.Criterion.COMPRESSION;
import static model.Criterion.DETECTABILITY;
import static model.Criterion.ENCAPSULATION_METHOD;
import static model.Criterion.ENCRYPTION;
import static model.Criterion.PAYLOAD_ACCESSIBILITY;
import static model.Criterion.PAYLOAD_RESTORABILITY;
import static model.Criterion.STANDARDS;
import static model.Criterion.VELOCITY;
import static model.Criterion.VISIBILITY;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import javax.imageio.ImageIO;
import javax.swing.ButtonGroup;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JRadioButton;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.filefilter.SuffixFileFilter;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.ChecksumException;
import com.google.zxing.EncodeHintType;
import com.google.zxing.FormatException;
import com.google.zxing.LuminanceSource;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.BitMatrix;
import com.google.zxing.common.HybridBinarizer;
import com.google.zxing.qrcode.QRCodeReader;
import com.google.zxing.qrcode.QRCodeWriter;
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel;
import model.PayloadSegment;
import model.RestoredFile;
import model.Scenario;
import view.GUIPanel;
/**
* The ZXing library is used to implement this technique.
*
* This is a digital watermarking technique that uses QR-codes. The algorithm
* creates QR-codes from the payload files and either ignores the carriers
* and saves the QR-codes into separate files, or uses only the first payload
* and adds its QR-code to each carrier image.
*/
public class QRCodeWatermarking extends AbstractAlgorithm {
private final JRadioButton trueEncapsulate = new JRadioButton("Add the QR-code to carrier image");
private final JRadioButton falseEncapsulate = new JRadioButton(
"Create separate QR-code file for each payload file (this will ignore the carrier!)");
private final JLabel messageLabel = new JLabel();
private final JTextField sizeField = new JTextField(20);
protected final JTextField xPositionField = new JTextField(20);
protected final JTextField yPositionField = new JTextField(20);
private final String[] fileFormats = { "png", "jpeg", "jpg", "bmp", "gif" };
private final JComboBox<String> imageFormatBox = new JComboBox<String>(fileFormats);
private final String ON_IMAGE = "ON_IMAGE";
private final String SEPARATE_FILE = "SEPARATE_FILE";
private final int DEFAULT_SIZE = 200;
{
createConfigurationGui();
}
/*
* ****** ENCAPSULATION **********
*/
/**
* Starts either the creation of QR-code files from each payload file, or
* the embedding of a QR-code created from the first payload file on each
* carrier.
*/
@Override
public File encapsulate(File carrier, List<File> payloadList) throws IOException {
String selectedImageFormat = (String) imageFormatBox.getSelectedItem();
if (embedInCarrierFile()) {
return embedInCarrierFile(carrier, payloadList.get(0), selectedImageFormat);
} else {
return createQrCodeFile(payloadList, selectedImageFormat, SEPARATE_FILE);
}
}
/**
* This method ignores the carrier file and creates a separate QR-code image
* for the first payload file only.
*
* @param payloadList
* @return bar code file
* @throws IOException
*/
private File createQrCodeFile(List<File> payloadList, String imageFormat, String usedMethod) throws IOException {
return createBarcodeFile(payloadList.get(0), imageFormat, usedMethod);
}
/**
* Creates an image in the selected format that contains the QR-code with the
* information from the payload file. The image has the same base name as the payload file.
*
* @param payload
* @return qr code as png image file
* @throws IOException
*/
private File createBarcodeFile(File payload, String imageFormat, String usedMethod) throws IOException {
// Create restoration metadata only for the payload file to spare space.
PayloadSegment metadata = new PayloadSegment(payload);
metadata.addOptionalProperty("usedMethod", usedMethod);
byte[] payloadSegment = metadata.getPayloadSegmentBytes();
String barcodeInformation = new String(payloadSegment);
int size = getQRCodeSize();
String outputFileName = FilenameUtils.removeExtension(getOutputFileName(payload)) + "." + imageFormat;
File outputFile = new File(outputFileName);
Hashtable<EncodeHintType, ErrorCorrectionLevel> hintMap = new Hashtable<EncodeHintType, ErrorCorrectionLevel>();
hintMap.put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.L);
BitMatrix byteMatrix = encodeWithQRCode(barcodeInformation, hintMap, size);
if (byteMatrix == null) {
return null;
}
BufferedImage image = new BufferedImage(size, size, BufferedImage.TYPE_INT_RGB);
		Graphics2D graphics = image.createGraphics();
graphics.setColor(Color.WHITE);
graphics.fillRect(0, 0, size, size);
graphics.setColor(Color.BLACK);
for (int x = 0; x < size; x++) {
for (int y = 0; y < size; y++) {
if (byteMatrix.get(x, y)) {
graphics.fillRect(x, y, 1, 1);
}
}
}
ImageIO.write(image, imageFormat, outputFile);
return outputFile;
}
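	/*
	 * Hedged sketch (not part of the original algorithm): the manual pixel loop in
	 * createBarcodeFile() can alternatively be replaced by ZXing's own renderer from
	 * the optional "javase" module. This helper assumes that module
	 * (com.google.zxing:javase) is on the classpath; it is referenced fully
	 * qualified so no additional import is needed, and it is not called anywhere.
	 */
	private BufferedImage renderWithZXingHelper(BitMatrix matrix) {
		// MatrixToImageWriter paints set bits black and unset bits white, which
		// matches the manual Graphics2D rendering above.
		return com.google.zxing.client.j2se.MatrixToImageWriter.toBufferedImage(matrix);
	}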
/**
* Creates the BitMatrix for the QR-Code
*
* @param barcodeInformation
* @param hintMap
* @param size
* @return bit matrix
*/
private BitMatrix encodeWithQRCode(String barcodeInformation,
Hashtable<EncodeHintType, ErrorCorrectionLevel> hintMap, int size) {
try {
return new QRCodeWriter().encode(barcodeInformation, BarcodeFormat.QR_CODE, size, size, hintMap);
} catch (Exception e) {
displayMessage(e.getMessage());
}
return null;
}
	/**
	 * This method creates a QR-code from the information of the payload file
	 * and adds it to the carrier image.
	 *
	 * @param carrier
	 * @param payload
	 * @return carrier image file with the QR-code added
	 * @throws IOException
	 */
private File embedInCarrierFile(File carrier, File payload, String imageFormat) throws IOException {
List<File> payloadList = new ArrayList<>();
payloadList.add(payload);
File barcode = createQrCodeFile(payloadList, "png", ON_IMAGE);
return writeQRCodeOnImage(barcode, carrier, imageFormat);
}
/**
	 * Writes a QR-code image onto a carrier image.
*
* @param qrCodeFile
* @param carrierFile
* @param imageFormat
* @return image file with qr code on top of carrier
* @throws IOException
*/
private File writeQRCodeOnImage(File qrCodeFile, File carrierFile, String imageFormat) throws IOException {
BufferedImage barcode = ImageIO.read(qrCodeFile);
BufferedImage carrier = ImageIO.read(carrierFile);
if (barcode.getWidth() > carrier.getWidth() || barcode.getHeight() > carrier.getHeight()) {
			displayMessage(
					"The QR-code is too big to add to the carrier image. Try again with a smaller QR-code size!");
qrCodeFile.delete();
return null;
}
Graphics graphics = carrier.getGraphics();
graphics.drawImage(barcode, getXPosition(), getYPosition(), null);
String outputFileName = FilenameUtils.removeExtension(getOutputFileName(carrierFile)) + "." + imageFormat;
File outputFile = new File(outputFileName);
ImageIO.write(carrier, imageFormat, outputFile);
qrCodeFile.delete();
return outputFile;
}
/**
* Determines if the QR-code should be saved to a separate file, or embedded
* on the carrier.
*
	 * @return true if the QR-code should be embedded on the carrier image
*/
private boolean embedInCarrierFile() {
return trueEncapsulate.isSelected();
}
/*
* ****** RESTORATION **********
*/
/**
	 * Reads an image file, looks for a QR-code and recreates the payload file from it.
*/
@Override
public List<RestoredFile> restore(File qrCodeImage) throws IOException {
List<RestoredFile> restoredFiles = new ArrayList<RestoredFile>();
BufferedImage bufferedQrCode = ImageIO.read(qrCodeImage);
LuminanceSource luminance = new BufferedImageLuminanceSource(bufferedQrCode);
BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(luminance));
QRCodeReader reader = new QRCodeReader();
try {
Result result = reader.decode(bitmap);
PayloadSegment payloadSegment = PayloadSegment.getPayloadSegment(result.getText().getBytes());
String payloadName = payloadSegment.getPayloadName();
RestoredFile payloadFile = new RestoredFile(RESTORED_DIRECTORY + payloadName);
			FileOutputStream out = new FileOutputStream(payloadFile);
			try {
				out.write(payloadSegment.getPayloadBytes());
			} finally {
				out.close();
			}
payloadFile.algorithm = this;
payloadFile.wasPayload = true;
payloadFile.wasCarrier = false;
payloadFile.validateChecksum(payloadSegment.getPayloadChecksum());
			if (ON_IMAGE.equals(payloadSegment.getRestorationMetadata().getProperty("usedMethod"))) {
RestoredFile carrier = restoreCarrier(qrCodeImage);
payloadFile.relatedFiles.add(carrier);
carrier.relatedFiles.add(payloadFile);
restoredFiles.add(carrier);
}
restoredFiles.add(payloadFile);
} catch (NotFoundException | ChecksumException | FormatException e) {
e.printStackTrace();
}
return restoredFiles;
}
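	/*
	 * Hedged sketch (illustrative only, not called by restore()): if decoding fails
	 * on noisy or rescaled carrier images, ZXing can be asked to spend more effort
	 * via the TRY_HARDER decode hint. Only ZXing core API already used in this
	 * class is assumed; DecodeHintType is referenced fully qualified to avoid an
	 * extra import.
	 */
	private Result decodeTryHarder(BinaryBitmap bitmap)
			throws NotFoundException, ChecksumException, FormatException {
		java.util.Map<com.google.zxing.DecodeHintType, Object> hints =
				new java.util.HashMap<com.google.zxing.DecodeHintType, Object>();
		hints.put(com.google.zxing.DecodeHintType.TRY_HARDER, Boolean.TRUE);
		return new QRCodeReader().decode(bitmap, hints);
	}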
private RestoredFile restoreCarrier(File qrCodeImage) {
RestoredFile carrier = getRestoredCarrier(qrCodeImage);
carrier.wasCarrier = true;
carrier.wasPayload = false;
carrier.algorithm = this;
carrier.checksumValid = false;
carrier.restorationNote = "Carrier can't be restored.";
return carrier;
}
/*
* **** CONFIGURATION ****************
*/
/**
* Creates the GUI that is used to configure this algorithm.
*/
private void createConfigurationGui() {
initButtons();
panel = new GUIPanel();
panel.setLayout(new GridBagLayout());
GridBagConstraints constraints = new GridBagConstraints();
constraints.gridx = 0;
constraints.gridy = 0;
constraints.anchor = GridBagConstraints.NORTHWEST;
constraints.gridwidth = 2;
panel.add(new JLabel("<html><h2>QR-code options</h2></html>"), constraints);
constraints.gridy++;
constraints.gridwidth = 1;
panel.add(new JLabel("QR-code size: "), constraints);
constraints.gridx++;
sizeField.setText("200");
panel.add(sizeField, constraints);
constraints.gridwidth = 2;
constraints.gridx = 0;
constraints.gridy++;
constraints.gridy++;
constraints.gridx = 0;
constraints.gridwidth = 1;
panel.add(new JLabel("Select output file format:"), constraints);
constraints.gridx++;
panel.add(imageFormatBox, constraints);
constraints.gridx = 0;
constraints.gridy++;
// separate file:
constraints.gridwidth = 2;
panel.add(falseEncapsulate, constraints);
constraints.gridy++;
constraints.gridy++;
// add to carrier:
panel.add(trueEncapsulate, constraints);
constraints.gridy++;
JTextArea infoArea = new JTextArea();
infoArea.setText("" + "\nPlease note: "
+ "\n- The QR-code will be created from the first payload file, if the dataset has more than one payload files, and added to each carrier file. "
+ "All other payload files will be ignored, if this option is selected."
+ "\n- Carrier files need to be one of the following file types:" + "\n\tpng, jpeg, jp2, jpg, bmp, gif"
+ "\n- All carrier files of other file formats are ignored."
+ "\n- The QR-code pixel size should exceed the carrier image size in no dimension."
+ "\n- If the size of the selected payload file is too big to create a QR-code from it, you will get an error message."
+ "\n\n" + "Options for adding the QR-code to the carrier images:");
infoArea.setEditable(false);
infoArea.setLineWrap(true);
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.weightx = 1;
panel.add(infoArea, constraints);
constraints.fill = GridBagConstraints.NONE;
constraints.gridy++;
panel.add(new JLabel("On which image position should the barcode be added?"), constraints);
constraints.gridy++;
constraints.gridwidth = 1;
panel.add(new JLabel("X :"), constraints);
constraints.gridx++;
xPositionField.setText("0");
panel.add(xPositionField, constraints);
constraints.gridx = 0;
constraints.gridy++;
panel.add(new JLabel("Y :"), constraints);
constraints.gridx++;
yPositionField.setText("0");
panel.add(yPositionField, constraints);
constraints.gridx = 0;
constraints.gridy++;
constraints.gridy++;
panel.add(messageLabel, constraints);
}
/**
	 * Changes the text of the GUI message label to display status messages to the user.
*
* @param message
*/
private void displayMessage(String message) {
messageLabel.setText("<html><h3>" + message + "</h3></html>");
}
/**
* Initialises the GUI buttons.
*/
private void initButtons() {
ButtonGroup encapsulateGroup = new ButtonGroup();
encapsulateGroup.add(trueEncapsulate);
encapsulateGroup.add(falseEncapsulate);
trueEncapsulate.setSelected(true);
messageLabel.setForeground(Color.red);
}
/**
* Configures which technique to use.
*
* @param encapsulate
* true: A qr-code image is created from the first payload file
* and embedded on each carrier file.
*
	 *            false: A qr-code image is created from each payload file and
	 *            stored in its own file. The carriers are ignored.
*/
public void setCarrierEncapsulation(boolean encapsulate) {
trueEncapsulate.setSelected(encapsulate);
falseEncapsulate.setSelected(!encapsulate);
}
/**
* This is a visible watermarking algorithm. If the QR-code is added to a
* carrier image, the carrier can't be restored.
*/
@Override
Scenario defineScenario() {
Scenario scenario = new Scenario("QR-code scenario");
scenario.description = "This is the ideal scenario for creating a QR-code.";
scenario.setCriterionValue(ENCAPSULATION_METHOD, EMBEDDING);
scenario.setCriterionValue(VISIBILITY, VISIBLE);
scenario.setCriterionValue(DETECTABILITY, DETECTABLE);
scenario.setCriterionValue(CARRIER_RESTORABILITY, NO);
scenario.setCriterionValue(PAYLOAD_RESTORABILITY, YES);
scenario.setCriterionValue(CARRIER_PROCESSABILITY, YES);
scenario.setCriterionValue(PAYLOAD_ACCESSIBILITY, NO);
scenario.setCriterionValue(ENCRYPTION, NO);
scenario.setCriterionValue(COMPRESSION, YES);
scenario.setCriterionValue(VELOCITY, NO);
scenario.setCriterionValue(STANDARDS, YES);
return scenario;
}
/**
	 * Only image files can be used as carriers, but this algorithm allows
	 * non-image files to be in the dataset. All carrier files that aren't
	 * images will be ignored during encapsulation.
*/
@Override
SuffixFileFilter configureCarrierFileFilter() {
return new AcceptAllFilter();
}
/**
* Payload can be of any type.
*/
@Override
SuffixFileFilter configurePayloadFileFilter() {
return new AcceptAllFilter();
}
/**
* The algorithm only outputs images.
*/
@Override
SuffixFileFilter configureDecapsulationFileFilter() {
List<String> supportedFileFormats = new ArrayList<String>();
supportedFileFormats.add("jpg");
supportedFileFormats.add("jpeg");
supportedFileFormats.add("png");
supportedFileFormats.add("bmp");
supportedFileFormats.add("gif");
return new SuffixFileFilter(supportedFileFormats);
}
/**
* Name of the algorithm in the GUI list.
*/
@Override
public String getName() {
return "QR-code watermarking";
}
/**
* This description will be shown in the GUI.
*/
@Override
public String getDescription() {
String description = "This algorithm creates QR-codes from the payload information.\n"
+ "It can be configured to either create a QR-code file for each payload file,"
+ " and ignore the carrier; Or create a QR-code from the first payload file, and "
+ "add it on each carrier image.\n"
+ "All carrier files that aren't images, will be ingored by this algorithm.";
return description;
}
/**
* A carrier file is not needed, but there has to be a minimum of one
* payload file.
*/
@Override
public boolean fulfilledTechnicalCriteria(File carrier, List<File> payloadList) {
		return payloadList.size() >= 1;
}
/**
	 * Get the x-position on the carrier image where the QR-code should be
* added.
*
* @return x-position
*/
private int getXPosition() {
try {
			return Integer.parseInt(xPositionField.getText());
} catch (Exception e) {
return 0;
}
}
/**
	 * Get the y-position on the carrier image where the QR-code should be
* added.
*
* @return y position
*/
private int getYPosition() {
try {
			return Integer.parseInt(yPositionField.getText());
} catch (Exception e) {
return 0;
}
}
/**
	 * Get the size of the QR-code. This is configured by the user and has a
	 * default of 200*200 pixels.
*
* @return QR-code size
*/
private int getQRCodeSize() {
try {
int size = Integer.parseInt(sizeField.getText());
return size;
} catch (Exception e) {
return DEFAULT_SIZE;
}
}
}
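/*
 * Hedged usage sketch (not part of the original code base): drives the algorithm
 * programmatically instead of through the GUI. It assumes the default behaviour of
 * the surrounding framework (AbstractAlgorithm output naming, RESTORED_DIRECTORY)
 * and that the placeholder files "carrier.png" and "payload.txt" exist in the
 * working directory.
 */
class QRCodeWatermarkingUsageSketch {
	public static void main(String[] args) throws IOException {
		QRCodeWatermarking algorithm = new QRCodeWatermarking();
		// Embed a QR-code built from the payload onto the carrier image.
		algorithm.setCarrierEncapsulation(true);
		List<File> payload = new ArrayList<File>();
		payload.add(new File("payload.txt"));
		File watermarked = algorithm.encapsulate(new File("carrier.png"), payload);
		// Later, recover the payload (plus a non-restorable carrier stub) again.
		if (watermarked != null) {
			List<RestoredFile> restored = algorithm.restore(watermarked);
			System.out.println("Restored " + restored.size() + " file(s)");
		}
	}
}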
|
|
package com.nicholasquirk.comicviewer;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.github.junrar.Archive;
import com.github.junrar.exception.RarException;
import com.github.junrar.rarfile.FileHeader;
/**
*
* @author Nicholas Quirk
*
*/
public class FileSystem extends Activity implements OnItemClickListener {
private String currDirectory;
private String sdRootDirectory;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.filesystem);
boolean canViewStorage = isExternalStorageAvailable();
if (canViewStorage) {
this.sdRootDirectory = Environment.getExternalStorageDirectory().toString();
populateCurrentDirectory(this.sdRootDirectory, false);
}
}
	private boolean isExternalStorageAvailable() {
		String state = Environment.getExternalStorageState();
		// Read access is enough for browsing, so a read-only mount also counts.
		return Environment.MEDIA_MOUNTED.equals(state)
				|| Environment.MEDIA_MOUNTED_READ_ONLY.equals(state);
	}
private void populateCurrentDirectory(String directory, boolean useCurrDirectory) {
File file = null;
File[] files = null;
String[] items = null;
ListView file_browser = (ListView) findViewById(R.id.filesystem_nav_view);
		if (useCurrDirectory) {
file = new File(currDirectory, directory);
} else {
file = new File(directory);
}
if (file != null && file.isDirectory()) {
this.currDirectory = file.getAbsolutePath();
files = file.listFiles();
}
if (files != null) {
items = new String[(files.length) + 1];
items[0] = "..";
for (int i = 1; i < files.length + 1; i++) {
items[i] = files[i - 1].getName();
}
} else {
items = new String[]{".."};
}
Arrays.sort(items);
file_browser.setAdapter(new ArrayAdapter<String>(this, R.layout.list_item, items));
file_browser.setOnItemClickListener(this);
}
@Override
	public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
		TextView tv = (TextView) view;
String s = tv.getText().toString();
File f = new File(this.currDirectory, s);
if (isComicBookFile(f)) {
try {
File inflatedDirectory = null;
if (f.getName().endsWith(".cbz") || f.getName().endsWith(".zip")) {
inflatedDirectory = inflateZipComicBookArchive(f);
launchImageViewer(inflatedDirectory.getAbsolutePath());
} else if (f.getName().endsWith(".cbr") || f.getName().endsWith(".rar")) {
inflatedDirectory = inflateRarComicBookArchive(f);
launchImageViewer(inflatedDirectory.getAbsolutePath());
}
} catch (IOException e) {
e.printStackTrace();
}
} else {
if (s.equals("..")) {
String prevDirectory = f.getParentFile().getParentFile().getAbsolutePath();
if (prevDirectory.contains(this.sdRootDirectory)) {
populateCurrentDirectory(prevDirectory, false);
} else {
populateCurrentDirectory(this.sdRootDirectory, false);
}
} else {
if (f.isDirectory()) {
populateCurrentDirectory(f.getName(), true);
} else {
					// Not a directory and not a comic book archive: nothing to do.
}
}
}
}
private boolean isComicBookFile(File f) {
boolean isValidFile = false;
int extensionIndex = f.getName().lastIndexOf(".");
if (extensionIndex != -1) {
String ext = f.getName().substring(extensionIndex);
if (ext.equalsIgnoreCase(".cbz") || ext.equalsIgnoreCase(".zip") || ext.equalsIgnoreCase(".rar") || ext.equalsIgnoreCase(".cbr")) {
isValidFile = true;
}
}
return isValidFile;
}
private File inflateRarComicBookArchive(File archive) {
File tempDir = new File(this.sdRootDirectory, ".qComicViewer");
tempDir.mkdirs();
deleteDirContents(getFilesDir());
int prefixIncrement = 1;
Archive arch = null;
try {
arch = new Archive(archive);
		} catch (RarException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
if (arch != null) {
if (arch.isEncrypted()) {
Toast.makeText(getApplicationContext(), "Archive is encryped.", Toast.LENGTH_SHORT).show();
}
FileHeader fh = null;
try {
while (true) {
fh = arch.nextFileHeader();
if (fh == null) {
break;
}
if (fh.isEncrypted()) {
continue;
}
if (fh.isDirectory()) {
Toast.makeText(getApplicationContext(), "Archive contains directory.", Toast.LENGTH_SHORT).show();
} else {
FileOutputStream stream = openFileOutput(String.format("%05d", prefixIncrement) + "-" + fh.getFileNameString(), Context.MODE_PRIVATE);
arch.extractFile(fh, stream);
stream.close();
prefixIncrement++;
}
}
			} catch (IOException e) {
				e.printStackTrace();
			} catch (RarException e) {
				e.printStackTrace();
			}
}
return tempDir;
}
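	/*
	 * Hedged sketch (illustrative, not called by this activity): RAR archives do not
	 * guarantee that entries come back in page order, so a variant of the loop above
	 * could sort the file headers by name before extracting. It assumes the bundled
	 * junrar version exposes Archive.getFileHeaders(); everything else reuses API
	 * already used in inflateRarComicBookArchive().
	 */
	private void extractRarEntriesSorted(Archive arch) throws IOException, RarException {
		java.util.List<FileHeader> headers = arch.getFileHeaders();
		java.util.Collections.sort(headers, new java.util.Comparator<FileHeader>() {
			@Override
			public int compare(FileHeader a, FileHeader b) {
				return a.getFileNameString().compareTo(b.getFileNameString());
			}
		});
		int prefix = 1;
		for (FileHeader fh : headers) {
			if (fh.isDirectory() || fh.isEncrypted()) {
				continue; // skip folders and entries that cannot be extracted
			}
			FileOutputStream stream = openFileOutput(
					String.format("%05d", prefix) + "-" + fh.getFileNameString(), Context.MODE_PRIVATE);
			try {
				arch.extractFile(fh, stream);
			} finally {
				stream.close();
			}
			prefix++;
		}
	}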
private File inflateZipComicBookArchive(File archive) throws IOException {
InputStream is = new FileInputStream(archive);
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(is));
FileOutputStream fos = null;
File tempDir = new File(this.sdRootDirectory, ".qComicViewer");
tempDir.mkdirs();
deleteDirContents(getFilesDir());
int prefixIncrement = 1;
try {
ZipEntry ze;
while ((ze = zis.getNextEntry()) != null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buffer = new byte[2048];
int count;
while ((count = zis.read(buffer)) != -1) {
baos.write(buffer, 0, count);
}
String filename = null;
if (ze.getName().lastIndexOf("/") != -1) {
filename = ze.getName().substring(ze.getName().lastIndexOf("/") + 1);
} else {
filename = ze.getName();
}
byte[] bytes = baos.toByteArray();
if (isImageFile(filename)) {
fos = openFileOutput(String.format("%05d", prefixIncrement) + "-" + filename, Context.MODE_PRIVATE);
fos.write(bytes);
					fos.flush();
					fos.close();
prefixIncrement++;
}
}
} finally {
if (zis != null) {
zis.close();
}
if (fos != null) {
fos.close();
}
}
return tempDir;
}
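	/*
	 * Hedged sketch (illustrative only, not called by this activity): the same ZIP
	 * extraction written with try-with-resources, which closes every stream even
	 * when an entry fails. This assumes the build supports Java 7 language features
	 * on the targeted Android API level.
	 */
	private void inflateZipEntriesSketch(File archive) throws IOException {
		int prefixIncrement = 1;
		try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(archive)))) {
			ZipEntry ze;
			byte[] buffer = new byte[2048];
			while ((ze = zis.getNextEntry()) != null) {
				// Strip any directory part; entries without a '/' keep their full name.
				String name = ze.getName();
				String filename = name.substring(name.lastIndexOf('/') + 1);
				if (!isImageFile(filename)) {
					continue;
				}
				try (FileOutputStream fos = openFileOutput(
						String.format("%05d", prefixIncrement) + "-" + filename, Context.MODE_PRIVATE)) {
					int count;
					while ((count = zis.read(buffer)) != -1) {
						fos.write(buffer, 0, count);
					}
				}
				prefixIncrement++;
			}
		}
	}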
	private void launchImageViewer(String directoryPath) {
		Intent i = new Intent(this, ImageViewer.class);
		// The extracted pages are written to internal storage via openFileOutput(),
		// so the viewer is pointed at getFilesDir() rather than the passed directory.
		i.putExtra("imagesPath", getFilesDir().getAbsolutePath());
startActivity(i);
}
private boolean isImageFile(String filename) {
boolean isValidFile = false;
int extensionIndex = filename.lastIndexOf(".");
if (extensionIndex != -1) {
String ext = filename.substring(extensionIndex);
if (ext.equalsIgnoreCase(".jpg") || ext.equalsIgnoreCase(".png") || ext.equalsIgnoreCase(".gif")
|| ext.equalsIgnoreCase(".jpeg") || ext.equalsIgnoreCase(".bmp")) {
isValidFile = true;
}
}
return isValidFile;
}
public static void deleteDirContents(File dir) {
if (dir.isDirectory()) {
			String[] children = dir.list();
			if (children == null) {
				return; // directory could not be listed
			}
			File f;
for (int i = 0; i < children.length; i++) {
f = new File(dir, children[i]);
if (!f.getName().equals("last_page.txt")) {
f.delete();
}
}
}
}
}
|