package org.yeastrc.ms.service.sqtfile;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.yeastrc.ms.dao.BaseDAOTestCase;
import org.yeastrc.ms.dao.DAOFactory;
import org.yeastrc.ms.dao.search.prolucid.ProlucidSearchDAO;
import org.yeastrc.ms.dao.search.prolucid.ProlucidSearchResultDAO;
import org.yeastrc.ms.dao.search.sqtfile.SQTRunSearchDAO;
import org.yeastrc.ms.dao.search.sqtfile.SQTSearchScanDAO;
import org.yeastrc.ms.domain.general.MsEnzyme;
import org.yeastrc.ms.domain.general.MsEnzyme.Sense;
import org.yeastrc.ms.domain.search.MsResidueModification;
import org.yeastrc.ms.domain.search.MsResidueModificationIn;
import org.yeastrc.ms.domain.search.MsResultResidueMod;
import org.yeastrc.ms.domain.search.MsResultTerminalMod;
import org.yeastrc.ms.domain.search.MsSearchDatabase;
import org.yeastrc.ms.domain.search.MsSearchResultPeptide;
import org.yeastrc.ms.domain.search.MsSearchResultProtein;
import org.yeastrc.ms.domain.search.MsTerminalModification;
import org.yeastrc.ms.domain.search.MsTerminalModificationIn;
import org.yeastrc.ms.domain.search.Program;
import org.yeastrc.ms.domain.search.SearchFileFormat;
import org.yeastrc.ms.domain.search.ValidationStatus;
import org.yeastrc.ms.domain.search.MsTerminalModification.Terminal;
import org.yeastrc.ms.domain.search.prolucid.ProlucidParam;
import org.yeastrc.ms.domain.search.prolucid.ProlucidParamIn;
import org.yeastrc.ms.domain.search.prolucid.ProlucidResultData;
import org.yeastrc.ms.domain.search.prolucid.ProlucidSearch;
import org.yeastrc.ms.domain.search.prolucid.ProlucidSearchResult;
import org.yeastrc.ms.domain.search.sqtfile.SQTHeaderItem;
import org.yeastrc.ms.domain.search.sqtfile.SQTRunSearch;
import org.yeastrc.ms.domain.search.sqtfile.SQTSearchScan;
import org.yeastrc.ms.parser.DataProviderException;
import org.yeastrc.ms.service.MsDataUploader;
public class ProlucidSQTDataUploadServiceTest extends BaseDAOTestCase {
private static final ProlucidSearchDAO psearchDao = DAOFactory.instance().getProlucidSearchDAO();
private static final SQTRunSearchDAO sqtRunSearchDao = DAOFactory.instance().getSqtRunSearchDAO();
private static final SQTSearchScanDAO sqtScanDao = DAOFactory.instance().getSqtSpectrumDAO();
private static final ProlucidSearchResultDAO presDao = DAOFactory.instance().getProlucidResultDAO();
protected void setUp() throws Exception {
super.setUp();
resetDatabase();
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void testUploadValidProlucidData2() throws DataProviderException {
String dir = "test_resources/validProlucidData_dir2";
// String dir = "/Users/vagisha/WORK/MS_LIBRARY/ProlucidData_dir/2985/RE/forTest";
MsDataUploader uploader = new MsDataUploader();
int experimentId = 0;
java.util.Date searchDate = new java.util.Date();
uploader.setSpectrumDataDirectory(dir);
uploader.setSearchDirectory(dir);
uploader.setSearchDate(searchDate);
uploader.setRemoteServer("remoteServer");
uploader.setRemoteSearchDataDirectory("remoteDirectory");
uploader.uploadData();
experimentId = uploader.getUploadedExperimentId();
assertEquals(0, uploader.getUploadExceptionList().size());
assertNotSame(0, experimentId);
checkUploadedSearch(uploader.getUploadedSearchId(), searchDate, dir);
}
public void testUploadValidProlucidData1() throws DataProviderException {
String dir = "test_resources/validProlucidData_dir1";
// String dir = "/Users/vagisha/WORK/MS_LIBRARY/ProlucidData_dir/2985/RE/forTest";
MsDataUploader uploader = new MsDataUploader();
int experimentId = 0;
java.util.Date searchDate = new java.util.Date();
uploader.setSpectrumDataDirectory(dir);
uploader.setSearchDirectory(dir);
uploader.setSearchDate(searchDate);
uploader.setRemoteServer("remoteServer");
uploader.setRemoteSearchDataDirectory("remoteDirectory");
uploader.uploadData();
experimentId = uploader.getUploadedExperimentId();
assertEquals(0, uploader.getUploadExceptionList().size());
assertNotSame(0, experimentId);
checkUploadedSearch(uploader.getUploadedSearchId(), searchDate, dir);
}
private void checkUploadedSearch(int searchId, java.util.Date searchDate, String dir) {
// make sure all the data got uploaded
int runId1 = getRunId("1");
int runId2 = getRunId("2");
assertNotSame(0, runId1);
assertNotSame(0, runId2);
checkSearch(searchId, searchDate, dir);
// checkSearchForFile1(searchId, runId1); // 1.ms2
int runSearchId = checkSearchForFile2(searchId, runId2); // 2.ms2
checkSearchResults2(runSearchId, runId2);
checkSearchScan2(runSearchId, runId2);
}
private void checkSearchScan2(int runSearchId, int runId) {
// S 00023 00023 3 22 shamu048 866.46000 1892.2 56.4 4716510
int scanId = scanDao.loadScanIdForScanNumRun(23, runId);
SQTSearchScan scan = sqtScanDao.load(runSearchId, scanId, 3, new BigDecimal("866.46000"));
assertEquals(scanId, scan.getScanId());
assertEquals(runSearchId, scan.getRunSearchId());
assertEquals(3, scan.getCharge());
assertEquals(22, scan.getProcessTime());
assertEquals("shamu048", scan.getServerName());
// assertEquals(866.46, scan.getObservedMass().doubleValue());
assertEquals(1892.2, scan.getTotalIntensity().doubleValue());
assertEquals(56.4, scan.getLowestSp().doubleValue());
assertEquals(4716510, scan.getSequenceMatches());
// S 00020 00020 1 22 shamu049 807.67000 2681.7 95.3 5138490
scanId = scanDao.loadScanIdForScanNumRun(20, runId);
scan = sqtScanDao.load(runSearchId, scanId, 1, new BigDecimal("807.67000"));
assertEquals(scanId, scan.getScanId());
assertEquals(runSearchId, scan.getRunSearchId());
assertEquals(1, scan.getCharge());
assertEquals(22, scan.getProcessTime());
assertEquals("shamu049", scan.getServerName());
// assertEquals(807.67000, scan.getObservedMass().doubleValue());
assertEquals(2681.7, scan.getTotalIntensity().doubleValue());
assertEquals(95.3, scan.getLowestSp().doubleValue());
assertEquals(5138490, scan.getSequenceMatches());
// S 00010 00010 1 23 shamu050 717.62000 4000.6 111.6 5928764
scanId = scanDao.loadScanIdForScanNumRun(10, runId);
scan = sqtScanDao.load(runSearchId, scanId, 1, new BigDecimal("717.62000"));
assertEquals(scanId, scan.getScanId());
assertEquals(runSearchId, scan.getRunSearchId());
assertEquals(1, scan.getCharge());
assertEquals(23, scan.getProcessTime());
assertEquals("shamu050", scan.getServerName());
// assertEquals(717.62000, scan.getObservedMass().doubleValue());
assertEquals(4000.6, scan.getTotalIntensity().doubleValue());
assertEquals(111.6, scan.getLowestSp().doubleValue());
assertEquals(5928764, scan.getSequenceMatches());
// S 00026 00026 1 23 shamu048 817.33000 2044.4 69.6 5697304
scanId = scanDao.loadScanIdForScanNumRun(26, runId);
scan = sqtScanDao.load(runSearchId, scanId, 1, new BigDecimal("817.33000"));
assertEquals(scanId, scan.getScanId());
assertEquals(runSearchId, scan.getRunSearchId());
assertEquals(1, scan.getCharge());
assertEquals(23, scan.getProcessTime());
assertEquals("shamu048", scan.getServerName());
// assertEquals(817.33, scan.getObservedMass().doubleValue());
assertEquals(2044.4, scan.getTotalIntensity().doubleValue());
assertEquals(69.6, scan.getLowestSp().doubleValue());
assertEquals(5697304, scan.getSequenceMatches());
}
private void checkSearchResults2(int runSearchId, int runId) {
List<Integer> ids = presDao.loadResultIdsForRunSearch(runSearchId);
assertEquals(13, ids.size());
Collections.sort(ids);
// S 00023 00023 3 22 shamu048 866.46000 1892.2 56.4 4716510
// M 1 4 866.96470123 0.00000 1.1529 3.137 9 14 L.(156.1011)S(79.9876)DMSASRI(123.4567).T U
ProlucidSearchResult res = presDao.load(ids.get(0));
assertEquals(getScanId(runId, 23), res.getScanId());
assertEquals(3, res.getCharge());
assertEquals(866.46, res.getObservedMass().doubleValue());
assertEquals("SDMSASRI", res.getResultPeptide().getPeptideSequence());
assertEquals('L', res.getResultPeptide().getPreResidue());
assertEquals('T', res.getResultPeptide().getPostResidue());
assertEquals(ValidationStatus.UNVALIDATED, res.getValidationStatus());
// check the residue modifications for this result
MsSearchResultPeptide peptide = res.getResultPeptide();
List<MsResultResidueMod> resultMods = peptide.getResultDynamicResidueModifications();
assertEquals(1, resultMods.size());
assertEquals(0, resultMods.get(0).getModifiedPosition());
assertEquals('p', resultMods.get(0).getModificationSymbol());
assertEquals('S', resultMods.get(0).getModifiedResidue());
assertEquals(79.9876, resultMods.get(0).getModificationMass().doubleValue());
// check the terminal modifications for this result
List<MsResultTerminalMod> termMods = peptide.getResultDynamicTerminalModifications();
assertEquals(2, termMods.size());
Collections.sort(termMods, new TerminalModComparator<MsResultTerminalMod>());
MsResultTerminalMod tmod = termMods.get(0);
assertEquals(Terminal.CTERM, tmod.getModifiedTerminal());
assertEquals(123.4567, tmod.getModificationMass().doubleValue());
assertEquals('y', tmod.getModificationSymbol());
tmod = termMods.get(1);
assertEquals(Terminal.NTERM, tmod.getModifiedTerminal());
assertEquals(156.1011, tmod.getModificationMass().doubleValue());
assertEquals('*', tmod.getModificationSymbol());
// check the ProLuCID specific results
ProlucidResultData data = res.getProlucidResultData();
assertEquals(1, data.getPrimaryScoreRank());
assertEquals(4, data.getSecondaryScoreRank());
assertEquals(866.96470123, data.getCalculatedMass().doubleValue());
assertEquals(0.0, data.getDeltaCN().doubleValue());
assertEquals(1.1529, data.getPrimaryScore());
assertEquals(3.137, data.getSecondaryScore());
assertEquals(9, data.getMatchingIons());
assertEquals(14, data.getPredictedIons());
// check the protein matches for this result
// 28 L Reverse_gi|927415|emb|CAA55359.1|
// 29 L Reverse_gi|21618336|ref|NP_659006.1|
List<MsSearchResultProtein> prList = res.getProteinMatchList();
Collections.sort(prList, new MatchProteinComparator());
assertEquals(2, prList.size());
MsSearchResultProtein pr = prList.get(0);
assertEquals(ids.get(0).intValue(), pr.getResultId());
assertEquals("Reverse_gi|21618336|ref|NP_659006.1|", pr.getAccession());
pr = prList.get(1);
assertEquals(ids.get(0).intValue(), pr.getResultId());
assertEquals("Reverse_gi|927415|emb|CAA55359.1|", pr.getAccession());
// M 2 200 865.91874 0.0311 1.117 2.953 8 16 T.(79.9876)(156.1011)SGTS(79.9876)SAS(79.9876)LR.K V
res = presDao.load(ids.get(1));
assertEquals(getScanId(runId, 23), res.getScanId());
assertEquals(3, res.getCharge());
assertEquals("SGTSSASLR", res.getResultPeptide().getPeptideSequence());
assertEquals('T', res.getResultPeptide().getPreResidue());
assertEquals('K', res.getResultPeptide().getPostResidue());
assertEquals(ValidationStatus.VALID, res.getValidationStatus());
// check the residue modifications for this result
peptide = res.getResultPeptide();
resultMods = peptide.getResultDynamicResidueModifications();
Collections.sort(resultMods, new ResultResidueModComparator<MsResultResidueMod>());
assertEquals(3, resultMods.size());
MsResultResidueMod rmod = resultMods.get(0);
assertEquals(0, rmod.getModifiedPosition());
assertEquals('p', rmod.getModificationSymbol());
assertEquals('S', rmod.getModifiedResidue());
assertEquals(79.9876, rmod.getModificationMass().doubleValue());
rmod = resultMods.get(1);
assertEquals(3, rmod.getModifiedPosition());
assertEquals('p', rmod.getModificationSymbol());
assertEquals('S', rmod.getModifiedResidue());
assertEquals(79.9876, rmod.getModificationMass().doubleValue());
rmod = resultMods.get(2);
assertEquals(6, rmod.getModifiedPosition());
assertEquals('p', rmod.getModificationSymbol());
assertEquals('S', rmod.getModifiedResidue());
assertEquals(79.9876, rmod.getModificationMass().doubleValue());
// check the terminal modifications for this result
termMods = peptide.getResultDynamicTerminalModifications();
assertEquals(1, termMods.size());
tmod = termMods.get(0);
assertEquals(Terminal.NTERM, tmod.getModifiedTerminal());
assertEquals(156.1011, tmod.getModificationMass().doubleValue());
assertEquals('*', tmod.getModificationSymbol());
// check the ProLuCID specific results
data = res.getProlucidResultData();
assertEquals(2, data.getPrimaryScoreRank());
assertEquals(200, data.getSecondaryScoreRank());
assertEquals(865.91874, data.getCalculatedMass().doubleValue());
assertEquals(0.0311, data.getDeltaCN().doubleValue());
assertEquals(1.117, data.getPrimaryScore());
assertEquals(2.953, data.getSecondaryScore());
assertEquals(8, data.getMatchingIons());
assertEquals(16, data.getPredictedIons());
// M 4 22 866.99266 0.0989 1.0388 2.551 8 14 A.SG(-99.9)IY(79.9876)ASRL.S N
res = presDao.load(ids.get(3));
assertEquals(getScanId(runId, 23), res.getScanId());
assertEquals(3, res.getCharge());
assertEquals("SGIYASRL", res.getResultPeptide().getPeptideSequence());
assertEquals('A', res.getResultPeptide().getPreResidue());
assertEquals('S', res.getResultPeptide().getPostResidue());
assertEquals(ValidationStatus.NOT_VALID, res.getValidationStatus());
// check the residue modifications for this result
peptide = res.getResultPeptide();
resultMods = peptide.getResultDynamicResidueModifications();
Collections.sort(resultMods, new ResultResidueModComparator<MsResultResidueMod>());
assertEquals(2, resultMods.size());
rmod = resultMods.get(0);
assertEquals(1, rmod.getModifiedPosition());
assertEquals('#', resultMods.get(0).getModificationSymbol());
assertEquals('G', resultMods.get(0).getModifiedResidue());
assertEquals(-99.9, resultMods.get(0).getModificationMass().doubleValue());
rmod = resultMods.get(1);
assertEquals(3, rmod.getModifiedPosition());
assertEquals('p', rmod.getModificationSymbol());
assertEquals('Y', rmod.getModifiedResidue());
assertEquals(79.9876, rmod.getModificationMass().doubleValue());
// check the terminal modifications for this result
termMods = peptide.getResultDynamicTerminalModifications();
assertEquals(0, termMods.size());
// check the ProLuCID specific results
data = res.getProlucidResultData();
assertEquals(4, data.getPrimaryScoreRank());
assertEquals(22, data.getSecondaryScoreRank());
assertEquals(866.99266, data.getCalculatedMass().doubleValue());
assertEquals(0.0989, data.getDeltaCN().doubleValue());
assertEquals(1.0388, data.getPrimaryScore());
assertEquals(2.551, data.getSecondaryScore());
assertEquals(8, data.getMatchingIons());
assertEquals(14, data.getPredictedIons());
// check the protein matches for this result
// 36 L gi|113427084|ref|XP_001128380.1|
prList = res.getProteinMatchList();
assertEquals(1, prList.size());
pr = prList.get(0);
assertEquals(ids.get(3).intValue(), pr.getResultId());
assertEquals("gi|113427084|ref|XP_001128380.1|", pr.getAccession());
// 60 S 00026 00026 1 23 shamu048 817.33000 2044.4 69.6 5697304
// 61 M 1 22 816.80570 0.00000 1.5492 3.795 11 24 D.AGGGAGGGGAGAG(123.4567)(-99.9).Q M
// 62 L gi|3090887|gb|AAC15421.1|
res = presDao.load(ids.get(10));
assertNotSame(0, res.getScanId());
assertEquals(getScanId(runId, 26), res.getScanId());
assertEquals(1, res.getCharge());
assertEquals("AGGGAGGGGAGAG", res.getResultPeptide().getPeptideSequence());
assertEquals('D', res.getResultPeptide().getPreResidue());
assertEquals('Q', res.getResultPeptide().getPostResidue());
assertEquals(ValidationStatus.MAYBE, res.getValidationStatus());
// check the residue modifications for this result
peptide = res.getResultPeptide();
resultMods = peptide.getResultDynamicResidueModifications();
assertEquals(1, resultMods.size());
rmod = resultMods.get(0);
assertEquals(12, rmod.getModifiedPosition());
assertEquals('#', resultMods.get(0).getModificationSymbol());
assertEquals('G', resultMods.get(0).getModifiedResidue());
assertEquals(-99.9, resultMods.get(0).getModificationMass().doubleValue());
// check the terminal modifications for this result
termMods = peptide.getResultDynamicTerminalModifications();
assertEquals(1, termMods.size());
tmod = termMods.get(0);
assertEquals(Terminal.CTERM, tmod.getModifiedTerminal());
assertEquals(123.4567, tmod.getModificationMass().doubleValue());
assertEquals('y', tmod.getModificationSymbol());
// check the ProLuCID specific results
data = res.getProlucidResultData();
assertEquals(1, data.getPrimaryScoreRank());
assertEquals(22, data.getSecondaryScoreRank());
assertEquals(816.80570, data.getCalculatedMass().doubleValue());
assertEquals(0.0, data.getDeltaCN().doubleValue());
assertEquals(1.5492, data.getPrimaryScore());
assertEquals(3.795, data.getSecondaryScore());
assertEquals(11, data.getMatchingIons());
assertEquals(24, data.getPredictedIons());
// check the protein matches for this result
// 62 L gi|3090887|gb|AAC15421.1|
prList = res.getProteinMatchList();
assertEquals(1, prList.size());
pr = prList.get(0);
assertEquals(ids.get(10).intValue(), pr.getResultId());
assertEquals("gi|3090887|gb|AAC15421.1|", pr.getAccession());
}
private int getScanId(int runId, int scanNumber) {
return scanDao.loadScanIdForScanNumRun(scanNumber, runId);
}
private int checkSearchForFile2(int searchId, int runId) {
int runSearchId = sqtRunSearchDao.loadIdForRunAndSearch(runId, searchId);
SQTRunSearch runSearch = sqtRunSearchDao.loadRunSearch(runSearchId);
assertEquals(runId, runSearch.getRunId());
assertEquals(searchId, runSearch.getSearchId());
assertEquals(SearchFileFormat.SQT_PLUCID, runSearch.getSearchFileFormat());
assertEquals("2008-01-29", runSearch.getSearchDate().toString());
assertEquals(167, runSearch.getSearchDuration());
assertEquals(Program.PROLUCID, runSearch.getSearchProgram());
// check headers
checkRunSearchHeaders2(runSearch);
return runSearchId;
}
private void checkRunSearchHeaders2(SQTRunSearch runSearch) {
List<SQTHeaderItem> headers = runSearch.getHeaders();
assertEquals(23, headers.size());
}
private void checkSearch(int searchId, java.util.Date experimentDate, String dir) {
ProlucidSearch search = psearchDao.loadSearch(searchId);
// assertEquals(experimentDate, search.getSearchDate());
assertEquals("remoteDirectory", search.getServerDirectory());
assertEquals(Program.PROLUCID, search.getSearchProgram());
assertEquals("3.0", search.getSearchProgramVersion());
// check the database
checkSearchDatabase(search);
// check the enzyme
checkEnzyme(search);
// check static residue modifications
checkStaticResidueMods(search);
// check dynamic residue modifications
checkDynamicResidueMods(search);
// check static terminal modifications
checkStaticTerminalMods(search);
// check dynamic terminal modifications
checkDynamicTerminalMods(search);
// check the parameters
try {
this.checkUploadedParams(search, dir);
}
catch (IOException e) {
fail("Error checking uploaded params");
}
}
private void checkUploadedParams(ProlucidSearch search, String dir) throws IOException {
List<ProlucidParam> params = search.getProlucidParams();
Collections.sort(params, new Comparator<ProlucidParam>(){
public int compare(ProlucidParam o1, ProlucidParam o2) {
return Integer.valueOf(o1.getId()).compareTo(Integer.valueOf(o2.getId()));
}});
ProlucidParamNode root = new ProlucidParamNode();
root.elName = params.get(0).getParamElementName();
root.elValue = params.get(0).getParamElementValue();
root.id = params.get(0).getId();
assertEquals(0, params.get(0).getParentParamElementId());
List<ProlucidParamNode> nodes = new ArrayList<ProlucidParamNode>(params.size());
nodes.add(root);
for (int i = 1; i < params.size(); i++) {
ProlucidParam p = params.get(i);
boolean found = false;
for (ProlucidParamNode parents: nodes) {
if (parents.id == p.getParentParamElementId()) {
found = true;
ProlucidParamNode nn = new ProlucidParamNode();
nn.elName = p.getParamElementName();
nn.elValue = p.getParamElementValue();
nn.id = p.getId();
nodes.add(nn);
parents.addChildParamElement(nn);
break;
}
}
if (!found)
fail("Parent param element not found for param with id: " + p.getId());
}
StringBuilder fromDb = new StringBuilder();
printParam(root, 0, fromDb);
fromDb.deleteCharAt(0);
// read the file from directory to compare against
String origFile = dir+File.separator+"search.xml.fortest";
StringBuilder buf = new StringBuilder();
BufferedReader r = new BufferedReader(new FileReader(origFile));
String line = r.readLine();
while (line != null) {
buf.append("\n"+line.trim());
line = r.readLine();
}
r.close();
buf.deleteCharAt(0);
assertEquals(fromDb.length(), buf.toString().length());
String[] fromDbLines = fromDb.toString().split("\\n");
String[] origLines = buf.toString().split("\\n");
assertEquals(fromDbLines.length, origLines.length);
for (int i = 0; i < fromDbLines.length; i++) {
assertEquals(fromDbLines[i], origLines[i]);
}
assertEquals(fromDb.toString(), buf.toString());
}
private void printParam(ProlucidParamIn param, int indent, StringBuilder buf) {
String tab = "";
// for (int i = 0; i < indent; i++) {
// tab += "\t";
// }
// System.out.print(tab+"\n<"+param.getParamElementName()+">");
buf.append(tab+"\n<"+param.getParamElementName()+">");
if (param.getParamElementValue() != null) {
// System.out.print(param.getParamElementValue());
buf.append(param.getParamElementValue());
}
// System.out.println("");
List<ProlucidParamIn> childNodes = param.getChildParamElements();
for (ProlucidParamIn child: childNodes) {
printParam(child, indent+1, buf);
}
if (param.getParamElementValue() == null) {
// System.out.println("");
buf.append("\n");
}
// System.out.print(tab+"</"+param.getParamElementName()+">");
buf.append(tab+"</"+param.getParamElementName()+">");
}
private static final class ProlucidParamNode implements ProlucidParamIn {
int id;
private String elName;
private String elValue;
private List<ProlucidParamIn> childElList = new ArrayList<ProlucidParamIn>();
@Override
public String getParamElementName() {
return elName;
}
@Override
public String getParamElementValue() {
return elValue;
}
@Override
public List<ProlucidParamIn> getChildParamElements() {
return childElList;
}
public void addChildParamElement(ProlucidParamIn param) {
childElList.add(param);
}
public String toString() {
StringBuilder buf = new StringBuilder();
buf.append("Name: "+elName);
buf.append("\n");
buf.append("Value: "+elValue);
return buf.toString();
}
}
private void checkSearchDatabase(ProlucidSearch search) {
List<MsSearchDatabase> dbs = search.getSearchDatabases();
assertEquals(1, dbs.size());
MsSearchDatabase db = dbs.get(0);
assertEquals("/net/maccoss/vol2/software/pipeline/dbase/mouse-contam.fasta", db.getServerPath());
assertEquals("remoteServer", db.getServerAddress());
assertEquals("mouse-contam.fasta", db.getDatabaseFileName());
}
private void checkEnzyme(ProlucidSearch search) {
List<MsEnzyme> enzymes = search.getEnzymeList();
assertEquals(1, enzymes.size());
MsEnzyme en = enzymes.get(0);
assertEquals("trypsin", en.getName());
assertEquals(Sense.CTERM, en.getSense());
assertEquals("RK", en.getCut());
assertEquals(null, en.getNocut());
assertEquals(null, en.getDescription());
}
private void checkStaticResidueMods(ProlucidSearch search) {
List<MsResidueModification> mods = search.getStaticResidueMods();
assertEquals(1, mods.size());
MsResidueModification mod = mods.get(0);
assertEquals('C', mod.getModifiedResidue());
assertEquals(0, mod.getModificationSymbol());
assertEquals(57.02146, mod.getModificationMass().doubleValue());
}
private void checkDynamicResidueMods(ProlucidSearch search) {
List<MsResidueModification> mods = search.getDynamicResidueMods();
assertEquals(6, mods.size());
Collections.sort(mods, new ResidueModComparator<MsResidueModification>());
int i = 0;
MsResidueModification mod = mods.get(i++);
assertEquals('D', mod.getModifiedResidue());
assertEquals('#', mod.getModificationSymbol());
assertEquals(-99.9, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals('G', mod.getModifiedResidue());
assertEquals('#', mod.getModificationSymbol());
assertEquals(-99.9, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals('S', mod.getModifiedResidue());
assertEquals('p', mod.getModificationSymbol());
assertEquals(79.9876, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals('T', mod.getModifiedResidue());
assertEquals('p', mod.getModificationSymbol());
assertEquals(79.9876, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals('V', mod.getModifiedResidue());
assertEquals('#', mod.getModificationSymbol());
assertEquals(-99.9, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals('Y', mod.getModifiedResidue());
assertEquals('p', mod.getModificationSymbol());
assertEquals(79.9876, mod.getModificationMass().doubleValue());
}
private void checkStaticTerminalMods(ProlucidSearch search) {
List<MsTerminalModification> mods = search.getStaticTerminalMods();
assertEquals(2, mods.size());
Collections.sort(mods, new TerminalModComparator<MsTerminalModification>());
int i = 0;
MsTerminalModification mod = mods.get(i++);
assertEquals(Terminal.CTERM, mod.getModifiedTerminal());
assertEquals(0, mod.getModificationSymbol());
assertEquals(-10.0, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals(Terminal.NTERM, mod.getModifiedTerminal());
assertEquals(0, mod.getModificationSymbol());
assertEquals(987.654, mod.getModificationMass().doubleValue());
}
private void checkDynamicTerminalMods(ProlucidSearch search) {
List<MsTerminalModification> mods = search.getDynamicTerminalMods();
assertEquals(2, mods.size());
Collections.sort(mods, new TerminalModComparator<MsTerminalModification>());
int i = 0;
MsTerminalModification mod = mods.get(i++);
assertEquals(Terminal.CTERM, mod.getModifiedTerminal());
assertEquals('y', mod.getModificationSymbol());
assertEquals(123.4567, mod.getModificationMass().doubleValue());
mod = mods.get(i++);
assertEquals(Terminal.NTERM, mod.getModifiedTerminal());
assertEquals('*', mod.getModificationSymbol());
assertEquals(156.1011, mod.getModificationMass().doubleValue());
}
private class ResidueModComparator <T extends MsResidueModificationIn> implements Comparator<T> {
public int compare(T o1, T o2) {
return Character.valueOf(o1.getModifiedResidue()).compareTo(Character.valueOf(o2.getModifiedResidue()));
}
}
private class TerminalModComparator <T extends MsTerminalModificationIn> implements Comparator<T> {
public int compare(T o1, T o2) {
return o1.getModificationMass().compareTo(o2.getModificationMass());
}
}
private class ResultResidueModComparator <T extends MsResultResidueMod> implements Comparator<T> {
public int compare(T o1, T o2) {
return Integer.valueOf(o1.getModifiedPosition()).compareTo(Integer.valueOf(o2.getModifiedPosition()));
}
}
private class MatchProteinComparator implements Comparator<MsSearchResultProtein> {
public int compare(MsSearchResultProtein o1, MsSearchResultProtein o2) {
return o1.getAccession().compareTo(o2.getAccession());
}
}
private int getRunId(String runFileName) {
List<Integer> runIds = runDao.loadRunIdsForFileName(runFileName);
assertEquals(1, runIds.size());
int runId = runIds.get(0);
assertNotSame(0, runId);
return runId;
}
}
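/*
 * Illustrative sketch (not part of the test above): how one SQT 'S' line, e.g.
 * "S 00023 00023 3 22 shamu048 866.46000 1892.2 56.4 4716510", maps onto the fields
 * asserted in checkSearchScan2(). This is a minimal standalone parse of the example
 * line only, not the uploader's actual SQT parsing code.
 */
class SqtScanLineSketch {
    public static void main(String[] args) {
        String line = "S 00023 00023 3 22 shamu048 866.46000 1892.2 56.4 4716510";
        String[] f = line.split("\\s+");
        int scanNumber = Integer.parseInt(f[1]);           // 23       -> loadScanIdForScanNumRun(23, runId)
        int charge = Integer.parseInt(f[3]);               // 3        -> scan.getCharge()
        int processTime = Integer.parseInt(f[4]);          // 22       -> scan.getProcessTime()
        String serverName = f[5];                          // shamu048 -> scan.getServerName()
        double observedMass = Double.parseDouble(f[6]);    // 866.46   -> observed mass (used as a load key)
        double totalIntensity = Double.parseDouble(f[7]);  // 1892.2   -> scan.getTotalIntensity()
        double lowestSp = Double.parseDouble(f[8]);        // 56.4     -> scan.getLowestSp()
        int sequenceMatches = Integer.parseInt(f[9]);      // 4716510  -> scan.getSequenceMatches()
        System.out.println(scanNumber + " " + charge + " " + processTime + " " + serverName);
        System.out.println(observedMass + " " + totalIntensity + " " + lowestSp + " " + sequenceMatches);
    }
}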
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.composite;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Objects;
/**
* A {@link ValuesSource} builder for {@link CompositeAggregationBuilder}
*/
public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSourceBuilder<AB>> implements Writeable, ToXContentFragment {
protected final String name;
private String field = null;
private Script script = null;
private ValueType userValueTypeHint = null;
private boolean missingBucket = false;
private SortOrder order = SortOrder.ASC;
private String format = null;
CompositeValuesSourceBuilder(String name) {
this.name = name;
}
CompositeValuesSourceBuilder(StreamInput in) throws IOException {
this.name = in.readString();
this.field = in.readOptionalString();
if (in.readBoolean()) {
this.script = new Script(in);
}
if (in.readBoolean()) {
this.userValueTypeHint = ValueType.readFromStream(in);
}
this.missingBucket = in.readBoolean();
this.order = SortOrder.readFromStream(in);
this.format = in.readOptionalString();
}
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeOptionalString(field);
boolean hasScript = script != null;
out.writeBoolean(hasScript);
if (hasScript) {
script.writeTo(out);
}
boolean hasValueType = userValueTypeHint != null;
out.writeBoolean(hasValueType);
if (hasValueType) {
userValueTypeHint.writeTo(out);
}
out.writeBoolean(missingBucket);
order.writeTo(out);
out.writeOptionalString(format);
innerWriteTo(out);
}
protected abstract void innerWriteTo(StreamOutput out) throws IOException;
protected abstract void doXContentBody(XContentBuilder builder, Params params) throws IOException;
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(type());
if (field != null) {
builder.field("field", field);
}
if (script != null) {
builder.field("script", script);
}
builder.field("missing_bucket", missingBucket);
if (userValueTypeHint != null) {
builder.field("value_type", userValueTypeHint.getPreferredName());
}
if (format != null) {
builder.field("format", format);
}
builder.field("order", order);
doXContentBody(builder, params);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(field, missingBucket, script, userValueTypeHint, order, format);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
@SuppressWarnings("unchecked")
AB that = (AB) o;
return Objects.equals(field, that.field()) &&
Objects.equals(script, that.script()) &&
Objects.equals(userValueTypeHint, that.userValuetypeHint()) &&
Objects.equals(missingBucket, that.missingBucket()) &&
Objects.equals(order, that.order()) &&
Objects.equals(format, that.format());
}
public String name() {
return name;
}
abstract String type();
/**
* Sets the field to use for this source
*/
@SuppressWarnings("unchecked")
public AB field(String field) {
if (field == null) {
throw new IllegalArgumentException("[field] must not be null");
}
this.field = field;
return (AB) this;
}
/**
* Gets the field to use for this source
*/
public String field() {
return field;
}
/**
* Sets the script to use for this source
*/
@SuppressWarnings("unchecked")
public AB script(Script script) {
if (script == null) {
throw new IllegalArgumentException("[script] must not be null");
}
this.script = script;
return (AB) this;
}
/**
* Gets the script to use for this source
*/
public Script script() {
return script;
}
/**
* Sets the {@link ValueType} for the value produced by this source
*/
@SuppressWarnings("unchecked")
public AB userValuetypeHint(ValueType valueType) {
if (valueType == null) {
throw new IllegalArgumentException("[userValueTypeHint] must not be null");
}
this.userValueTypeHint = valueType;
return (AB) this;
}
/**
* Gets the {@link ValueType} for the value produced by this source
*/
public ValueType userValuetypeHint() {
return userValueTypeHint;
}
/**
* If <code>true</code> an explicit <code>null</code> bucket will represent documents with missing values.
*/
@SuppressWarnings("unchecked")
public AB missingBucket(boolean missingBucket) {
this.missingBucket = missingBucket;
return (AB) this;
}
/**
* Returns false if documents with missing values are ignored, otherwise missing values are
* represented by an explicit {@code null} bucket.
*/
public boolean missingBucket() {
return missingBucket;
}
/**
* Sets the {@link SortOrder} to use to sort values produced by this source
*/
@SuppressWarnings("unchecked")
public AB order(String order) {
if (order == null) {
throw new IllegalArgumentException("[order] must not be null");
}
this.order = SortOrder.fromString(order);
return (AB) this;
}
/**
* Sets the {@link SortOrder} to use to sort values produced by this source
*/
@SuppressWarnings("unchecked")
public AB order(SortOrder order) {
if (order == null) {
throw new IllegalArgumentException("[order] must not be null");
}
this.order = order;
return (AB) this;
}
/**
* Gets the {@link SortOrder} to use to sort values produced by this source
*/
public SortOrder order() {
return order;
}
/**
* Sets the format to use for the output of the aggregation.
*/
@SuppressWarnings("unchecked")
public AB format(String format) {
if (format == null) {
throw new IllegalArgumentException("[format] must not be null: [" + name + "]");
}
this.format = format;
return (AB) this;
}
/**
* Gets the format to use for the output of the aggregation.
*/
public String format() {
return format;
}
/**
* Actually build the values source and its associated configuration.
*/
protected abstract CompositeValuesSourceConfig innerBuild(ValuesSourceRegistry registry,
ValuesSourceConfig config) throws IOException;
protected abstract ValuesSourceType getDefaultValuesSourceType();
public final CompositeValuesSourceConfig build(AggregationContext context) throws IOException {
ValuesSourceConfig config = ValuesSourceConfig.resolve(context,
userValueTypeHint, field, script, null, timeZone(), format, getDefaultValuesSourceType());
return innerBuild(context.getValuesSourceRegistry(), config);
}
/**
* The time zone for this value source. Default implementation returns {@code null}
* because most value source types don't support time zone.
*/
protected ZoneId timeZone() {
return null;
}
}
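/*
 * Illustrative sketch (not part of the class above): how a concrete subclass such as
 * Elasticsearch's TermsValuesSourceBuilder is typically configured through the fluent
 * setters defined here (field, order, missingBucket). The source and field names used
 * below ("product", "product.keyword") are hypothetical, and the exact wiring into
 * CompositeAggregationBuilder may vary between versions.
 */
import java.util.Arrays;
import java.util.List;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;

class CompositeSourceUsageSketch {
    static CompositeAggregationBuilder buildAggregation() {
        // Each source contributes one component of the composite key.
        CompositeValuesSourceBuilder<?> byProduct = new TermsValuesSourceBuilder("product")
            .field("product.keyword")    // field to read values from
            .missingBucket(true)         // emit an explicit null bucket for missing values
            .order("desc");              // sort this key component descending
        List<CompositeValuesSourceBuilder<?>> sources = Arrays.asList(byProduct);
        return new CompositeAggregationBuilder("my_composite", sources);
    }
}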
package com.fractalwrench.vulcan.view.recycler;
import com.fractalwrench.voatapi.Constants;
import com.fractalwrench.voatapi.ui.AppEntityComment;
import com.fractalwrench.voatapi.ui.RecyclerModel;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* Represents a node in a tree structure, which has references to its parent and children (if any).
* This is used to model a collapsing comment tree in the UI. Nodes can be added to and
* removed from the tree.
*/
public class VoatCommentNode implements RecyclerModel, Serializable {
private final List<VoatCommentNode> children;
private List<VoatCommentNode> sortedList;
private VoatCommentNode parent;
private int depth;
private boolean expanded;
private boolean optionsMode;
private AppEntityComment data;
private String transientReply; // any comment reply which has not been persisted on the server.
private String transientEdit; // any comment edit which has not been persisted on the server.
private boolean hasSubmittedEdit; // whether the user has submitted an edit request or not.
private boolean hasSubmittedDelete; // whether the user has submitted a delete request or not.
private boolean hasSubmittedReply; // whether the user has submitted a create request or not.
private @Constants.VoteType Integer persistedVote; // the vote type already persisted on the server.
private @Constants.VoteType Integer submittedVote; // the vote type submitted but not yet persisted on the server.
public VoatCommentNode() {
this.children = new ArrayList<>();
expanded = true;
sortedList = toList();
}
/**
* Adds a child to the last index of this parent node.
*
* @param child the child being added to this node
*/
public void addChild(VoatCommentNode child) {
if (child != null && !children.contains(child)) {
child.setParent(this);
children.add(child);
}
}
/**
* Adds a child to the first index of this parent node.
*
* @param child the child being added to this node
*/
public void addChildAsFirstElement(VoatCommentNode child) {
if (child != null && !children.contains(child)) {
child.setParent(this);
children.add(0, child);
}
}
/**
* Removes a child from the parent node.
*
* @param child the child being removed from this node
*/
public void removeChild(VoatCommentNode child) {
if (child != null) {
children.remove(child);
child.setParent(null);
}
}
public List<VoatCommentNode> getChildren() {
return children;
}
public VoatCommentNode getParent() {
return parent;
}
public void setParent(VoatCommentNode parent) {
this.parent = parent;
}
public boolean isRoot() {
return parent == null;
}
public boolean hasChildren() {
return !children.isEmpty();
}
public int getDepth() {
return depth;
}
public void setDepth(int depth) {
this.depth = depth;
}
public AppEntityComment getData() {
return data;
}
public void setData(AppEntityComment data) {
this.data = data;
}
public boolean isExpanded() {
return expanded;
}
public void setExpanded(boolean expanded) {
this.expanded = expanded;
}
/**
* Recurses to give an ordered comment tree list. Node children must be in an expanded state
* to be added to this list.
*
* @return an ordered comment tree list
*/
public List<VoatCommentNode> toList() {
List<VoatCommentNode> orderedList = new ArrayList<>();
for (VoatCommentNode child : children) {
if (expanded) {
orderedList.add(child);
orderedList.addAll(child.toList());
}
}
return orderedList;
}
public int size() {
return sortedList.size();
}
public void notifyDataChanged() {
sortedList = toList();
}
public VoatCommentNode getNodeAtIndex(int index) {
return sortedList.get(index);
}
/**
* Finds the comment by its id, if any exists
*
* @param id the comment id
* @return the matching comment, or null
*/
public VoatCommentNode getNodeById(int id) {
for (VoatCommentNode node : sortedList) {
if (id == node.getData().getId()) {
return node;
}
}
return null;
}
/**
* Finds the index of the child node
*
* @param child the given child
* @return the index of the child in this list, or -1
*/
public int getNodeIndex(VoatCommentNode child) {
for (int i = 0; i < sortedList.size(); i++) {
VoatCommentNode node = sortedList.get(i);
if (node.equals(child)) {
return i;
}
}
return -1;
}
public String getTransientReply() {
return transientReply;
}
public void setTransientReply(String transientReply) {
this.transientReply = transientReply;
}
public String getTransientEdit() {
return transientEdit;
}
public void setTransientEdit(String transientEdit) {
this.transientEdit = transientEdit;
}
public boolean hasSubmittedEdit() {
return hasSubmittedEdit;
}
public void setHasSubmittedEdit(boolean hasSubmittedEdit) {
this.hasSubmittedEdit = hasSubmittedEdit;
}
public boolean hasSubmittedDelete() {
return hasSubmittedDelete;
}
public void setHasSubmittedDelete(boolean hasSubmittedDelete) {
this.hasSubmittedDelete = hasSubmittedDelete;
}
public boolean hasSubmittedReply() {
return hasSubmittedReply;
}
public void setHasSubmittedReply(boolean hasSubmittedReply) {
this.hasSubmittedReply = hasSubmittedReply;
}
public Integer getTransientVote() {
return submittedVote;
}
public void setTransientVote(Integer submittedVote) {
this.submittedVote = submittedVote;
}
public Integer getPersistedVote() {
return persistedVote;
}
public void setPersistedVote(Integer persistedVote) {
this.persistedVote = persistedVote;
}
public boolean isOptionsMode() {
return optionsMode;
}
public void setOptionsMode(boolean optionsMode) {
this.optionsMode = optionsMode;
}
}
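/*
 * Illustrative sketch (not part of the class above): building a two-level comment tree,
 * collapsing a branch, and flattening it for a RecyclerView-style adapter. Only methods
 * defined on VoatCommentNode above are used; the AppEntityComment payload is omitted.
 */
import com.fractalwrench.vulcan.view.recycler.VoatCommentNode;

class VoatCommentNodeUsageSketch {
    public static void main(String[] args) {
        VoatCommentNode root = new VoatCommentNode();   // invisible root of the tree
        VoatCommentNode top = new VoatCommentNode();    // a top-level comment
        VoatCommentNode reply = new VoatCommentNode();  // a reply to the top-level comment
        top.setDepth(0);
        reply.setDepth(1);
        root.addChild(top);
        top.addChild(reply);
        root.notifyDataChanged();                       // rebuild the flattened list
        System.out.println(root.size());                // 2: both nodes are visible
        top.setExpanded(false);                         // collapse the top-level comment
        root.notifyDataChanged();
        System.out.println(root.size());                // 1: the reply is now hidden
    }
}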
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Michael Zhou
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.javascript.rhino.Node.TypeDeclarationNode;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* An AST construction helper class for TypeDeclarationNode
* @author [email protected] (Alex Eagle)
* @author [email protected] (Michael Zhou)
*/
public class TypeDeclarationsIR {
/**
* @return a new node representing the string built-in type.
*/
public static TypeDeclarationNode stringType() {
return new TypeDeclarationNode(Token.STRING_TYPE);
}
/**
* @return a new node representing the number built-in type.
*/
public static TypeDeclarationNode numberType() {
return new TypeDeclarationNode(Token.NUMBER_TYPE);
}
/**
* @return a new node representing the boolean built-in type.
*/
public static TypeDeclarationNode booleanType() {
return new TypeDeclarationNode(Token.BOOLEAN_TYPE);
}
/**
* Equivalent to the UNKNOWN type in Closure, expressed with {@code {?}}
* @return a new node representing any type, without type checking.
*/
public static TypeDeclarationNode anyType() {
return new TypeDeclarationNode(Token.ANY_TYPE);
}
/**
* @return a new node representing the Void type as defined by TypeScript.
*/
public static TypeDeclarationNode voidType() {
return new TypeDeclarationNode(Token.VOID_TYPE);
}
/**
* @return a new node representing the Undefined type as defined by TypeScript.
*/
public static TypeDeclarationNode undefinedType() {
return new TypeDeclarationNode(Token.UNDEFINED_TYPE);
}
/**
* Splits a '.' separated qualified name into a tree of type segments.
*
* @param typeName a qualified name such as "goog.ui.Window"
* @return a new node representing the type
* @see #namedType(Iterable)
*/
public static TypeDeclarationNode namedType(String typeName) {
return namedType(Splitter.on('.').split(typeName));
}
/**
* Produces a tree structure similar to the Rhino AST of a qualified name
* expression, under a top-level NAMED_TYPE node.
*
* <p>Example:
* <pre>
* NAMED_TYPE
* NAME goog
* STRING ui
* STRING Window
* </pre>
*/
public static TypeDeclarationNode namedType(Iterable<String> segments) {
Iterator<String> segmentsIt = segments.iterator();
Node node = IR.name(segmentsIt.next());
while (segmentsIt.hasNext()) {
node = IR.getprop(node, IR.string(segmentsIt.next()));
}
return new TypeDeclarationNode(Token.NAMED_TYPE, node);
}
/**
* Represents a structural type.
* Closure calls this a Record Type and accepts the syntax
* {@code {myNum: number, myObject}}
*
* <p>Example:
* <pre>
* RECORD_TYPE
* STRING_KEY myNum
* NUMBER_TYPE
* STRING_KEY myObject
* </pre>
* @param properties a map from property name to property type
* @return a new node representing the record type
*/
public static TypeDeclarationNode recordType(
LinkedHashMap<String, TypeDeclarationNode> properties) {
TypeDeclarationNode node = new TypeDeclarationNode(Token.RECORD_TYPE);
for (Map.Entry<String, TypeDeclarationNode> prop : properties.entrySet()) {
Node stringKey = IR.stringKey(prop.getKey());
node.addChildToBack(stringKey);
if (prop.getValue() != null) {
stringKey.addChildToFront(prop.getValue());
}
}
return node;
}
private static Node maybeAddType(Node node, TypeDeclarationNode type) {
if (type != null) {
node.setDeclaredTypeExpression(type);
}
return node;
}
/**
* Represents a function type.
* Closure has syntax like {@code {function(string, boolean):number}}
* Closure doesn't include parameter names. If the parameter types are unnamed,
* arbitrary names can be substituted, e.g. p1, p2, etc.
*
* <p>Example:
* <pre>
* FUNCTION_TYPE
* NUMBER_TYPE
* STRING_KEY p1 [declared_type_expr: STRING_TYPE]
* STRING_KEY p2 [declared_type_expr: BOOLEAN_TYPE]
* </pre>
* @param returnType the type returned by the function, possibly ANY_TYPE
* @param requiredParams the names and types of the required parameters.
* @param optionalParams the names and types of the optional parameters.
* @param restName the name of the rest parameter, if any.
* @param restType the type of the rest parameter, if any.
*/
public static TypeDeclarationNode functionType(
Node returnType, LinkedHashMap<String, TypeDeclarationNode> requiredParams,
LinkedHashMap<String, TypeDeclarationNode> optionalParams,
String restName, TypeDeclarationNode restType) {
TypeDeclarationNode node = new TypeDeclarationNode(Token.FUNCTION_TYPE, returnType);
Preconditions.checkNotNull(requiredParams);
Preconditions.checkNotNull(optionalParams);
for (Map.Entry<String, TypeDeclarationNode> param : requiredParams.entrySet()) {
Node name = IR.name(param.getKey());
node.addChildToBack(maybeAddType(name, param.getValue()));
}
for (Map.Entry<String, TypeDeclarationNode> param : optionalParams.entrySet()) {
Node name = IR.name(param.getKey());
name.putBooleanProp(Node.OPT_ES6_TYPED, true);
node.addChildToBack(maybeAddType(name, param.getValue()));
}
if (restName != null) {
Node rest = Node.newString(Token.REST, restName);
node.addChildrenToBack(maybeAddType(rest, restType));
}
return node;
}
/**
* Represents a parameterized, or generic, type.
* Closure calls this a Type Application and accepts syntax like
* {@code {Object.<string, number>}}
*
* <p>Example:
* <pre>
* PARAMETERIZED_TYPE
* NAMED_TYPE
* NAME Object
* STRING_TYPE
* NUMBER_TYPE
* </pre>
* @param baseType
* @param typeParameters
*/
public static TypeDeclarationNode parameterizedType(
TypeDeclarationNode baseType, Iterable<TypeDeclarationNode> typeParameters) {
if (Iterables.isEmpty(typeParameters)) {
return baseType;
}
TypeDeclarationNode node = new TypeDeclarationNode(Token.PARAMETERIZED_TYPE, baseType);
for (Node typeParameter : typeParameters) {
node.addChildToBack(typeParameter);
}
return node;
}
/**
* Represents an array type. In Closure, this is represented by a
* {@link #parameterizedType(TypeDeclarationNode, Iterable) parameterized type} of {@code Array}
* with {@code elementType} as the sole type parameter.
*
* <p>Example
* <pre>
* ARRAY_TYPE
* elementType
* </pre>
*/
public static TypeDeclarationNode arrayType(Node elementType) {
return new TypeDeclarationNode(Token.ARRAY_TYPE, elementType);
}
/**
* Represents a union type, which can be one of the given types.
* Closure accepts syntax like {@code {(number|boolean)}}
*
* <p>Example:
* <pre>
* UNION_TYPE
* NUMBER_TYPE
* BOOLEAN_TYPE
* </pre>
* @param options the types which are accepted
* @return a new node representing the union type
*/
public static TypeDeclarationNode unionType(Iterable<TypeDeclarationNode> options) {
Preconditions.checkArgument(!Iterables.isEmpty(options),
"union must have at least one option");
TypeDeclarationNode node = new TypeDeclarationNode(Token.UNION_TYPE);
for (Node option : options) {
node.addChildToBack(option);
}
return node;
}
public static TypeDeclarationNode unionType(TypeDeclarationNode... options) {
return unionType(Arrays.asList(options));
}
/**
* Represents a function parameter that is optional.
* In closure syntax, this is {@code function(?string=, number=)}
* In TypeScript syntax, it is
* {@code (firstName: string, lastName?: string)=>string}
* @param parameterType the type of the parameter
* @return a new node representing the function parameter type
*/
public static TypeDeclarationNode optionalParameter(TypeDeclarationNode parameterType) {
return new TypeDeclarationNode(Token.OPTIONAL_PARAMETER, parameterType);
}
}
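/*
 * Illustrative sketch (not part of the class above): composing a few of the factory
 * methods defined here to build the AST for a Closure type expression roughly equivalent
 * to {function(string, number=): (boolean|undefined)}. The parameter names p1 and p2 are
 * arbitrary placeholders, as described in the functionType() javadoc.
 */
import java.util.LinkedHashMap;
import com.google.javascript.rhino.Node.TypeDeclarationNode;
import static com.google.javascript.rhino.TypeDeclarationsIR.*;

class TypeDeclarationsIRUsageSketch {
    static TypeDeclarationNode buildFunctionType() {
        // Required parameter p1: string
        LinkedHashMap<String, TypeDeclarationNode> required = new LinkedHashMap<>();
        required.put("p1", stringType());
        // Optional parameter p2: number
        LinkedHashMap<String, TypeDeclarationNode> optional = new LinkedHashMap<>();
        optional.put("p2", numberType());
        // Return type: (boolean|undefined)
        TypeDeclarationNode returnType = unionType(booleanType(), undefinedType());
        // No rest parameter, so restName and restType are null.
        return functionType(returnType, required, optional, null, null);
    }
}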
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.apache.storm.pacemaker;
import java.net.InetSocketAddress;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import javax.security.auth.login.Configuration;
import org.apache.storm.Config;
import org.apache.storm.generated.HBMessage;
import org.apache.storm.messaging.netty.ISaslClient;
import org.apache.storm.messaging.netty.NettyRenameThreadFactory;
import org.apache.storm.pacemaker.codec.ThriftNettyClientCodec;
import org.apache.storm.security.auth.ClientAuthUtils;
import org.apache.storm.shade.io.netty.bootstrap.Bootstrap;
import org.apache.storm.shade.io.netty.buffer.PooledByteBufAllocator;
import org.apache.storm.shade.io.netty.channel.Channel;
import org.apache.storm.shade.io.netty.channel.ChannelOption;
import org.apache.storm.shade.io.netty.channel.EventLoopGroup;
import org.apache.storm.shade.io.netty.channel.WriteBufferWaterMark;
import org.apache.storm.shade.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.storm.shade.io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.storm.utils.StormBoundedExponentialBackoffRetry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PacemakerClient implements ISaslClient {
private static final Logger LOG = LoggerFactory.getLogger(PacemakerClient.class);
private static Timer timer = new Timer(true);
private final Bootstrap bootstrap;
private final EventLoopGroup workerEventLoopGroup;
private String client_name;
private String secret;
private AtomicBoolean ready;
private AtomicBoolean shutdown;
private AtomicReference<Channel> channelRef;
private InetSocketAddress remote_addr;
private int maxPending = 100;
private HBMessage[] messages;
private LinkedBlockingQueue<Integer> availableMessageSlots;
private ThriftNettyClientCodec.AuthMethod authMethod;
private static final int maxRetries = 10;
private String host;
private StormBoundedExponentialBackoffRetry backoff = new StormBoundedExponentialBackoffRetry(100, 5000, 20);
private int retryTimes = 0;
public PacemakerClient(Map<String, Object> config, String host) {
this.host = host;
int port = (int) config.get(Config.PACEMAKER_PORT);
client_name = (String) config.get(Config.TOPOLOGY_NAME);
if (client_name == null) {
client_name = "pacemaker-client";
}
int maxWorkers = (int)config.get(Config.PACEMAKER_CLIENT_MAX_THREADS);
String auth = (String) config.get(Config.PACEMAKER_AUTH_METHOD);
switch (auth) {
case "DIGEST":
Configuration login_conf = ClientAuthUtils.getConfiguration(config);
authMethod = ThriftNettyClientCodec.AuthMethod.DIGEST;
secret = ClientAuthUtils.makeDigestPayload(login_conf, ClientAuthUtils.LOGIN_CONTEXT_PACEMAKER_DIGEST);
if (secret == null) {
LOG.error("Can't start pacemaker server without digest secret.");
throw new RuntimeException("Can't start pacemaker server without digest secret.");
}
break;
case "KERBEROS":
authMethod = ThriftNettyClientCodec.AuthMethod.KERBEROS;
break;
case "NONE":
authMethod = ThriftNettyClientCodec.AuthMethod.NONE;
break;
default:
authMethod = ThriftNettyClientCodec.AuthMethod.NONE;
LOG.warn("Invalid auth scheme: '{}'. Falling back to 'NONE'", auth);
break;
}
ready = new AtomicBoolean(false);
shutdown = new AtomicBoolean(false);
channelRef = new AtomicReference<>(null);
setupMessaging();
ThreadFactory workerFactory = new NettyRenameThreadFactory(this.host + "-pm");
// 0 means DEFAULT_EVENT_LOOP_THREADS
// https://github.com/netty/netty/blob/netty-4.1.24.Final/transport/src/main/java/io/netty/channel/MultithreadEventLoopGroup.java#L40
this.workerEventLoopGroup = new NioEventLoopGroup(maxWorkers > 0 ? maxWorkers : 0, workerFactory);
int thriftMessageMaxSize = (Integer) config.get(Config.PACEMAKER_THRIFT_MESSAGE_SIZE_MAX);
bootstrap = new Bootstrap()
.group(workerEventLoopGroup)
.channel(NioSocketChannel.class)
.option(ChannelOption.TCP_NODELAY, true)
.option(ChannelOption.SO_SNDBUF, 5242880)
.option(ChannelOption.SO_KEEPALIVE, true)
.option(ChannelOption.WRITE_BUFFER_WATER_MARK, new WriteBufferWaterMark(8 * 1024, 32 * 1024))
.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.handler(new ThriftNettyClientCodec(this, config, authMethod, host, thriftMessageMaxSize));
remote_addr = new InetSocketAddress(host, port);
bootstrap.connect(remote_addr);
}
private void setupMessaging() {
messages = new HBMessage[maxPending];
availableMessageSlots = new LinkedBlockingQueue<Integer>();
for (int i = 0; i < maxPending; i++) {
availableMessageSlots.add(i);
}
}
@Override
public synchronized void channelReady(Channel channel) {
Channel oldChannel = channelRef.get();
if (oldChannel != null) {
LOG.debug("Closing oldChannel is connected: {}", oldChannel.toString());
close_channel();
}
channelRef.set(channel);
retryTimes = 0;
LOG.debug("Channel is ready: {}", channel.toString());
ready.set(true);
this.notifyAll();
}
public String name() {
return client_name;
}
public String secretKey() {
return secret;
}
public HBMessage send(HBMessage m) throws PacemakerConnectionException, InterruptedException {
LOG.debug("Sending pacemaker message to {}: {}", host, m);
int next = availableMessageSlots.take();
synchronized (m) {
m.set_message_id(next);
messages[next] = m;
LOG.debug("Put message in slot: {} for {}", Integer.toString(next), host);
int retry = maxRetries;
while (true) {
try {
waitUntilReady();
Channel channel = channelRef.get();
if (channel != null) {
channel.writeAndFlush(m, channel.voidPromise());
m.wait(1000);
}
if (messages[next] != m && messages[next] != null) {
// messages[next] == null can happen if we lost the connection and subsequently reconnected or timed out.
HBMessage ret = messages[next];
messages[next] = null;
LOG.debug("Got Response: {}", ret);
return ret;
}
} catch (PacemakerConnectionException e) {
if (retry <= 0) {
throw e;
}
LOG.error("error attempting to write to a channel {}.", e.getMessage());
}
if (retry <= 0) {
throw new PacemakerConnectionException("couldn't get response after " + maxRetries + " attempts.");
}
retry--;
LOG.error("Not getting response or getting null response. Making {} more attempts for {}.", retry, host);
}
}
}
private void waitUntilReady() throws PacemakerConnectionException, InterruptedException {
// Wait for 'ready' (channel connected and maybe authentication)
if (!ready.get() || channelRef.get() == null) {
synchronized (this) {
if (!ready.get()) {
LOG.debug("Waiting for netty channel to be ready.");
this.wait(1000);
if (!ready.get() || channelRef.get() == null) {
throw new PacemakerConnectionException("Timed out waiting for channel ready.");
}
}
}
}
}
public void gotMessage(HBMessage m) {
int message_id = m.get_message_id();
if (message_id >= 0 && message_id < maxPending) {
LOG.debug("Pacemaker client got message: {}", m.toString());
HBMessage request = messages[message_id];
if (request == null) {
LOG.debug("No message for slot: {}", Integer.toString(message_id));
} else {
synchronized (request) {
messages[message_id] = m;
request.notifyAll();
availableMessageSlots.add(message_id);
}
}
} else {
LOG.error("Got Message with bad id: {}", m.toString());
}
}
public void reconnect() {
final PacemakerClient client = this;
timer.schedule(new TimerTask() {
public void run() {
client.doReconnect();
}
}, backoff.getSleepTimeMs(retryTimes++, 0));
ready.set(false);
setupMessaging();
}
public synchronized void doReconnect() {
LOG.info("reconnecting to {}", host);
close_channel();
if (!shutdown.get()) {
bootstrap.connect(remote_addr);
}
}
public void shutdown() {
shutdown.set(true);
workerEventLoopGroup.shutdownGracefully().awaitUninterruptibly();
}
private synchronized void close_channel() {
if (channelRef.get() != null) {
channelRef.get().close();
LOG.debug("channel {} closed", remote_addr);
channelRef.set(null);
}
}
public void close() {
close_channel();
}
}
|
|
/********************************************************************************
The MIT License(MIT)
Copyright(c) 2016 Copyleaks LTD (https://copyleaks.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sub-license, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
********************************************************************************/
package copyleaks.sdk.api;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.UUID;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import copyleaks.sdk.api.helpers.HttpURLConnection.CopyleaksClient;
import copyleaks.sdk.api.helpers.HttpURLConnection.HttpURLConnectionHelper;
import copyleaks.sdk.api.RequestMethod;
import copyleaks.sdk.api.exceptions.CommandFailedException;
import copyleaks.sdk.api.exceptions.SecurityTokenException;
import copyleaks.sdk.api.models.ComparisonResult;
import copyleaks.sdk.api.models.LoginToken;
import copyleaks.sdk.api.models.ResultRecord;
import copyleaks.sdk.api.models.responses.CheckStatusResponse;
import copyleaks.sdk.api.models.responses.CreateResourceResponse;
import copyleaks.sdk.api.models.responses.ProcessInList;
public class CopyleaksProcess implements Comparable<CopyleaksProcess>, Serializable
{
/**
* For 'Serializable' implementation.
*/
private static final long serialVersionUID = 1L;
public UUID PID;
/**
* Get process ID
*
* @return The process ID
*/
public UUID getPID()
{
return PID;
}
private void setPID(UUID processId)
{
PID = processId;
}
private Date CreationTimeUTC;
/**
* Get the process creation time
*
* @return Process creation time
*/
public Date getCreationTimeUTC()
{
return CreationTimeUTC;
}
private void setCreationTimeUTC(Date creationTimeUTC)
{
CreationTimeUTC = creationTimeUTC;
}
private boolean ListProcesses_IsCompleted = false;
private HashMap<String, String> CustomFields;
public HashMap<String, String> getCustomFields()
{
return CustomFields;
}
private void setCustomFields(HashMap<String, String> value)
{
this.CustomFields = value;
}
private LoginToken SecurityToken;
private LoginToken getSecurityToken()
{
return SecurityToken;
}
private void setSecurityToken(LoginToken securityToken)
{
SecurityToken = securityToken;
}
CopyleaksProcess(String product, LoginToken authorizationToken, ProcessInList process)
{
setProduct(product);
this.setPID(process.getProcessId());
this.setCreationTimeUTC(process.getCreationTimeUTC());
this.setSecurityToken(authorizationToken);
this.setCustomFields(process.getCustomFields());
this.ListProcesses_IsCompleted = process.getStatus().equalsIgnoreCase("finished");
}
CopyleaksProcess(String product, LoginToken authorizationToken, CreateResourceResponse response,
HashMap<String, String> customFields)
{
setProduct(product);
this.setPID(response.getProcessId());
this.setCreationTimeUTC(response.getCreationTimeUTC());
this.setSecurityToken(authorizationToken);
this.setCustomFields(customFields);
}
private String Product;
protected String getProduct()
{
return this.Product;
}
private void setProduct(String product)
{
this.Product = product;
}
/**
* Get process progress information
*
* @return process progress (out of 100).
* @throws CommandFailedException
* This exception is thrown if an error occurred during the
* processing of the command
* @throws SecurityTokenException
* The login-token is undefined or expired
*/
public int getCurrentProgress() throws SecurityTokenException, CommandFailedException
{
if (this.ListProcesses_IsCompleted)
{
return 100;
}
LoginToken.ValidateToken(this.getSecurityToken());
URL url;
HttpURLConnection conn = null;
Gson gson = new GsonBuilder().create();
String json;
try
{
url = new URL(String.format("%1$s/%2$s/%3$s/%4$s/status", Settings.ServiceEntryPoint,
Settings.ServiceVersion, this.getProduct(), getPID()));
conn = CopyleaksClient.getClient(url, this.getSecurityToken(), RequestMethod.GET, HttpContentTypes.Json,
HttpContentTypes.TextPlain);
if (conn.getResponseCode() != 200)
throw new CommandFailedException(conn);
try (InputStream inputStream = new BufferedInputStream(conn.getInputStream()))
{
json = HttpURLConnectionHelper.convertStreamToString(inputStream);
}
}
catch (IOException e)
{
throw new RuntimeException(e.getMessage());
}
finally
{
if (conn != null)
conn.disconnect();
}
CheckStatusResponse response = gson.fromJson(json, CheckStatusResponse.class);
return response.getProgressPercents();
}
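/*
 * Illustrative polling sketch (not part of the original SDK): wait for the scan to
 * finish before asking for results. The 3-second interval is an arbitrary choice.
 *
 *   while (process.getCurrentProgress() < 100)
 *       Thread.sleep(3000);
 *   ResultRecord[] results = process.GetResults();
 */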
/**
* Get the scan results from the server
*
* @return Scan results
* @throws CommandFailedException
* This exception is thrown if an error occurred during the
* processing of the command
* @throws SecurityTokenException
* The login-token is undefined or expired
*/
public ResultRecord[] GetResults() throws SecurityTokenException, CommandFailedException
{
LoginToken.ValidateToken(this.getSecurityToken());
String json;
URL url;
HttpURLConnection conn = null;
Gson gson = new GsonBuilder().create();
try
{
url = new URL(String.format("%1$s/%2$s/%3$s/%4$s/result", Settings.ServiceEntryPoint,
Settings.ServiceVersion, this.getProduct(), getPID()));
conn = CopyleaksClient.getClient(url, this.getSecurityToken(), RequestMethod.GET, HttpContentTypes.Json,
HttpContentTypes.Json);
if (conn.getResponseCode() != 200)
throw new CommandFailedException(conn);
try (InputStream inputStream = new BufferedInputStream(conn.getInputStream()))
{
json = HttpURLConnectionHelper.convertStreamToString(inputStream);
}
}
catch (IOException e)
{
throw new RuntimeException(e.getMessage());
}
finally
{
if (conn != null)
conn.disconnect();
}
ResultRecord[] results = gson.fromJson(json, ResultRecord[].class);
Arrays.sort(results, Collections.reverseOrder());
return results;
}
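/*
 * Note (added for clarity): GetResults() sorts the records with
 * Collections.reverseOrder(), so the array starts with the highest-ranked result.
 * A hypothetical caller could then fetch the cached text of each match:
 *
 *   for (ResultRecord record : process.GetResults()) {
 *       String matchedText = process.DownloadResultText(record);
 *       // ... use matchedText ...
 *   }
 */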
/**
* Deletes the process once it has finished running
*
* @throws CommandFailedException
* This exception is thrown if an error occurred during the
* processing of the command
* @throws SecurityTokenException
* The login-token is undefined or expired
*/
public void Delete() throws SecurityTokenException, CommandFailedException
{
LoginToken.ValidateToken(this.getSecurityToken());
URL url;
HttpURLConnection conn = null;
try
{
url = new URL(String.format("%1$s/%2$s/%3$s/%4$s/delete", Settings.ServiceEntryPoint,
Settings.ServiceVersion, this.getProduct(), this.PID));
conn = CopyleaksClient.getClient(url, this.getSecurityToken(), RequestMethod.DELETE, HttpContentTypes.Json,
HttpContentTypes.Json);
if (conn.getResponseCode() != 200)
throw new CommandFailedException(conn);
}
catch (IOException e)
{
throw new RuntimeException(e.getMessage());
}
finally
{
if (conn != null)
conn.disconnect();
}
}
public String DownloadSourceText()
throws SecurityTokenException, CommandFailedException
{
LoginToken.ValidateToken(this.getSecurityToken());
URL url;
HttpURLConnection conn = null;
try
{
url = new URL(String.format("%1$s/%2$s/%3$s/source-text?pid=%4$s", Settings.ServiceEntryPoint,
Settings.ServiceVersion, Settings.DownloadsServicePage, getPID()));
conn = CopyleaksClient.getClient(url, this.getSecurityToken(), RequestMethod.GET, HttpContentTypes.Json,
HttpContentTypes.Json);
if (conn.getResponseCode() != 200)
throw new CommandFailedException(conn);
try (InputStream inputStream = new BufferedInputStream(conn.getInputStream()))
{
return HttpURLConnectionHelper.convertStreamToString(inputStream);
}
}
catch (IOException e)
{
throw new RuntimeException(e.getMessage());
}
finally
{
if (conn != null)
conn.disconnect();
}
}
public String DownloadResultText(ResultRecord result)
throws SecurityTokenException, CommandFailedException
{
LoginToken.ValidateToken(this.getSecurityToken());
URL url;
HttpURLConnection conn = null;
try
{
url = new URL(result.getCachedVersion());
conn = CopyleaksClient.getClient(url, this.getSecurityToken(), RequestMethod.GET, HttpContentTypes.Json,
HttpContentTypes.Json);
if (conn.getResponseCode() != 200)
throw new CommandFailedException(conn);
try (InputStream inputStream = new BufferedInputStream(conn.getInputStream()))
{
return HttpURLConnectionHelper.convertStreamToString(inputStream);
}
}
catch (IOException e)
{
throw new RuntimeException(e.getMessage());
}
finally
{
if (conn != null)
conn.disconnect();
}
}
public ComparisonResult DownloadResultComparison(ResultRecord result)
throws SecurityTokenException, CommandFailedException
{
LoginToken.ValidateToken(this.getSecurityToken());
String json;
URL url;
HttpURLConnection conn = null;
Gson gson = new GsonBuilder().create();
try
{
url = new URL(result.getComparisonReport());
conn = CopyleaksClient.getClient(url, this.getSecurityToken(), RequestMethod.GET, HttpContentTypes.Json,
HttpContentTypes.Json);
if (conn.getResponseCode() != 200)
throw new CommandFailedException(conn);
try (InputStream inputStream = new BufferedInputStream(conn.getInputStream()))
{
json = HttpURLConnectionHelper.convertStreamToString(inputStream);
}
}
catch (IOException e)
{
throw new RuntimeException(e.getMessage());
}
finally
{
if (conn != null)
conn.disconnect();
}
return gson.fromJson(json, ComparisonResult.class);
}
@Override
public String toString()
{
return this.getPID().toString();
}
@Override
public int compareTo(CopyleaksProcess process)
{
return this.getCreationTimeUTC().compareTo(process.CreationTimeUTC);
}
}
|
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CmsMetadataCriteria.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202102;
/**
* A {@code CmsMetadataCriteria} object is used to target {@code CmsMetadataValue}
* objects.
*/
public class CmsMetadataCriteria extends com.google.api.ads.admanager.axis.v202102.CustomCriteriaLeaf implements java.io.Serializable {
/* The comparison operator. This attribute is required. */
private com.google.api.ads.admanager.axis.v202102.CmsMetadataCriteriaComparisonOperator operator;
/* The ids of {@link CmsMetadataValue} objects used to target
* CMS metadata. This attribute is
* required. */
private long[] cmsMetadataValueIds;
public CmsMetadataCriteria() {
}
public CmsMetadataCriteria(
com.google.api.ads.admanager.axis.v202102.CmsMetadataCriteriaComparisonOperator operator,
long[] cmsMetadataValueIds) {
this.operator = operator;
this.cmsMetadataValueIds = cmsMetadataValueIds;
}
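/*
 * Illustrative construction sketch (added for clarity; the EQUALS constant name is an
 * assumption about the generated CmsMetadataCriteriaComparisonOperator class, and the
 * ids are made up):
 *
 *   CmsMetadataCriteria criteria = new CmsMetadataCriteria(
 *       CmsMetadataCriteriaComparisonOperator.EQUALS,
 *       new long[] {12345L, 67890L});
 */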
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
.add("cmsMetadataValueIds", getCmsMetadataValueIds())
.add("operator", getOperator())
.toString();
}
/**
* Gets the operator value for this CmsMetadataCriteria.
*
* @return operator * The comparison operator. This attribute is required.
*/
public com.google.api.ads.admanager.axis.v202102.CmsMetadataCriteriaComparisonOperator getOperator() {
return operator;
}
/**
* Sets the operator value for this CmsMetadataCriteria.
*
* @param operator * The comparison operator. This attribute is required.
*/
public void setOperator(com.google.api.ads.admanager.axis.v202102.CmsMetadataCriteriaComparisonOperator operator) {
this.operator = operator;
}
/**
* Gets the cmsMetadataValueIds value for this CmsMetadataCriteria.
*
* @return cmsMetadataValueIds * The ids of {@link CmsMetadataValue} objects used to target
* CMS metadata. This attribute is
* required.
*/
public long[] getCmsMetadataValueIds() {
return cmsMetadataValueIds;
}
/**
* Sets the cmsMetadataValueIds value for this CmsMetadataCriteria.
*
* @param cmsMetadataValueIds * The ids of {@link CmsMetadataValue} objects used to target
* CMS metadata. This attribute is
* required.
*/
public void setCmsMetadataValueIds(long[] cmsMetadataValueIds) {
this.cmsMetadataValueIds = cmsMetadataValueIds;
}
public long getCmsMetadataValueIds(int i) {
return this.cmsMetadataValueIds[i];
}
public void setCmsMetadataValueIds(int i, long _value) {
this.cmsMetadataValueIds[i] = _value;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof CmsMetadataCriteria)) return false;
CmsMetadataCriteria other = (CmsMetadataCriteria) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = super.equals(obj) &&
((this.operator==null && other.getOperator()==null) ||
(this.operator!=null &&
this.operator.equals(other.getOperator()))) &&
((this.cmsMetadataValueIds==null && other.getCmsMetadataValueIds()==null) ||
(this.cmsMetadataValueIds!=null &&
java.util.Arrays.equals(this.cmsMetadataValueIds, other.getCmsMetadataValueIds())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = super.hashCode();
if (getOperator() != null) {
_hashCode += getOperator().hashCode();
}
if (getCmsMetadataValueIds() != null) {
for (int i=0;
i<java.lang.reflect.Array.getLength(getCmsMetadataValueIds());
i++) {
java.lang.Object obj = java.lang.reflect.Array.get(getCmsMetadataValueIds(), i);
if (obj != null &&
!obj.getClass().isArray()) {
_hashCode += obj.hashCode();
}
}
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(CmsMetadataCriteria.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "CmsMetadataCriteria"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("operator");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "operator"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "CmsMetadataCriteria.ComparisonOperator"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("cmsMetadataValueIds");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "cmsMetadataValueIds"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
elemField.setMaxOccursUnbounded(true);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.locks;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.plugins.document.util.Utils;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.Striped;
public class TreeNodeDocumentLocks implements NodeDocumentLocks {
/**
* Locks to ensure cache consistency on reads, writes and invalidation.
*/
private final Striped<Lock> locks = Striped.lock(4096);
/**
* ReadWriteLocks to synchronize cache access when child documents are
* requested from MongoDB and put into the cache. Accessing a single
* document in the cache will acquire a read (shared) lock for the parent
* key in addition to the lock (from {@link #locks}) for the individual
* document. Reading multiple sibling documents will acquire a write
* (exclusive) lock for the parent key. See OAK-1897.
*/
private final Striped<ReadWriteLock> parentLocks = Striped.readWriteLock(2048);
/**
* Counts how many times {@link TreeLock}s were acquired.
*/
private volatile AtomicLong lockAcquisitionCounter;
/**
* Acquires a lock for the given key. The returned tree lock will also hold
* a shared lock on the parent key.
*
* @param key a key.
* @return the acquired lock for the given key.
*/
@Override
public TreeLock acquire(String key) {
if (lockAcquisitionCounter != null) {
lockAcquisitionCounter.incrementAndGet();
}
TreeLock lock = TreeLock.shared(parentLocks.get(getParentId(key)), locks.get(key));
lock.lock();
return lock;
}
/**
* This implementation creates two sequences of locks (for the keys and for
* their parents) using {@link #locks} and {@link #parentLocks}. Then
* all parent locks are acquired first and in a second step the locks for
* the actual keys.
* <p>
* Since we only acquire a parentLock.read, there's no danger of
* deadlock caused by interleaving locks from two different stripes by two
* threads. The only place where the parentLock.write is acquired is the
* {@link #acquireExclusive(String)} and that method doesn't acquire locks in bulk.
*/
@Override
public Lock acquire(Collection<String> keys) {
if (lockAcquisitionCounter != null) {
lockAcquisitionCounter.addAndGet(keys.size());
}
Iterable<String> parentKeys = Iterables.transform(keys, new Function<String, String>() {
@Override
public String apply(String keys) {
return getParentId(keys);
}
});
ReadWriteLock bulkParentLock = new BulkReadWriteLock(parentLocks.bulkGet(parentKeys));
Lock bulkChildrenLock = new BulkLock(locks.bulkGet(keys));
Lock lock = TreeLock.shared(bulkParentLock, bulkChildrenLock);
lock.lock();
return lock;
}
/**
* Acquires an exclusive lock on the given parent key. Use this method to
* block cache access for child keys of the given parent key.
*
* @param parentKey the parent key.
* @return the acquired lock for the given parent key.
*/
public TreeLock acquireExclusive(String parentKey) {
if (lockAcquisitionCounter != null) {
lockAcquisitionCounter.incrementAndGet();
}
TreeLock lock = TreeLock.exclusive(parentLocks.get(parentKey));
lock.lock();
return lock;
}
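/*
 * Usage sketch (added for clarity): callers are expected to release the returned
 * lock in a finally block, e.g.
 *
 *   Lock lock = locks.acquire(id);
 *   try {
 *       // read or update the cached document
 *   } finally {
 *       lock.unlock();
 *   }
 *
 * acquireExclusive(parentId) follows the same pattern and blocks concurrent readers
 * of the parent's children while sibling documents are loaded into the cache.
 */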
/**
* Returns the parent id for the given id. An empty String is returned if
* the given value is the id of the root document or the id for a long path.
*
* @param id an id for a document.
* @return the id of the parent document or the empty String.
*/
@Nonnull
private static String getParentId(@Nonnull String id) {
String parentId = Utils.getParentId(checkNotNull(id));
if (parentId == null) {
parentId = "";
}
return parentId;
}
public void resetLockAcquisitionCount() {
lockAcquisitionCounter = new AtomicLong();
}
public long getLockAcquisitionCount() {
if (lockAcquisitionCounter == null) {
throw new IllegalStateException("The counter hasn't been initialized");
}
return lockAcquisitionCounter.get();
}
private final static class TreeLock implements Lock {
private final Lock parentLock;
private final Lock lock;
private TreeLock(Lock parentLock, Lock lock) {
this.parentLock = parentLock;
this.lock = lock;
}
private static TreeLock shared(ReadWriteLock parentLock, Lock lock) {
return new TreeLock(parentLock.readLock(), lock);
}
private static TreeLock exclusive(ReadWriteLock parentLock) {
return new TreeLock(parentLock.writeLock(), null);
}
@Override
public void lock() {
parentLock.lock();
if (lock != null) {
lock.lock();
}
}
@Override
public void unlock() {
if (lock != null) {
lock.unlock();
}
parentLock.unlock();
}
@Override
public void lockInterruptibly() throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public boolean tryLock() {
throw new UnsupportedOperationException();
}
@Override
public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public Condition newCondition() {
throw new UnsupportedOperationException();
}
}
}
|
|
// line 1 "ext/puma_http11/http11_parser.java.rl"
package org.jruby.puma;
import org.jruby.util.ByteList;
public class Http11Parser {
/** Machine **/
// line 65 "ext/puma_http11/http11_parser.java.rl"
/** Data **/
// line 18 "ext/puma_http11/org/jruby/puma/Http11Parser.java"
private static byte[] init__http_parser_actions_0()
{
return new byte [] {
0, 1, 0, 1, 2, 1, 3, 1, 4, 1, 5, 1,
6, 1, 7, 1, 8, 1, 9, 1, 11, 1, 12, 1,
13, 2, 0, 8, 2, 1, 2, 2, 4, 5, 2, 10,
7, 2, 12, 7, 3, 9, 10, 7
};
}
private static final byte _http_parser_actions[] = init__http_parser_actions_0();
private static short[] init__http_parser_key_offsets_0()
{
return new short [] {
0, 0, 8, 17, 27, 29, 30, 31, 32, 33, 34, 36,
39, 41, 44, 45, 61, 62, 78, 80, 81, 90, 99, 105,
111, 121, 130, 136, 142, 153, 159, 165, 175, 181, 187, 196,
205, 211, 217, 226, 235, 244, 253, 262, 271, 280, 289, 298,
307, 316, 325, 334, 343, 352, 361, 370, 379, 380
};
}
private static final short _http_parser_key_offsets[] = init__http_parser_key_offsets_0();
private static char[] init__http_parser_trans_keys_0()
{
return new char [] {
36, 95, 45, 46, 48, 57, 65, 90, 32, 36, 95, 45,
46, 48, 57, 65, 90, 42, 43, 47, 58, 45, 57, 65,
90, 97, 122, 32, 35, 72, 84, 84, 80, 47, 48, 57,
46, 48, 57, 48, 57, 13, 48, 57, 10, 13, 33, 124,
126, 35, 39, 42, 43, 45, 46, 48, 57, 65, 90, 94,
122, 10, 33, 58, 124, 126, 35, 39, 42, 43, 45, 46,
48, 57, 65, 90, 94, 122, 13, 32, 13, 32, 37, 60,
62, 127, 0, 31, 34, 35, 32, 37, 60, 62, 127, 0,
31, 34, 35, 48, 57, 65, 70, 97, 102, 48, 57, 65,
70, 97, 102, 43, 58, 45, 46, 48, 57, 65, 90, 97,
122, 32, 34, 35, 37, 60, 62, 127, 0, 31, 48, 57,
65, 70, 97, 102, 48, 57, 65, 70, 97, 102, 32, 34,
35, 37, 59, 60, 62, 63, 127, 0, 31, 48, 57, 65,
70, 97, 102, 48, 57, 65, 70, 97, 102, 32, 34, 35,
37, 60, 62, 63, 127, 0, 31, 48, 57, 65, 70, 97,
102, 48, 57, 65, 70, 97, 102, 32, 34, 35, 37, 60,
62, 127, 0, 31, 32, 34, 35, 37, 60, 62, 127, 0,
31, 48, 57, 65, 70, 97, 102, 48, 57, 65, 70, 97,
102, 32, 36, 95, 45, 46, 48, 57, 65, 90, 32, 36,
95, 45, 46, 48, 57, 65, 90, 32, 36, 95, 45, 46,
48, 57, 65, 90, 32, 36, 95, 45, 46, 48, 57, 65,
90, 32, 36, 95, 45, 46, 48, 57, 65, 90, 32, 36,
95, 45, 46, 48, 57, 65, 90, 32, 36, 95, 45, 46,
48, 57, 65, 90, 32, 36, 95, 45, 46, 48, 57, 65,
90, 32, 36, 95, 45, 46, 48, 57, 65, 90, 32, 36,
95, 45, 46, 48, 57, 65, 90, 32, 36, 95, 45, 46,
48, 57, 65, 90, 32, 36, 95, 45, 46, 48, 57, 65,
90, 32, 36, 95, 45, 46, 48, 57, 65, 90, 32, 36,
95, 45, 46, 48, 57, 65, 90, 32, 36, 95, 45, 46,
48, 57, 65, 90, 32, 36, 95, 45, 46, 48, 57, 65,
90, 32, 36, 95, 45, 46, 48, 57, 65, 90, 32, 36,
95, 45, 46, 48, 57, 65, 90, 32, 0
};
}
private static final char _http_parser_trans_keys[] = init__http_parser_trans_keys_0();
private static byte[] init__http_parser_single_lengths_0()
{
return new byte [] {
0, 2, 3, 4, 2, 1, 1, 1, 1, 1, 0, 1,
0, 1, 1, 4, 1, 4, 2, 1, 5, 5, 0, 0,
2, 7, 0, 0, 9, 0, 0, 8, 0, 0, 7, 7,
0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 1, 0
};
}
private static final byte _http_parser_single_lengths[] = init__http_parser_single_lengths_0();
private static byte[] init__http_parser_range_lengths_0()
{
return new byte [] {
0, 3, 3, 3, 0, 0, 0, 0, 0, 0, 1, 1,
1, 1, 0, 6, 0, 6, 0, 0, 2, 2, 3, 3,
4, 1, 3, 3, 1, 3, 3, 1, 3, 3, 1, 1,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 0, 0
};
}
private static final byte _http_parser_range_lengths[] = init__http_parser_range_lengths_0();
private static short[] init__http_parser_index_offsets_0()
{
return new short [] {
0, 0, 6, 13, 21, 24, 26, 28, 30, 32, 34, 36,
39, 41, 44, 46, 57, 59, 70, 73, 75, 83, 91, 95,
99, 106, 115, 119, 123, 134, 138, 142, 152, 156, 160, 169,
178, 182, 186, 193, 200, 207, 214, 221, 228, 235, 242, 249,
256, 263, 270, 277, 284, 291, 298, 305, 312, 314
};
}
private static final short _http_parser_index_offsets[] = init__http_parser_index_offsets_0();
private static byte[] init__http_parser_indicies_0()
{
return new byte [] {
0, 0, 0, 0, 0, 1, 2, 3, 3, 3, 3, 3,
1, 4, 5, 6, 7, 5, 5, 5, 1, 8, 9, 1,
10, 1, 11, 1, 12, 1, 13, 1, 14, 1, 15, 1,
16, 15, 1, 17, 1, 18, 17, 1, 19, 1, 20, 21,
21, 21, 21, 21, 21, 21, 21, 21, 1, 22, 1, 23,
24, 23, 23, 23, 23, 23, 23, 23, 23, 1, 26, 27,
25, 29, 28, 30, 32, 1, 1, 1, 1, 1, 31, 33,
35, 1, 1, 1, 1, 1, 34, 36, 36, 36, 1, 34,
34, 34, 1, 37, 38, 37, 37, 37, 37, 1, 8, 1,
9, 39, 1, 1, 1, 1, 38, 40, 40, 40, 1, 38,
38, 38, 1, 41, 1, 43, 44, 45, 1, 1, 46, 1,
1, 42, 47, 47, 47, 1, 42, 42, 42, 1, 8, 1,
9, 49, 1, 1, 50, 1, 1, 48, 51, 51, 51, 1,
48, 48, 48, 1, 52, 1, 54, 55, 1, 1, 1, 1,
53, 56, 1, 58, 59, 1, 1, 1, 1, 57, 60, 60,
60, 1, 57, 57, 57, 1, 2, 61, 61, 61, 61, 61,
1, 2, 62, 62, 62, 62, 62, 1, 2, 63, 63, 63,
63, 63, 1, 2, 64, 64, 64, 64, 64, 1, 2, 65,
65, 65, 65, 65, 1, 2, 66, 66, 66, 66, 66, 1,
2, 67, 67, 67, 67, 67, 1, 2, 68, 68, 68, 68,
68, 1, 2, 69, 69, 69, 69, 69, 1, 2, 70, 70,
70, 70, 70, 1, 2, 71, 71, 71, 71, 71, 1, 2,
72, 72, 72, 72, 72, 1, 2, 73, 73, 73, 73, 73,
1, 2, 74, 74, 74, 74, 74, 1, 2, 75, 75, 75,
75, 75, 1, 2, 76, 76, 76, 76, 76, 1, 2, 77,
77, 77, 77, 77, 1, 2, 78, 78, 78, 78, 78, 1,
2, 1, 1, 0
};
}
private static final byte _http_parser_indicies[] = init__http_parser_indicies_0();
private static byte[] init__http_parser_trans_targs_0()
{
return new byte [] {
2, 0, 3, 38, 4, 24, 28, 25, 5, 20, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 57, 17,
18, 19, 14, 18, 19, 14, 5, 21, 22, 5, 21, 22,
23, 24, 25, 26, 27, 5, 28, 20, 29, 31, 34, 30,
31, 32, 34, 33, 5, 35, 20, 36, 5, 35, 20, 36,
37, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56
};
}
private static final byte _http_parser_trans_targs[] = init__http_parser_trans_targs_0();
private static byte[] init__http_parser_trans_actions_0()
{
return new byte [] {
1, 0, 11, 0, 1, 1, 1, 1, 13, 13, 1, 0,
0, 0, 0, 0, 0, 0, 19, 0, 0, 28, 23, 3,
5, 7, 31, 7, 0, 9, 25, 1, 1, 15, 0, 0,
0, 0, 0, 0, 0, 37, 0, 37, 0, 21, 21, 0,
0, 0, 0, 0, 40, 17, 40, 17, 34, 0, 34, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0
};
}
private static final byte _http_parser_trans_actions[] = init__http_parser_trans_actions_0();
static final int http_parser_start = 1;
static final int http_parser_first_final = 57;
static final int http_parser_error = 0;
static final int http_parser_en_main = 1;
// line 69 "ext/puma_http11/http11_parser.java.rl"
public static interface ElementCB {
public void call(Object data, int at, int length);
}
public static interface FieldCB {
public void call(Object data, int field, int flen, int value, int vlen);
}
public static class HttpParser {
int cs;
int body_start;
int content_len;
int nread;
int mark;
int field_start;
int field_len;
int query_start;
Object data;
ByteList buffer;
public FieldCB http_field;
public ElementCB request_method;
public ElementCB request_uri;
public ElementCB fragment;
public ElementCB request_path;
public ElementCB query_string;
public ElementCB http_version;
public ElementCB header_done;
public void init() {
cs = 0;
// line 240 "ext/puma_http11/org/jruby/puma/Http11Parser.java"
{
cs = http_parser_start;
}
// line 104 "ext/puma_http11/http11_parser.java.rl"
body_start = 0;
content_len = 0;
mark = 0;
nread = 0;
field_len = 0;
field_start = 0;
}
}
public final HttpParser parser = new HttpParser();
public int execute(ByteList buffer, int off) {
int p, pe;
int cs = parser.cs;
int len = buffer.length();
assert off<=len : "offset past end of buffer";
p = off;
pe = len;
// get a copy of the bytes, since it may not start at 0
// FIXME: figure out how to just use the bytes in-place
byte[] data = buffer.bytes();
parser.buffer = buffer;
// line 272 "ext/puma_http11/org/jruby/puma/Http11Parser.java"
{
int _klen;
int _trans = 0;
int _acts;
int _nacts;
int _keys;
int _goto_targ = 0;
_goto: while (true) {
switch ( _goto_targ ) {
case 0:
if ( p == pe ) {
_goto_targ = 4;
continue _goto;
}
if ( cs == 0 ) {
_goto_targ = 5;
continue _goto;
}
case 1:
_match: do {
_keys = _http_parser_key_offsets[cs];
_trans = _http_parser_index_offsets[cs];
_klen = _http_parser_single_lengths[cs];
if ( _klen > 0 ) {
int _lower = _keys;
int _mid;
int _upper = _keys + _klen - 1;
while (true) {
if ( _upper < _lower )
break;
_mid = _lower + ((_upper-_lower) >> 1);
if ( data[p] < _http_parser_trans_keys[_mid] )
_upper = _mid - 1;
else if ( data[p] > _http_parser_trans_keys[_mid] )
_lower = _mid + 1;
else {
_trans += (_mid - _keys);
break _match;
}
}
_keys += _klen;
_trans += _klen;
}
_klen = _http_parser_range_lengths[cs];
if ( _klen > 0 ) {
int _lower = _keys;
int _mid;
int _upper = _keys + (_klen<<1) - 2;
while (true) {
if ( _upper < _lower )
break;
_mid = _lower + (((_upper-_lower) >> 1) & ~1);
if ( data[p] < _http_parser_trans_keys[_mid] )
_upper = _mid - 2;
else if ( data[p] > _http_parser_trans_keys[_mid+1] )
_lower = _mid + 2;
else {
_trans += ((_mid - _keys)>>1);
break _match;
}
}
_trans += _klen;
}
} while (false);
_trans = _http_parser_indicies[_trans];
cs = _http_parser_trans_targs[_trans];
if ( _http_parser_trans_actions[_trans] != 0 ) {
_acts = _http_parser_trans_actions[_trans];
_nacts = (int) _http_parser_actions[_acts++];
while ( _nacts-- > 0 )
{
switch ( _http_parser_actions[_acts++] )
{
case 0:
// line 13 "ext/puma_http11/http11_parser.java.rl"
{parser.mark = p; }
break;
case 1:
// line 15 "ext/puma_http11/http11_parser.java.rl"
{ parser.field_start = p; }
break;
case 2:
// line 16 "ext/puma_http11/http11_parser.java.rl"
{ /* FIXME stub */ }
break;
case 3:
// line 17 "ext/puma_http11/http11_parser.java.rl"
{
parser.field_len = p-parser.field_start;
}
break;
case 4:
// line 21 "ext/puma_http11/http11_parser.java.rl"
{ parser.mark = p; }
break;
case 5:
// line 22 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.http_field != null) {
parser.http_field.call(parser.data, parser.field_start, parser.field_len, parser.mark, p-parser.mark);
}
}
break;
case 6:
// line 27 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.request_method != null)
parser.request_method.call(parser.data, parser.mark, p-parser.mark);
}
break;
case 7:
// line 31 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.request_uri != null)
parser.request_uri.call(parser.data, parser.mark, p-parser.mark);
}
break;
case 8:
// line 35 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.fragment != null)
parser.fragment.call(parser.data, parser.mark, p-parser.mark);
}
break;
case 9:
// line 40 "ext/puma_http11/http11_parser.java.rl"
{parser.query_start = p; }
break;
case 10:
// line 41 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.query_string != null)
parser.query_string.call(parser.data, parser.query_start, p-parser.query_start);
}
break;
case 11:
// line 46 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.http_version != null)
parser.http_version.call(parser.data, parser.mark, p-parser.mark);
}
break;
case 12:
// line 51 "ext/puma_http11/http11_parser.java.rl"
{
if(parser.request_path != null)
parser.request_path.call(parser.data, parser.mark, p-parser.mark);
}
break;
case 13:
// line 56 "ext/puma_http11/http11_parser.java.rl"
{
parser.body_start = p + 1;
if(parser.header_done != null)
parser.header_done.call(parser.data, p + 1, pe - p - 1);
{ p += 1; _goto_targ = 5; if (true) continue _goto;}
}
break;
// line 437 "ext/puma_http11/org/jruby/puma/Http11Parser.java"
}
}
}
case 2:
if ( cs == 0 ) {
_goto_targ = 5;
continue _goto;
}
if ( ++p != pe ) {
_goto_targ = 1;
continue _goto;
}
case 4:
case 5:
}
break; }
}
// line 130 "ext/puma_http11/http11_parser.java.rl"
parser.cs = cs;
parser.nread += (p - off);
assert p <= pe : "buffer overflow after parsing execute";
assert parser.nread <= len : "nread longer than length";
assert parser.body_start <= len : "body starts after buffer end";
assert parser.mark < len : "mark is after buffer end";
assert parser.field_len <= len : "field has length longer than whole buffer";
assert parser.field_start < len : "field starts after buffer end";
return parser.nread;
}
public int finish() {
if(has_error()) {
return -1;
} else if(is_finished()) {
return 1;
} else {
return 0;
}
}
public boolean has_error() {
return parser.cs == http_parser_error;
}
public boolean is_finished() {
return parser.cs == http_parser_first_final;
}
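/*
 * Illustrative driver sketch (added for clarity; the request bytes are made up):
 * callbacks are assigned on the public 'parser' field before execute() is called.
 *
 *   Http11Parser hp = new Http11Parser();
 *   hp.parser.init();
 *   hp.parser.request_method = (data, at, length) -> { /+ record the method +/ };
 *   ByteList request = new ByteList("GET / HTTP/1.1\r\nHost: x\r\n\r\n".getBytes());
 *   hp.execute(request, 0);
 *   boolean ok = hp.is_finished() && !hp.has_error();
 */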
}
|
|
package org.nearbyshops.DAOPreparedReviewItem;
import com.zaxxer.hikari.HikariDataSource;
import org.nearbyshops.Globals.Globals;
import org.nearbyshops.ModelEndpointReview.ItemReviewEndPoint;
import org.nearbyshops.ModelReviewItem.ItemReview;
import org.nearbyshops.ModelReviewItem.ItemReviewStatRow;
import org.nearbyshops.ModelReviewItem.ItemReviewThanks;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
/**
* Created by sumeet on 8/8/16.
*/
public class ItemReviewDAOPrepared {
private HikariDataSource dataSource = Globals.getDataSource();
@Override
protected void finalize() throws Throwable {
// TODO Auto-generated method stub
super.finalize();
}
public int saveItemReview(ItemReview itemReview)
{
Connection connection = null;
PreparedStatement statement = null;
int idOfInsertedRow = 0;
String insertStatement = "INSERT INTO "
+ ItemReview.TABLE_NAME
+ "("
+ ItemReview.ITEM_ID + ","
+ ItemReview.END_USER_ID + ","
+ ItemReview.RATING + ","
+ ItemReview.REVIEW_TEXT + ","
+ ItemReview.REVIEW_TITLE + ""
+ ") VALUES(?,?,?,?,?)";
try {
connection = dataSource.getConnection();
statement = connection.prepareStatement(insertStatement,PreparedStatement.RETURN_GENERATED_KEYS);
statement.setObject(1,itemReview.getItemID());
statement.setObject(2,itemReview.getEndUserID());
statement.setObject(3,itemReview.getRating());
statement.setString(4,itemReview.getReviewText());
statement.setString(5,itemReview.getReviewTitle());
idOfInsertedRow = statement.executeUpdate();
ResultSet rs = statement.getGeneratedKeys();
if(rs.next())
{
idOfInsertedRow = rs.getInt(1);
}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
finally
{
try {
if(statement!=null)
{statement.close();}
}
catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return idOfInsertedRow;
}
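/*
 * Illustrative usage sketch (added for clarity; the values are made up). The method
 * returns the generated primary key of the inserted row, so the caller can reference
 * the new review immediately:
 *
 *   ItemReview review = new ItemReview();
 *   review.setItemID(42);
 *   review.setEndUserID(7);
 *   review.setRating(5);
 *   review.setReviewTitle("Great item");
 *   review.setReviewText("Exactly as described.");
 *   int newReviewId = new ItemReviewDAOPrepared().saveItemReview(review);
 */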
public int updateItemReview(ItemReview itemReview)
{
String updateStatement = "UPDATE " + ItemReview.TABLE_NAME
+ " SET "
+ ItemReview.ITEM_ID + " = ?,"
+ ItemReview.END_USER_ID + " = ?,"
+ ItemReview.RATING + " = ?,"
+ ItemReview.REVIEW_TEXT + " = ?,"
+ ItemReview.REVIEW_TITLE + " = ?"
+ " WHERE "
+ ItemReview.ITEM_REVIEW_ID + " = ?";
Connection connection = null;
PreparedStatement statement = null;
int rowCountUpdated = 0;
try {
connection = dataSource.getConnection();
statement = connection.prepareStatement(updateStatement);
statement.setInt(1,itemReview.getItemID());
statement.setInt(2,itemReview.getEndUserID());
statement.setInt(3,itemReview.getRating());
statement.setString(4,itemReview.getReviewText());
statement.setString(5,itemReview.getReviewTitle());
statement.setInt(6,itemReview.getItemReviewID());
rowCountUpdated = statement.executeUpdate();
System.out.println("Total rows updated: " + rowCountUpdated);
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
finally
{
try {
if(statement!=null)
{statement.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowCountUpdated;
}
public int deleteItemReview(int itemReviewID)
{
String deleteStatement = "DELETE FROM " + ItemReview.TABLE_NAME
+ " WHERE " + ItemReview.ITEM_REVIEW_ID + " = ?";
Connection connection= null;
PreparedStatement statement = null;
int rowCountDeleted = 0;
try {
connection = dataSource.getConnection();
statement = connection.prepareStatement(deleteStatement);
statement.setInt(1,itemReviewID);
rowCountDeleted = statement.executeUpdate();
System.out.println("Rows Deleted: " + rowCountDeleted);
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
finally
{
try {
if(statement!=null)
{statement.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowCountDeleted;
}
public List<ItemReview> getItemReviews(
Integer itemID,
Integer endUserID,
String sortBy,
Integer limit, Integer offset
) {
boolean isFirst = true;
String query = "";
String queryNormal = "SELECT * FROM " + ItemReview.TABLE_NAME;
String queryJoin = "SELECT "
+ ItemReview.TABLE_NAME + "." + ItemReview.ITEM_REVIEW_ID + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.ITEM_ID + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.END_USER_ID + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.RATING + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.REVIEW_TEXT + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.REVIEW_DATE + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.REVIEW_TITLE + ","
+ " count(" + ItemReviewThanks.TABLE_NAME + "." + ItemReviewThanks.ITEM_REVIEW_ID + ") as thanks_count "
+ " FROM " + ItemReview.TABLE_NAME + " LEFT OUTER JOIN " + ItemReviewThanks.TABLE_NAME
+ " ON (" + ItemReview.TABLE_NAME + "." + ItemReview.ITEM_REVIEW_ID
+ " = " + ItemReviewThanks.TABLE_NAME + "." + ItemReviewThanks.ITEM_REVIEW_ID + ") ";
if(itemID != null)
{
queryJoin = queryJoin + " WHERE "
+ ItemReview.TABLE_NAME
+ "."
+ ItemReview.ITEM_ID + " = " + itemID;
queryNormal = queryNormal + " WHERE "
+ ItemReview.TABLE_NAME
+ "."
+ ItemReview.ITEM_ID + " = " + itemID;
isFirst = false;
}
if(endUserID != null)
{
String queryPartMember =
ItemReview.TABLE_NAME
+ "."
+ ItemReview.END_USER_ID + " = " + endUserID;
if(isFirst)
{
queryJoin = queryJoin + " WHERE " + queryPartMember;
queryNormal = queryNormal + " WHERE " + queryPartMember;
}else
{
queryJoin = queryJoin + " AND " + queryPartMember;
queryNormal = queryNormal + " AND " + queryPartMember;
}
isFirst = false;
}
queryJoin = queryJoin
+ " group by "
+ ItemReview.TABLE_NAME + "." + ItemReview.ITEM_REVIEW_ID + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.ITEM_ID + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.END_USER_ID + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.RATING + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.REVIEW_TEXT + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.REVIEW_DATE + ","
+ ItemReview.TABLE_NAME + "." + ItemReview.REVIEW_TITLE ;
// Applying filters
if(sortBy!=null)
{
if(!sortBy.equals(""))
{
String queryPartSortBy = " ORDER BY " + sortBy;
queryNormal = queryNormal + queryPartSortBy;
queryJoin = queryJoin + queryPartSortBy;
}
}
if(limit != null)
{
String queryPartLimitOffset = "";
if(offset != null && offset > 0)
{
queryPartLimitOffset = " LIMIT " + limit + " " + " OFFSET " + offset;
}else
{
queryPartLimitOffset = " LIMIT " + limit + " " + " OFFSET " + 0;
}
queryNormal = queryNormal + queryPartLimitOffset;
queryJoin = queryJoin + queryPartLimitOffset;
}
/*
Applying filters Ends
*/
query = queryJoin;
/*
if(bookCategoryID!=null)
{
query = queryJoin;
isJoinQuery = true;
}else
{
query = queryNormal;
}
*/
ArrayList<ItemReview> itemReviewsList = new ArrayList<ItemReview>();
Connection connection = null;
Statement statement = null;
ResultSet rs = null;
try {
connection = dataSource.getConnection();
statement = connection.createStatement();
rs = statement.executeQuery(query);
while(rs.next())
{
ItemReview itemReview = new ItemReview();
itemReview.setItemReviewID(rs.getInt(ItemReview.ITEM_REVIEW_ID));
itemReview.setItemID(rs.getInt(ItemReview.ITEM_ID));
itemReview.setEndUserID(rs.getInt(ItemReview.END_USER_ID));
itemReview.setRating(rs.getInt(ItemReview.RATING));
itemReview.setReviewText(rs.getString(ItemReview.REVIEW_TEXT));
itemReview.setReviewTitle(rs.getString(ItemReview.REVIEW_TITLE));
itemReview.setReviewDate(rs.getTimestamp(ItemReview.REVIEW_DATE));
itemReview.setRt_thanks_count(rs.getInt("thanks_count"));
itemReviewsList.add(itemReview);
}
System.out.println("books By CategoryID " + itemReviewsList.size());
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
finally
{
try {
if(rs!=null)
{rs.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(statement!=null)
{statement.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return itemReviewsList;
}
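/*
 * Usage note (added for clarity): the itemID/endUserID values and the sortBy fragment
 * are concatenated directly into the SQL (sortBy lands verbatim after ORDER BY), so
 * callers must pass only trusted, validated values. A hypothetical call could be:
 *
 *   List<ItemReview> reviews = dao.getItemReviews(42, null, ItemReview.RATING + " desc", 20, 0);
 */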
public ItemReviewEndPoint getEndPointMetadata(
Integer itemID,
Integer endUserID)
{
boolean isFirst = true;
String query = "";
String queryNormal = "SELECT "
+ "count( DISTINCT " + ItemReview.ITEM_REVIEW_ID + ") as item_count" + ""
+ " FROM " + ItemReview.TABLE_NAME;
if(itemID != null)
{
queryNormal = queryNormal + " WHERE "
+ ItemReview.TABLE_NAME
+ "."
+ ItemReview.ITEM_ID + " = " + itemID;
isFirst = false;
}
if(endUserID != null)
{
String queryPartMember =
ItemReview.TABLE_NAME
+ "."
+ ItemReview.END_USER_ID + " = " + endUserID;
if(isFirst)
{
queryNormal = queryNormal + " WHERE " + queryPartMember;
}else
{
queryNormal = queryNormal + " AND " + queryPartMember;
}
isFirst = false;
}
/*
if(bookID != null)
{
*//*
queryJoin = queryJoin + " AND "
+ ItemContract.TABLE_NAME
+ "."
+ ItemContract.ITEM_CATEGORY_ID + " = " + itemCategoryID;
*//*
//" WHERE ITEM_CATEGORY_ID = " + itemCategoryID
queryNormal = queryNormal + " WHERE "
+ BookReview.TABLE_NAME
+ "."
+ BookReview.BOOK_ID + " = " + bookID;
}
if(memberID != null)
{
*//*
queryJoin = queryJoin + " AND "
+ ItemContract.TABLE_NAME
+ "."
+ ItemContract.ITEM_CATEGORY_ID + " = " + itemCategoryID;
*//*
//" WHERE ITEM_CATEGORY_ID = " + itemCategoryID
queryNormal = queryNormal + " WHERE "
+ BookReview.TABLE_NAME
+ "."
+ BookReview.MEMBER_ID + " = " + memberID;
}*/
// Applying filters
/*
Applying filters Ends
*/
query = queryNormal;
ItemReviewEndPoint endPoint = new ItemReviewEndPoint();
Connection connection = null;
Statement statement = null;
ResultSet rs = null;
try {
connection = dataSource.getConnection();
statement = connection.createStatement();
rs = statement.executeQuery(query);
while(rs.next())
{
endPoint.setItemCount(rs.getInt("item_count"));
}
System.out.println("Item Count : " + endPoint.getItemCount());
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
finally
{
try {
if(rs!=null)
{rs.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(statement!=null)
{statement.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return endPoint;
}
public ItemReview getItemReview(int itemReviewID)
{
String query = "SELECT * FROM " + ItemReview.TABLE_NAME
+ " WHERE " + ItemReview.ITEM_REVIEW_ID + " = " + itemReviewID;
Connection connection = null;
Statement statement = null;
ResultSet rs = null;
//ItemCategory itemCategory = new ItemCategory();
ItemReview itemReview = null;
try {
connection = dataSource.getConnection();
statement = connection.createStatement();
rs = statement.executeQuery(query);
while(rs.next())
{
itemReview = new ItemReview();
itemReview.setItemReviewID(rs.getInt(ItemReview.ITEM_REVIEW_ID));
itemReview.setItemID(rs.getInt(ItemReview.ITEM_ID));
itemReview.setEndUserID(rs.getInt(ItemReview.END_USER_ID));
itemReview.setRating(rs.getInt(ItemReview.RATING));
itemReview.setReviewText(rs.getString(ItemReview.REVIEW_TEXT));
itemReview.setReviewTitle(rs.getString(ItemReview.REVIEW_TITLE));
itemReview.setReviewDate(rs.getTimestamp(ItemReview.REVIEW_DATE));
System.out.println("Get BookReview by DELIVERY_GUY_SELF_ID : " + itemReview.getItemID());
}
//System.out.println("Total itemCategories queried " + itemCategoryList.size());
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally
{
try {
if(rs!=null)
{rs.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(statement!=null)
{statement.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return itemReview;
}
public List<ItemReviewStatRow> getStats(Integer itemID)
{
//select rating, count(book_review_id) as reviews_count from book_review group by rating
String query = "SELECT " + ItemReview.RATING + ", count(" + ItemReview.ITEM_REVIEW_ID + ") as reviews_count "
+ " FROM " + ItemReview.TABLE_NAME;
if(itemID!=null)
{
query = query + " WHERE " + ItemReview.ITEM_ID + " = " + itemID;
}
query = query + " GROUP BY " + ItemReview.RATING;
Connection connection = null;
Statement statement = null;
ResultSet rs = null;
// ShopReviewStats shopReviewStats = new ShopReviewStats();
ArrayList<ItemReviewStatRow> rowList = new ArrayList<>();
try {
connection = dataSource.getConnection();
statement = connection.createStatement();
rs = statement.executeQuery(query);
while(rs.next())
{
ItemReviewStatRow row = new ItemReviewStatRow();
row.setRating(rs.getInt(ItemReview.RATING));
row.setReviews_count(rs.getInt("reviews_count"));
rowList.add(row);
// System.out.println("Get BookReview by DELIVERY_GUY_SELF_ID : " + shopReview.getItemID());
}
//System.out.println("Total itemCategories queried " + itemCategoryList.size());
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally
{
try {
if(rs!=null)
{rs.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(statement!=null)
{statement.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
if(connection!=null)
{connection.close();}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return rowList;
}
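/*
 * Usage note (added for clarity): each ItemReviewStatRow pairs a star rating with the
 * number of reviews carrying that rating. Assuming the usual getters exist on
 * ItemReviewStatRow, a hypothetical caller could build a rating histogram:
 *
 *   Map<Integer, Integer> histogram = new HashMap<>();
 *   for (ItemReviewStatRow row : dao.getStats(42))
 *       histogram.put(row.getRating(), row.getReviews_count());
 */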
}
|
|
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.util;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;
import java.util.Vector;
import org.cpsolver.coursett.model.RoomLocation;
import org.cpsolver.coursett.model.TimeLocation;
import org.unitime.commons.hibernate.util.HibernateUtil;
import org.unitime.localization.impl.Localization;
import org.unitime.timetable.ApplicationProperties;
import org.unitime.timetable.defaults.ApplicationProperty;
import org.unitime.timetable.gwt.resources.GwtConstants;
import org.unitime.timetable.model.Assignment;
import org.unitime.timetable.model.BuildingPref;
import org.unitime.timetable.model.Class_;
import org.unitime.timetable.model.CourseOffering;
import org.unitime.timetable.model.DatePattern;
import org.unitime.timetable.model.DepartmentalInstructor;
import org.unitime.timetable.model.ExactTimeMins;
import org.unitime.timetable.model.InstrOfferingConfig;
import org.unitime.timetable.model.InstructionalOffering;
import org.unitime.timetable.model.ItypeDesc;
import org.unitime.timetable.model.Preference;
import org.unitime.timetable.model.PreferenceLevel;
import org.unitime.timetable.model.RoomFeaturePref;
import org.unitime.timetable.model.RoomGroupPref;
import org.unitime.timetable.model.RoomPref;
import org.unitime.timetable.model.SchedulingSubpart;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.SubjectArea;
import org.unitime.timetable.model.TimePatternModel;
import org.unitime.timetable.model.TimePref;
import org.unitime.timetable.model.comparators.ClassComparator;
import org.unitime.timetable.model.comparators.InstrOfferingConfigComparator;
import org.unitime.timetable.model.comparators.SchedulingSubpartComparator;
import org.unitime.timetable.model.dao.SessionDAO;
import org.unitime.timetable.solver.TimetableDatabaseLoader;
import org.unitime.timetable.util.Constants;
import org.unitime.timetable.util.duration.DurationModel;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.PageSize;
import com.lowagie.text.Paragraph;
import com.lowagie.text.pdf.PdfWriter;
/**
* @author Tomas Muller
*/
public class PdfWorksheet {
protected static GwtConstants CONSTANTS = Localization.create(GwtConstants.class);
private boolean iUseCommitedAssignments = true;
private static int sNrChars = 133;
private static int sNrLines = 50;
private OutputStream iOut = null;
private Document iDoc = null;
private TreeSet<SubjectArea> iSubjectAreas;
private String iCourseNumber = null;
private int iPageNo = 0;
private int iLineNo = 0;
private StringBuffer iBuffer = new StringBuffer();
private CourseOffering iCourseOffering = null;
private SubjectArea iCurrentSubjectArea = null;
private PdfWorksheet(OutputStream out, Collection<SubjectArea> subjectAreas, String courseNumber) throws IOException, DocumentException {
iUseCommitedAssignments = ApplicationProperty.WorksheetPdfUseCommittedAssignments.isTrue();
iSubjectAreas = new TreeSet<SubjectArea>(new Comparator<SubjectArea>() {
@Override
public int compare(SubjectArea s1, SubjectArea s2) {
return s1.getSubjectAreaAbbreviation().compareTo(s2.getSubjectAreaAbbreviation());
}
});
iSubjectAreas.addAll(subjectAreas);
iCourseNumber = courseNumber;
if (iCourseNumber!=null && (iCourseNumber.trim().length()==0 || "*".equals(iCourseNumber.trim().length())))
iCourseNumber = null;
iDoc = new Document(PageSize.LETTER.rotate());
iOut = out;
PdfWriter.getInstance(iDoc, iOut);
String session = null;
String subjects = "";
for (SubjectArea sa: iSubjectAreas) {
if (!subjects.isEmpty()) subjects += ", ";
subjects += sa.getSubjectAreaAbbreviation();
if (session == null) session = sa.getSession().getLabel();
}
iDoc.addTitle(subjects + (iCourseNumber==null?"":" "+iCourseNumber) + " Worksheet");
iDoc.addAuthor(ApplicationProperty.WorksheetPdfAuthor.value().replace("%", Constants.getVersion()));
iDoc.addSubject(subjects + (session == null ? "" : " -- " + session));
iDoc.addCreator("UniTime "+Constants.getVersion()+", www.unitime.org");
if (!iSubjectAreas.isEmpty())
iCurrentSubjectArea = iSubjectAreas.first();
iDoc.open();
printHeader();
}
public static boolean print(OutputStream out, Collection<SubjectArea> subjectAreas) throws IOException, DocumentException {
TreeSet courses = new TreeSet(new Comparator() {
public int compare(Object o1, Object o2) {
CourseOffering co1 = (CourseOffering)o1;
CourseOffering co2 = (CourseOffering)o2;
int cmp = co1.getCourseName().compareTo(co2.getCourseName());
if (cmp != 0) return cmp;
return co1.getUniqueId().compareTo(co2.getUniqueId());
}
});
String subjectIds = "";
for (SubjectArea sa: subjectAreas)
subjectIds += (subjectIds.isEmpty() ? "" : ",") + sa.getUniqueId();
courses.addAll(SessionDAO.getInstance().getSession().createQuery(
"select co from CourseOffering co where co.subjectArea.uniqueId in (" + subjectIds + ")").list());
if (courses.isEmpty()) return false;
PdfWorksheet w = new PdfWorksheet(out, subjectAreas, null);
for (Iterator i=courses.iterator();i.hasNext();) {
w.print((CourseOffering)i.next());
}
w.lastPage();
w.close();
return true;
}
public static boolean print(OutputStream out, Collection<SubjectArea> subjectAreas, String courseNumber) throws IOException, DocumentException {
TreeSet courses = new TreeSet(new Comparator() {
public int compare(Object o1, Object o2) {
CourseOffering co1 = (CourseOffering)o1;
CourseOffering co2 = (CourseOffering)o2;
int cmp = co1.getCourseName().compareTo(co2.getCourseName());
if (cmp!=0) return cmp;
return co1.getUniqueId().compareTo(co2.getUniqueId());
}
});
String subjectIds = "";
for (SubjectArea sa: subjectAreas)
subjectIds += (subjectIds.isEmpty() ? "" : ",") + sa.getUniqueId();
String query = "select co from CourseOffering co where co.subjectArea.uniqueId in (" + subjectIds + ")";
if (courseNumber!=null && !courseNumber.trim().isEmpty()) {
query += " and co.courseNbr ";
if (courseNumber.indexOf('*')>=0)
query += " like '"+courseNumber.trim().replace('*', '%').toUpperCase()+"'";
else
query += " = '"+courseNumber.trim().toUpperCase()+"'";
}
courses.addAll(new SessionDAO().getSession().createQuery(query).list());
if (courses.isEmpty()) return false;
PdfWorksheet w = new PdfWorksheet(out, subjectAreas, courseNumber);
for (Iterator i=courses.iterator();i.hasNext();) {
w.print((CourseOffering)i.next());
}
w.lastPage();
w.close();
return true;
}
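/**
 * Builds the time column for a class: the committed assignment time when available, otherwise
 * arranged-hours text or the (required) time preferences, each suffixed with the date pattern.
 */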
private String[] time(Class_ clazz) {
String dpat = "";
DatePattern dp = clazz.effectiveDatePattern();
if (dp!=null && !dp.isDefault()) {
if (dp.getType().intValue()==DatePattern.sTypeAlternate)
dpat = " "+dp.getName();
else {
SimpleDateFormat dpf = new SimpleDateFormat("MM/dd");
dpat = ", "+dpf.format(dp.getStartDate())+" - "+dpf.format(dp.getEndDate());
}
}
Assignment assgn = (iUseCommitedAssignments?clazz.getCommittedAssignment():null);
if (assgn==null) {
Set timePrefs = clazz.getEffectiveTimePreferences();
if (timePrefs.isEmpty()) {
DurationModel dm = clazz.getSchedulingSubpart().getInstrOfferingConfig().getDurationModel();
Integer ah = dm.getArrangedHours(clazz.getSchedulingSubpart().getMinutesPerWk(), clazz.effectiveDatePattern());
if (ah != null)
return new String[]{"Arr "+ah+" Hrs"+dpat};
else
return new String[]{"Arr Hrs"+dpat};
}
boolean onlyOneReq = true;
TimeLocation req = null;
for (Iterator x=timePrefs.iterator();onlyOneReq && x.hasNext();) {
TimePref tp = (TimePref)x.next();
TimePatternModel model = tp.getTimePatternModel();
if (model.isExactTime()) {
if (req!=null) onlyOneReq=false;
else {
DurationModel dm = clazz.getSchedulingSubpart().getInstrOfferingConfig().getDurationModel();
int minsPerMeeting = dm.getExactTimeMinutesPerMeeting(clazz.getSchedulingSubpart().getMinutesPerWk(), clazz.effectiveDatePattern(), model.getExactDays());
int length = ExactTimeMins.getNrSlotsPerMtg(minsPerMeeting);
int breakTime = ExactTimeMins.getBreakTime(minsPerMeeting);
req = new TimeLocation(model.getExactDays(), model.getExactStartSlot(), length,PreferenceLevel.sIntLevelNeutral,0,dp.getUniqueId(),dp.getName(),dp.getPatternBitSet(),breakTime);
}
} else {
for (int d=0;d<model.getNrDays();d++)
for (int t=0;onlyOneReq && t<model.getNrTimes();t++) {
if (PreferenceLevel.sRequired.equals(model.getPreference(d,t))) {
if (req!=null) onlyOneReq=false;
else {
req = new TimeLocation(
model.getDayCode(d),
model.getStartSlot(t),
model.getSlotsPerMtg(),
PreferenceLevel.prolog2int(model.getPreference(d, t)),
0,
dp.getUniqueId(),
dp.getName(),
dp.getPatternBitSet(),
model.getBreakTime());
}
}
}
}
}
if (onlyOneReq && req!=null)
return new String[] {req.getDayHeader()+" "+req.getStartTimeHeader(CONSTANTS.useAmPm())+" - "+req.getEndTimeHeader(CONSTANTS.useAmPm())+dpat};
Vector t = new Vector();
for (Iterator x=timePrefs.iterator();x.hasNext();) {
TimePref tp = (TimePref)x.next();
String tx = tp.getTimePatternModel().toString();
for (StringTokenizer s=new StringTokenizer(tx,",");s.hasMoreTokens();)
t.add(s.nextToken().trim());
}
String[] time = new String[t.size()];
for (int x=0;x<time.length;x++)
time[x]=t.elementAt(x)+dpat;
return time;
}
TimeLocation t = assgn.getTimeLocation();
return new String[] {t.getDayHeader()+" "+t.getStartTimeHeader(CONSTANTS.useAmPm())+" - "+t.getEndTimeHeader(CONSTANTS.useAmPm())+dpat};
}
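/**
 * Builds the room column for a class: assigned rooms when available, otherwise the computed
 * room locations or the room, building, feature and group preferences.
 */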
private String[] room(Class_ clazz) {
Assignment assgn = (iUseCommitedAssignments?clazz.getCommittedAssignment():null);
if (assgn==null || assgn.getRoomLocations().isEmpty()) {
List<RoomLocation> roomLocations = TimetableDatabaseLoader.computeRoomLocations(clazz);
if (roomLocations.size()==clazz.getNbrRooms().intValue()) {
String[] rooms = new String[roomLocations.size()];
for (int x=0;x<roomLocations.size();x++) {
RoomLocation r = (RoomLocation)roomLocations.get(x);
rooms[x] = r.getName();
}
return rooms;
}
Vector roomPrefs = new Vector();
boolean allRoomReq = true;
for (Iterator i=clazz.effectivePreferences(BuildingPref.class).iterator();i.hasNext();) {
Preference pref = (Preference)i.next();
roomPrefs.add(pref.getPrefLevel().getAbbreviation()+" "+pref.preferenceText());
allRoomReq=false;
}
for (Iterator i=clazz.effectivePreferences(RoomPref.class).iterator();i.hasNext();) {
Preference pref = (Preference)i.next();
roomPrefs.add(pref.getPrefLevel().getAbbreviation()+" "+pref.preferenceText());
if (!PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())) allRoomReq=false;
}
for (Iterator i=clazz.effectivePreferences(RoomFeaturePref.class).iterator();i.hasNext();) {
Preference pref = (Preference)i.next();
roomPrefs.add(pref.getPrefLevel().getAbbreviation()+" "+pref.preferenceText());
allRoomReq=false;
}
for (Iterator i=clazz.effectivePreferences(RoomGroupPref.class).iterator();i.hasNext();) {
Preference pref = (Preference)i.next();
roomPrefs.add(pref.getPrefLevel().getAbbreviation()+" "+pref.preferenceText());
allRoomReq=false;
}
if (allRoomReq) {
roomPrefs.clear();
for (Iterator i=clazz.effectivePreferences(RoomPref.class).iterator();i.hasNext();) {
Preference pref = (Preference)i.next();
roomPrefs.add(pref.preferenceText());
}
}
String[] rooms = new String[roomPrefs.size()];
for (int x=0;x<roomPrefs.size();x++) {
rooms[x] = roomPrefs.elementAt(x).toString();
}
return rooms;
}
String[] rooms = new String[assgn.getRoomLocations().size()];
for (int x=0;x<assgn.getRoomLocations().size();x++) {
RoomLocation r = (RoomLocation)assgn.getRoomLocations().elementAt(x);
rooms[x] = r.getName();
}
return rooms;
}
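/** Short names of the lead instructors of the given class, one entry per instructor. */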
private String[] instructor(Class_ clazz) {
List<DepartmentalInstructor> leads = clazz.getLeadInstructors();
String[] instr = new String[leads.size()];
for (int x=0;x<leads.size();x++) {
DepartmentalInstructor in = (DepartmentalInstructor)leads.get(x);
instr[x] = in.nameShort();
}
return instr;
}
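/**
 * Prints one course offering: the course header line (title, credit, course organization, limits),
 * the schedule book note, and the class listing with times, rooms, instructors and course groups.
 */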
protected void print(CourseOffering co) throws DocumentException {
if (!iCurrentSubjectArea.equals(co.getSubjectArea())) {
lastPage();
iCurrentSubjectArea = co.getSubjectArea();
iDoc.newPage();
printHeader();
} else {
if (iLineNo+5>=sNrLines) newPage();
}
iCourseOffering = co;
int courseLimit = -1;
InstructionalOffering offering = co.getInstructionalOffering();
if (co.getReservation() != null)
courseLimit = co.getReservation();
if (courseLimit<0) {
if (offering.getCourseOfferings().size()==1 && offering.getLimit()!=null)
courseLimit = offering.getLimit().intValue();
}
boolean unlimited = false;
String courseOrg = "";
for (Iterator i=offering.getInstrOfferingConfigs().iterator();i.hasNext();) {
InstrOfferingConfig config = (InstrOfferingConfig)i.next();
if (config.isUnlimitedEnrollment().booleanValue()) unlimited=true;
Hashtable creditPerIType = new Hashtable();
for (Iterator j=config.getSchedulingSubparts().iterator();j.hasNext();) {
SchedulingSubpart subpart = (SchedulingSubpart)j.next();
if (subpart.getMinutesPerWk().intValue()<=0) continue;
Integer credit = (Integer)creditPerIType.get(subpart.getItype());
creditPerIType.put(subpart.getItype(), new Integer((credit==null?0:credit.intValue())+subpart.getMinutesPerWk().intValue()));
}
TreeSet itypes = new TreeSet(new Comparator() {
public int compare(Object o1, Object o2) {
ItypeDesc i1 = (ItypeDesc)o1;
ItypeDesc i2 = (ItypeDesc)o2;
return i1.getItype().compareTo(i2.getItype());
}
});
itypes.addAll(creditPerIType.keySet());
for (Iterator j=itypes.iterator();j.hasNext();) {
ItypeDesc itype = (ItypeDesc)j.next();
int minPerWeek = ((Integer)creditPerIType.get(itype)).intValue();
if (courseOrg.length()>0) courseOrg+=", ";
courseOrg+=itype.getAbbv().trim()+" "+((minPerWeek+49)/50);
}
break;
}
int enrl = -1;
String s1 = co.getSubjectArea().getSession().getAcademicTerm().substring(0,1) + co.getSubjectArea().getSession().getAcademicYear().substring(2);
String s2 = co.getSubjectArea().getSession().getAcademicTerm().substring(0,1) +
new DecimalFormat("00").format(Integer.parseInt(co.getSubjectArea().getSession().getAcademicYear().substring(2))-1);
if (co.getProjectedDemand()!=null) enrl = co.getProjectedDemand().intValue();
int lastLikeEnrl = co.getCourseOfferingDemands().size();
String title = co.getTitle();
if (title==null) title="*** Title not set";
println(" Proj "+s2+" ");
println("Course Title/Notes Credit Course Organization Limit Enrl Enrl Consent Cross List");
println("---------- ------------------------------------- ------ ------------------------------- ----- ----- ----- ---------- ----------");
println(rpad(co.getCourseName(),10)+" "+
rpad(title,37)+(title.length()>37?"-":" ")+" "+
rpad(co.getCredit()==null?"":co.getCredit().creditAbbv(),5)+" "+
rpad(courseOrg,31)+" "+
lpad(courseLimit<=0?unlimited?" inf":"":String.valueOf(courseLimit),5)+" "+
lpad(enrl<=0?"":String.valueOf(enrl),5)+" "+
lpad(lastLikeEnrl<=0?"":String.valueOf(lastLikeEnrl),5)+" "+
rpad(co.getConsentType()==null?"":co.getConsentType().getAbbv(),10)+" "+
rpad(offering.getCourseOfferings().size()>1?offering.getCourseName():"",10)
);
while (title.length()>37) {
title = title.substring(37);
println(" "+rpad(title,37)+(title.length()>37?"-":" "));
}
if (co.getScheduleBookNote()!=null && co.getScheduleBookNote().trim().length()>0) {
String note = co.getScheduleBookNote();
note = note.replaceAll("\\. ", "\\.\n");
for (StringTokenizer s=new StringTokenizer(note,"\n\r");s.hasMoreTokens();) {
String line = s.nextToken().trim();
while (line.length()>sNrChars-7) {
println(" "+line.substring(0,sNrChars-7)+"-");
line = line.substring(sNrChars-7);
}
println(" "+line);
}
}
if (iLineNo+5>=sNrLines) newPage();
else println("");
println(" "+s1+" "+s2+" Proj | Type");
println("Curr Reqst Enrl Enrl | Instr Number Time Limit Bldg-Room Instructor Mgr");
println("---- ----- ---- ---- | ----- ------ ---------------------------------------- ----- ------------------ --------------------- ------");
Vector rTable = new Vector();
//TODO: Print request data based on curricula
/*
int a=0,b=0,c=0;
for (Iterator i=co.getAcadAreaReservations().iterator();i.hasNext();) {
AcadAreaReservation ar = (AcadAreaReservation)i.next();
rTable.add(
lpad(ar.getAcademicArea().getAcademicAreaAbbreviation(),4)+" "+
lpad(ar.getRequested()==null?"":ar.getRequested().toString(),5)+" "+
lpad(ar.getPriorEnrollment()==null?"":ar.getPriorEnrollment().toString(),5)+" "+
lpad(ar.getProjectedEnrollment()==null?"":ar.getProjectedEnrollment().toString(),5));
if (ar.getRequested()!=null) a+=ar.getRequested().intValue();
if (ar.getPriorEnrollment()!=null) b+=ar.getPriorEnrollment().intValue();
if (ar.getProjectedEnrollment()!=null) c+=ar.getProjectedEnrollment().intValue();
}
if (rTable.isEmpty()) {
rTable.add(" *** No Request Data ");
} else {
rTable.add(
" Tot "+
lpad(String.valueOf(a),5)+" "+
lpad(String.valueOf(b),5)+" "+
lpad(String.valueOf(c),5));
rTable.add(" ");
rTable.add(" *Please check requests");
}
*/
Vector cTable = new Vector();
if (offering.isNotOffered().booleanValue())
cTable.add(" ** Course not offered");
Vector gTable = new Vector();
TreeSet configs = new TreeSet(new InstrOfferingConfigComparator(null));
configs.addAll(offering.getInstrOfferingConfigs());
for (Iterator i=configs.iterator();i.hasNext();) {
InstrOfferingConfig config = (InstrOfferingConfig)i.next();
if (offering.getInstrOfferingConfigs().size()>1)
cTable.add("** Configuration "+config.getName());
TreeSet subparts = new TreeSet(new SchedulingSubpartComparator());
subparts.addAll(config.getSchedulingSubparts());
for (Iterator j=subparts.iterator();j.hasNext();) {
SchedulingSubpart subpart = (SchedulingSubpart)j.next();
TreeSet classes = new TreeSet(new ClassComparator(ClassComparator.COMPARE_BY_HIERARCHY));
classes.addAll(subpart.getClasses());
String subpartLabel = subpart.getItype().getAbbv();
boolean same = false;
for (Iterator k=classes.iterator();k.hasNext();) {
Class_ clazz = (Class_)k.next();
String[] time = time(clazz);
String[] rooms = room(clazz);
String[] instr = instructor(clazz);
for (int x=0;x<Math.max(Math.max(1,time.length),Math.max(instr.length,rooms.length));x++) {
cTable.add(
rpad(same?"":x==0?subpartLabel:"",5)+" "+
lpad(x==0?clazz.getSectionNumberString():"",6)+" "+
rpad(time!=null && x<time.length?time[x]:"",40)+" "+
lpad(x==0 && clazz.getClassLimit()>0 && clazz.getNbrRooms().intValue()>0?(clazz.getNbrRooms().intValue()>1?clazz.getNbrRooms()+"x":"")+String.valueOf(clazz.getClassLimit()):"",5)+" "+
rpad(rooms!=null && x<rooms.length?rooms[x]:"",18)+" "+
rpad(instr!=null && x<instr.length?instr[x]:"",21)+" "+
rpad(x==0?clazz.getManagingDept().getShortLabel():"",6)
);
}
same=true;
if (clazz.getParentClass()!=null && clazz.getChildClasses().isEmpty()) {
String gr = clazz.getSchedulingSubpart().getItype().getAbbv().trim()+
lpad(clazz.getSectionNumberString(),4);
Class_ parent = clazz.getParentClass();
while (parent!=null) {
gr = parent.getSchedulingSubpart().getItype().getAbbv().trim()+
lpad(parent.getSectionNumberString(),4)+
", "+gr;
parent = parent.getParentClass();
}
gTable.add(gr);
}
}
}
}
for (int i=0;i<1+Math.max(rTable.size(), cTable.size());i++) {
String res = null;
String cl = null;
if (i<rTable.size()) res = (String)rTable.elementAt(i);
if (i<cTable.size()) cl = (String)cTable.elementAt(i);
println(rpad(res,23)+" | "+(cl==null?"":cl));
}
if (!gTable.isEmpty()) {
println(rep('-',sNrChars));
println(" Course groups:");
int half = (gTable.size()+1)/2;
for (int i=0;i<half;i++) {
String gr1 = (String)gTable.elementAt(i);
String gr2 = (half+i<gTable.size()?(String)gTable.elementAt(half+i):"");
println(" "+rpad(gr1,60)+" | "+rpad(gr2,60));
}
}
println(rep('=',sNrChars));
iCourseOffering = null;
}
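/** Appends one line of text to the page buffer; the buffer is flushed to the PDF in printFooter(). */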
private void out(String text) throws DocumentException {
if (iBuffer.length()>0) iBuffer.append("\n");
iBuffer.append(text);
}
private static String rep(char ch, int cnt) {
String ret = "";
for (int i=0;i<cnt;i++) ret+=ch;
return ret;
}
private void outln(char ch) throws DocumentException {
out(rep(ch,sNrChars));
}
private String lpad(String s, char ch, int len) {
while (s.length()<len) s = ch + s;
return s;
}
private String lpad(String s, int len) {
if (s==null) s="";
if (s.length()>len) return s.substring(0,len);
return lpad(s,' ',len);
}
private String rpad(String s, char ch, int len) {
while (s.length()<len) s = s + ch;
return s;
}
private String rpad(String s, int len) {
if (s==null) s="";
if (s.length()>len) return s.substring(0,len);
return rpad(s,' ',len);
}
private String mpad(String s1, String s2, char ch, int len) {
String m = "";
while ((s1+m+s2).length()<len) m += ch;
return s1+m+s2;
}
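/** Overlays string s onto line starting at column idx, padding the line with spaces when it is shorter than idx. */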
private String render(String line, String s, int idx) {
String a = (line.length()<=idx?rpad(line,' ',idx):line.substring(0,idx));
String b = (line.length()<=idx+s.length()?"":line.substring(idx+s.length()));
return a + s + b;
}
private String renderMiddle(String line, String s) {
return render(line, s, (sNrChars - s.length())/2);
}
private String renderEnd(String line, String s) {
return render(line, s, sNrChars-s.length());
}
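/** Prints the page header: report title, date, academic session, and a continuation note when a course spans pages. */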
protected void printHeader() throws DocumentException {
out(renderMiddle(
ApplicationProperty.WorksheetPdfAuthor.value().replace("%", Constants.getVersion()),
ApplicationProperty.WorksheetPdfTitle.value()
));
out(mpad(
new SimpleDateFormat("EEE MMM dd, yyyy").format(new Date()),
iCurrentSubjectArea.getSession().getAcademicInitiative()+" "+
iCurrentSubjectArea.getSession().getAcademicTerm()+" "+
iCurrentSubjectArea.getSession().getAcademicYear(),' ',sNrChars));
outln('=');
iLineNo=0;
if (iCourseOffering!=null)
println("("+iCourseOffering.getCourseName()+" Continued)");
}
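/** Prints the page footer (page number and subject area), flushes the buffered page text into the PDF and resets the buffer. */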
protected void printFooter() throws DocumentException {
out("");
out(renderEnd(renderMiddle("","Page "+(iPageNo+1)),"<"+iCurrentSubjectArea.getSubjectAreaAbbreviation()+(iCourseNumber!=null?" "+iCourseNumber:"")+"> "));
//FIXME: For some reason when a line starts with space, the line is shifted by one space in the resulting PDF (when using iText 5.0.2)
Paragraph p = new Paragraph(iBuffer.toString().replace("\n ", "\n "), PdfFont.getFixedFont());
p.setLeading(9.5f); //was 13.5f
iDoc.add(p);
iBuffer = new StringBuffer();
iPageNo++;
}
protected void lastPage() throws DocumentException {
while (iLineNo<sNrLines) {
out(""); iLineNo++;
}
printFooter();
}
protected void newPage() throws DocumentException {
while (iLineNo<sNrLines) {
out(""); iLineNo++;
}
printFooter();
iDoc.newPage();
printHeader();
}
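/** Outputs one line of text and starts a new page when the page line limit is reached. */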
protected void println(String text) throws DocumentException {
out(text);
iLineNo++;
if (iLineNo>=sNrLines) newPage();
}
private void close() throws IOException {
iDoc.close();
}
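/**
 * Command-line entry point: prints one worksheet PDF per subject area (all subject areas of the
 * academic session configured by the tmtbl.pdf.worksheet.session property, or only those whose
 * abbreviations are given as arguments), written to &lt;abbreviation&gt;.pdf.
 */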
public static void main(String[] args) {
try {
HibernateUtil.configureHibernate(ApplicationProperties.getProperties());
Long sessionId = Long.valueOf(ApplicationProperties.getProperty("tmtbl.pdf.worksheet.session", "165924"));
Session session = new SessionDAO().get(sessionId);
if (session==null) {
System.err.println("Academic session "+sessionId+" not found, use property tmtbl.pdf.worksheet.session to set academic session.");
System.exit(0);
} else {
System.out.println("Session: "+session);
}
TreeSet subjectAreas = null;
if (args.length>0) {
subjectAreas = new TreeSet();
for (int i=0;i<args.length;i++) {
SubjectArea sa = SubjectArea.findByAbbv(sessionId, args[i]);
if (sa==null)
System.err.println("Subject area "+args[i]+" not found.");
else
subjectAreas.add(sa);
}
} else {
subjectAreas = new TreeSet(SubjectArea.getSubjectAreaList(sessionId));
}
for (Iterator i=subjectAreas.iterator();i.hasNext();) {
SubjectArea sa = (SubjectArea)i.next();
System.out.println("Printing subject area "+sa.getSubjectAreaAbbreviation()+" ...");
FileOutputStream out = new FileOutputStream(sa.getSubjectAreaAbbreviation()+".pdf");
List<SubjectArea> sas = new ArrayList<SubjectArea>(); sas.add(sa);
PdfWorksheet.print(out, sas);
out.flush(); out.close();
}
HibernateUtil.closeHibernate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
|
package org.apache.maven.plugin.invoker;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.installer.ArtifactInstaller;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.model.Model;
import org.apache.maven.model.Parent;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.FileUtils;
/**
* Installs the project artifacts of the main build into the local repository as a preparation to run the sub projects.
* More precisely, all artifacts of the project itself, all its locally reachable parent POMs and all its dependencies
* from the reactor will be installed to the local repository.
*
* @since 1.2
* @author Paul Gier
* @author Benjamin Bentmann
* @version $Id$
*/
@Mojo( name = "install", defaultPhase = LifecyclePhase.PRE_INTEGRATION_TEST,
requiresDependencyResolution = ResolutionScope.RUNTIME, threadSafe = true )
public class InstallMojo
extends AbstractMojo
{
/**
* Maven artifact install component to copy artifacts to the local repository.
*/
@Component
private ArtifactInstaller installer;
/**
* The component used to create artifacts.
*/
@Component
private ArtifactFactory artifactFactory;
/**
* The component used to create artifacts.
*/
@Component
private ArtifactRepositoryFactory repositoryFactory;
/**
*/
@Parameter( property = "localRepository", required = true, readonly = true )
private ArtifactRepository localRepository;
/**
* The path to the local repository into which the project artifacts should be installed for the integration tests.
* If not set, the regular local repository will be used. To prevent soiling of your regular local repository with
* possibly broken artifacts, it is strongly recommended to use an isolated repository for the integration tests
* (e.g. <code>${project.build.directory}/it-repo</code>).
*/
@Parameter( property = "invoker.localRepositoryPath" )
private File localRepositoryPath;
/**
* The current Maven project.
*/
@Component
private MavenProject project;
/**
* The set of Maven projects in the reactor build.
*/
@Parameter( defaultValue = "${reactorProjects}", readonly = true )
private Collection<MavenProject> reactorProjects;
/**
* A flag used to disable the installation procedure. This is primarily intended for usage from the command line to
* occasionally adjust the build.
*
* @since 1.4
*/
@Parameter( property = "invoker.skip", defaultValue = "false" )
private boolean skipInstallation;
/**
* The identifiers of already installed artifacts, used to avoid multiple installation of the same artifact.
*/
private Collection<String> installedArtifacts;
/**
* The identifiers of already copied artifacts, used to avoid multiple installation of the same artifact.
*/
private Collection<String> copiedArtifacts;
/**
* Extra dependencies that need to be installed on the local repository.<BR>
* Format:
*
* <pre>
* groupId:artifactId:version:type:classifier
* </pre>
*
* Examples:
*
* <pre>
* org.apache.maven.plugins:maven-clean-plugin:2.4:maven-plugin
* org.apache.maven.plugins:maven-clean-plugin:2.4:jar:javadoc
* </pre>
*
* If the type is 'maven-plugin' the plugin will try to resolve the artifact using plugin remote repositories,
* instead of using artifact remote repositories.
*
* @since 1.6
*/
@Parameter
private String[] extraArtifacts;
/**
*/
@Component
private ArtifactResolver resolver;
/**
*/
@Parameter( defaultValue = "${project.remoteArtifactRepositories}", readonly = true )
private List<ArtifactRepository> remoteArtifactRepositories;
/**
*/
@Parameter( defaultValue = "${project.pluginArtifactRepositories}", readonly = true )
private List<ArtifactRepository> remotePluginRepositories;
/**
*/
@Component
private ArtifactMetadataSource artifactMetadataSource;
/**
* Performs this mojo's tasks.
*
* @throws MojoExecutionException If the artifacts could not be installed.
*/
public void execute()
throws MojoExecutionException
{
if ( skipInstallation )
{
getLog().info( "Skipping artifact installation per configuration." );
return;
}
ArtifactRepository testRepository = createTestRepository();
installedArtifacts = new HashSet<String>();
copiedArtifacts = new HashSet<String>();
installProjectDependencies( project, reactorProjects, testRepository );
installProjectParents( project, testRepository );
installProjectArtifacts( project, testRepository );
installExtraArtifacts( testRepository, extraArtifacts );
}
/**
* Creates the local repository for the integration tests. If the user specified a custom repository location, the
* custom repository will have the same identifier, layout and policies as the real local repository. That means
* apart from the location, the custom repository will be indistinguishable from the real repository such that its
* usage is transparent to the integration tests.
*
* @return The local repository for the integration tests, never <code>null</code>.
* @throws MojoExecutionException If the repository could not be created.
*/
private ArtifactRepository createTestRepository()
throws MojoExecutionException
{
ArtifactRepository testRepository = localRepository;
if ( localRepositoryPath != null )
{
try
{
if ( !localRepositoryPath.exists() && !localRepositoryPath.mkdirs() )
{
throw new IOException( "Failed to create directory: " + localRepositoryPath );
}
testRepository =
repositoryFactory.createArtifactRepository( localRepository.getId(),
localRepositoryPath.toURI().toURL().toExternalForm(),
localRepository.getLayout(),
localRepository.getSnapshots(),
localRepository.getReleases() );
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to create local repository: " + localRepositoryPath, e );
}
}
return testRepository;
}
/**
* Installs the specified artifact to the local repository. Note: This method should only be used for artifacts that
* originate from the current (reactor) build. Artifacts that have been grabbed from the user's local repository
* should be installed to the test repository via {@link #copyArtifact(File, Artifact, ArtifactRepository)}.
*
* @param file The file associated with the artifact, must not be <code>null</code>. This is in most cases the value
* of <code>artifact.getFile()</code> with the exception of the main artifact from a project with
* packaging "pom". Projects with packaging "pom" have no main artifact file. They have however artifact
* metadata (e.g. site descriptors) which needs to be installed.
* @param artifact The artifact to install, must not be <code>null</code>.
* @param testRepository The local repository to install the artifact to, must not be <code>null</code>.
* @throws MojoExecutionException If the artifact could not be installed (e.g. has no associated file).
*/
private void installArtifact( File file, Artifact artifact, ArtifactRepository testRepository )
throws MojoExecutionException
{
try
{
if ( file == null )
{
throw new IllegalStateException( "Artifact has no associated file: " + artifact.getId() );
}
if ( !file.isFile() )
{
throw new IllegalStateException( "Artifact is not fully assembled: " + file );
}
if ( installedArtifacts.add( artifact.getId() ) )
{
installer.install( file, artifact, testRepository );
}
else
{
getLog().debug( "Not re-installing " + artifact + ", " + file );
}
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to install artifact: " + artifact, e );
}
}
/**
* Installs the specified artifact to the local repository. This method serves basically the same purpose as
* {@link #installArtifact(File, Artifact, ArtifactRepository)} but is meant for artifacts that have been resolved
* from the user's local repository (and not the current build outputs). The subtle difference here is that
* artifacts from the repository have already undergone transformations and these manipulations should not be redone
* by the artifact installer. For this reason, this method performs plain copy operations to install the artifacts.
*
* @param file The file associated with the artifact, must not be <code>null</code>.
* @param artifact The artifact to install, must not be <code>null</code>.
* @param testRepository The local repository to install the artifact to, must not be <code>null</code>.
* @throws MojoExecutionException If the artifact could not be installed (e.g. has no associated file).
*/
private void copyArtifact( File file, Artifact artifact, ArtifactRepository testRepository )
throws MojoExecutionException
{
try
{
if ( file == null )
{
throw new IllegalStateException( "Artifact has no associated file: " + artifact.getId() );
}
if ( !file.isFile() )
{
throw new IllegalStateException( "Artifact is not fully assembled: " + file );
}
if ( copiedArtifacts.add( artifact.getId() ) )
{
File destination = new File( testRepository.getBasedir(), testRepository.pathOf( artifact ) );
getLog().debug( "Installing " + file + " to " + destination );
copyFileIfDifferent( file, destination );
MetadataUtils.createMetadata( destination, artifact );
}
else
{
getLog().debug( "Not re-installing " + artifact + ", " + file );
}
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to stage artifact: " + artifact, e );
}
}
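/**
 * Copies the source file to the destination only when the timestamps or file sizes differ,
 * preserving the source's last-modified time on the copy.
 */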
private void copyFileIfDifferent( File src, File dst )
throws IOException
{
if ( src.lastModified() != dst.lastModified() || src.length() != dst.length() )
{
FileUtils.copyFile( src, dst );
dst.setLastModified( src.lastModified() );
}
}
/**
* Installs the main artifact and any attached artifacts of the specified project to the local repository.
*
* @param mvnProject The project whose artifacts should be installed, must not be <code>null</code>.
* @param testRepository The local repository to install the artifacts to, must not be <code>null</code>.
* @throws MojoExecutionException If any artifact could not be installed.
*/
private void installProjectArtifacts( MavenProject mvnProject, ArtifactRepository testRepository )
throws MojoExecutionException
{
try
{
// Install POM (usually attached as metadata but that happens only as a side effect of the Install Plugin)
installProjectPom( mvnProject, testRepository );
// Install the main project artifact (if the project has one, e.g. has no "pom" packaging)
Artifact mainArtifact = mvnProject.getArtifact();
if ( mainArtifact.getFile() != null )
{
installArtifact( mainArtifact.getFile(), mainArtifact, testRepository );
}
// Install any attached project artifacts
Collection<Artifact> attachedArtifacts = (Collection<Artifact>) mvnProject.getAttachedArtifacts();
for ( Artifact attachedArtifact : attachedArtifacts )
{
installArtifact( attachedArtifact.getFile(), attachedArtifact, testRepository );
}
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to install project artifacts: " + mvnProject, e );
}
}
/**
* Installs the (locally reachable) parent POMs of the specified project to the local repository. The parent POMs
* from the reactor must be installed or the forked IT builds will fail when using a clean repository.
*
* @param mvnProject The project whose parent POMs should be installed, must not be <code>null</code>.
* @param testRepository The local repository to install the POMs to, must not be <code>null</code>.
* @throws MojoExecutionException If any POM could not be installed.
*/
private void installProjectParents( MavenProject mvnProject, ArtifactRepository testRepository )
throws MojoExecutionException
{
try
{
for ( MavenProject parent = mvnProject.getParent(); parent != null; parent = parent.getParent() )
{
if ( parent.getFile() == null )
{
copyParentPoms( parent.getGroupId(), parent.getArtifactId(), parent.getVersion(), testRepository );
break;
}
installProjectPom( parent, testRepository );
}
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to install project parents: " + mvnProject, e );
}
}
/**
* Installs the POM of the specified project to the local repository.
*
* @param mvnProject The project whose POM should be installed, must not be <code>null</code>.
* @param testRepository The local repository to install the POM to, must not be <code>null</code>.
* @throws MojoExecutionException If the POM could not be installed.
*/
private void installProjectPom( MavenProject mvnProject, ArtifactRepository testRepository )
throws MojoExecutionException
{
try
{
Artifact pomArtifact = null;
if ( "pom".equals( mvnProject.getPackaging() ) )
{
pomArtifact = mvnProject.getArtifact();
}
if ( pomArtifact == null )
{
pomArtifact =
artifactFactory.createProjectArtifact( mvnProject.getGroupId(), mvnProject.getArtifactId(),
mvnProject.getVersion() );
}
installArtifact( mvnProject.getFile(), pomArtifact, testRepository );
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to install POM: " + mvnProject, e );
}
}
/**
* Installs the dependent projects from the reactor to the local repository. The dependencies on other modules from
* the reactor must be installed or the forked IT builds will fail when using a clean repository.
*
* @param mvnProject The project whose dependent projects should be installed, must not be <code>null</code>.
* @param reactorProjects The set of projects in the reactor build, must not be <code>null</code>.
* @param testRepository The local repository to install the POMs to, must not be <code>null</code>.
* @throws MojoExecutionException If any dependency could not be installed.
*/
private void installProjectDependencies( MavenProject mvnProject, Collection<MavenProject> reactorProjects,
ArtifactRepository testRepository )
throws MojoExecutionException
{
// keep track if we have passed mvnProject in reactorProjects
boolean foundCurrent = false;
// ... into dependencies that were resolved from reactor projects ...
Collection<String> dependencyProjects = new LinkedHashSet<String>();
// index available reactor projects
Map<String, MavenProject> projects = new HashMap<String, MavenProject>();
for ( MavenProject reactorProject : reactorProjects )
{
String projectId =
reactorProject.getGroupId() + ':' + reactorProject.getArtifactId() + ':' + reactorProject.getVersion();
projects.put( projectId, reactorProject );
// only add projects of reactor build previous to this mvnProject
if ( !( foundCurrent |= ( mvnProject.equals( reactorProject ) ) ) )
{
dependencyProjects.add( projectId );
}
}
// group transitive dependencies (even those that don't contribute to the class path like POMs) ...
Collection<Artifact> artifacts = (Collection<Artifact>) mvnProject.getArtifacts();
// ... and those that were resolved from the (local) repo
Collection<Artifact> dependencyArtifacts = new LinkedHashSet<Artifact>();
for ( Artifact artifact : artifacts )
{
// workaround for MNG-2961 to ensure the base version does not contain a timestamp
artifact.isSnapshot();
String projectId = artifact.getGroupId() + ':' + artifact.getArtifactId() + ':' + artifact.getBaseVersion();
if ( !projects.containsKey( projectId ) )
{
dependencyArtifacts.add( artifact );
}
}
// install dependencies
try
{
// copy dependencies that were resolved from the local repo
for ( Artifact artifact : dependencyArtifacts )
{
copyArtifact( artifact, testRepository );
}
// install dependencies that were resolved from the reactor
for ( String projectId : dependencyProjects )
{
MavenProject dependencyProject = projects.get( projectId );
installProjectArtifacts( dependencyProject, testRepository );
installProjectParents( dependencyProject, testRepository );
}
}
catch ( Exception e )
{
throw new MojoExecutionException( "Failed to install project dependencies: " + mvnProject, e );
}
}
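/**
 * Stages a dependency that was resolved from the local repository: copies its POM (and parent POMs)
 * and then the artifact file itself into the test repository.
 */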
private void copyArtifact( Artifact artifact, ArtifactRepository testRepository )
throws MojoExecutionException
{
copyPoms( artifact, testRepository );
Artifact depArtifact =
artifactFactory.createArtifactWithClassifier( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getBaseVersion(), artifact.getType(),
artifact.getClassifier() );
File artifactFile = artifact.getFile();
copyArtifact( artifactFile, depArtifact, testRepository );
}
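/**
 * Copies the POM of the given artifact (and, recursively, its parent POMs) from the local
 * repository to the test repository, if the POM file exists locally.
 */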
private void copyPoms( Artifact artifact, ArtifactRepository testRepository )
throws MojoExecutionException
{
Artifact pomArtifact =
artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getBaseVersion() );
File pomFile = new File( localRepository.getBasedir(), localRepository.pathOf( pomArtifact ) );
if ( pomFile.isFile() )
{
copyArtifact( pomFile, pomArtifact, testRepository );
copyParentPoms( pomFile, testRepository );
}
}
/**
* Installs all parent POMs of the specified POM file that are available in the local repository.
*
* @param pomFile The path to the POM file whose parents should be installed, must not be <code>null</code>.
* @param testRepository The local repository to install the POMs to, must not be <code>null</code>.
* @throws MojoExecutionException If any (existing) parent POM could not be installed.
*/
private void copyParentPoms( File pomFile, ArtifactRepository testRepository )
throws MojoExecutionException
{
Model model = PomUtils.loadPom( pomFile );
Parent parent = model.getParent();
if ( parent != null )
{
copyParentPoms( parent.getGroupId(), parent.getArtifactId(), parent.getVersion(), testRepository );
}
}
/**
* Installs the specified POM and all its parent POMs to the local repository.
*
* @param groupId The group id of the POM which should be installed, must not be <code>null</code>.
* @param artifactId The artifact id of the POM which should be installed, must not be <code>null</code>.
* @param version The version of the POM which should be installed, must not be <code>null</code>.
* @param testRepository The local repository to install the POMs to, must not be <code>null</code>.
* @throws MojoExecutionException If any (existing) parent POM could not be installed.
*/
private void copyParentPoms( String groupId, String artifactId, String version, ArtifactRepository testRepository )
throws MojoExecutionException
{
Artifact pomArtifact = artifactFactory.createProjectArtifact( groupId, artifactId, version );
if ( installedArtifacts.contains( pomArtifact.getId() ) || copiedArtifacts.contains( pomArtifact.getId() ) )
{
getLog().debug( "Not re-installing " + pomArtifact );
return;
}
File pomFile = new File( localRepository.getBasedir(), localRepository.pathOf( pomArtifact ) );
if ( pomFile.isFile() )
{
copyArtifact( pomFile, pomArtifact, testRepository );
copyParentPoms( pomFile, testRepository );
}
}
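/**
 * Resolves each configured extra artifact (transitively) against the plugin or artifact remote
 * repositories, as appropriate for its type, and copies the resolved artifacts and POMs into the
 * test repository.
 */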
private void installExtraArtifacts( ArtifactRepository testRepository, String[] extraArtifacts )
throws MojoExecutionException
{
if ( extraArtifacts == null )
{
return;
}
Artifact originatingArtifact = project.getArtifact();
for (String extraArtifact : extraArtifacts) {
String[] gav = extraArtifact.split(":");
if (gav.length < 3 || gav.length > 5) {
throw new MojoExecutionException("Invalid artifact " + extraArtifact);
}
String groupId = gav[0];
String artifactId = gav[1];
String version = gav[2];
String type = "jar";
if (gav.length > 3) {
type = gav[3];
}
String classifier = null;
if (gav.length == 5) {
classifier = gav[4];
}
List<ArtifactRepository> remoteRepositories;
if ("maven-plugin".equals(type)) {
remoteRepositories = this.remotePluginRepositories;
} else {
remoteRepositories = this.remoteArtifactRepositories;
}
Artifact artifact = null;
try {
artifact = artifactFactory.createArtifactWithClassifier(groupId, artifactId, version, type, classifier);
ArtifactResolutionResult arr =
resolver.resolveTransitively(Collections.singleton(artifact), originatingArtifact,
remoteRepositories, localRepository, artifactMetadataSource);
if (!groupId.equals(artifact.getGroupId()) || !artifactId.equals(artifact.getArtifactId())
|| !version.equals(artifact.getVersion())) {
artifact =
artifactFactory.createArtifactWithClassifier(groupId, artifactId, version, type, classifier);
copyPoms(artifact, testRepository);
}
for (Artifact arrArtifact : (Set<Artifact>) arr.getArtifacts()) {
copyArtifact(arrArtifact, testRepository);
}
} catch (Exception e) {
throw new MojoExecutionException("Unable to resolve dependencies for: " + artifact, e);
}
}
}
}
|
|
/*
* Copyright 2017 John L. Jegutanis
* Copyright 2018 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.script;
import org.bitcoinj.core.LegacyAddress;
import org.bitcoinj.core.SegwitAddress;
import org.bitcoinj.core.Sha256Hash;
import org.bitcoinj.core.Utils;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import static org.bitcoinj.script.Script.decodeFromOpN;
import static org.bitcoinj.script.ScriptOpCodes.*;
/**
* This is a Script pattern matcher with some typical script patterns
*/
public class ScriptPattern {
/**
* Returns true if this script is of the form {@code DUP HASH160 <pubkey hash> EQUALVERIFY CHECKSIG}, ie, payment to an
* address like {@code 1VayNert3x1KzbpzMGt2qdqrAThiRovi8}. This form was originally intended for the case where you wish
* to send somebody money with a written code because their node is offline, but over time has become the standard
* way to make payments due to the short and recognizable base58 form that addresses come in.
*/
public static boolean isP2PKH(Script script) {
List<ScriptChunk> chunks = script.chunks;
if (chunks.size() != 5)
return false;
if (!chunks.get(0).equalsOpCode(OP_DUP))
return false;
if (!chunks.get(1).equalsOpCode(OP_HASH160))
return false;
byte[] chunk2data = chunks.get(2).data;
if (chunk2data == null)
return false;
if (chunk2data.length != LegacyAddress.LENGTH)
return false;
if (!chunks.get(3).equalsOpCode(OP_EQUALVERIFY))
return false;
if (!chunks.get(4).equalsOpCode(OP_CHECKSIG))
return false;
return true;
}
/**
* Extract the pubkey hash from a P2PKH scriptPubKey. It's important that the script is in the correct form, so you
* will want to guard calls to this method with {@link #isP2PKH(Script)}.
*/
public static byte[] extractHashFromP2PKH(Script script) {
return script.chunks.get(2).data;
}
/**
* <p>
* Whether or not this is a scriptPubKey representing a P2SH output. In such outputs, the logic that
* controls reclamation is not actually in the output at all. Instead there's just a hash, and it's up to the
* spending input to provide a program matching that hash.
* </p>
* <p>
* P2SH is described by <a href="https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki">BIP16</a>.
* </p>
*/
public static boolean isP2SH(Script script) {
List<ScriptChunk> chunks = script.chunks;
// We check for the effective serialized form because BIP16 defines a P2SH output using an exact byte
// template, not the logical program structure. Thus you can have two programs that look identical when
// printed out but one is a P2SH script and the other isn't! :(
// We explicitly test that the op code used to load the 20 bytes is 0x14 and not something logically
// equivalent like {@code OP_HASH160 OP_PUSHDATA1 0x14 <20 bytes of script hash> OP_EQUAL}
if (chunks.size() != 3)
return false;
if (!chunks.get(0).equalsOpCode(OP_HASH160))
return false;
ScriptChunk chunk1 = chunks.get(1);
if (chunk1.opcode != 0x14)
return false;
byte[] chunk1data = chunk1.data;
if (chunk1data == null)
return false;
if (chunk1data.length != LegacyAddress.LENGTH)
return false;
if (!chunks.get(2).equalsOpCode(OP_EQUAL))
return false;
return true;
}
/**
* Extract the script hash from a P2SH scriptPubKey. It's important that the script is in the correct form, so you
* will want to guard calls to this method with {@link #isP2SH(Script)}.
*/
public static byte[] extractHashFromP2SH(Script script) {
return script.chunks.get(1).data;
}
/**
* Returns true if this script is of the form {@code <pubkey> OP_CHECKSIG}. This form was originally intended for transactions
* where the peers talked to each other directly via TCP/IP, but has fallen out of favor with time due to that mode
* of operation being susceptible to man-in-the-middle attacks. It is still used in coinbase outputs and can be
* useful in more exotic types of transaction, but today most payments are to addresses.
*/
public static boolean isP2PK(Script script) {
List<ScriptChunk> chunks = script.chunks;
if (chunks.size() != 2)
return false;
ScriptChunk chunk0 = chunks.get(0);
if (chunk0.isOpCode())
return false;
byte[] chunk0data = chunk0.data;
if (chunk0data == null)
return false;
if (chunk0data.length <= 1)
return false;
if (!chunks.get(1).equalsOpCode(OP_CHECKSIG))
return false;
return true;
}
/**
* Extract the pubkey from a P2PK scriptPubKey. It's important that the script is in the correct form, so you will
* want to guard calls to this method with {@link #isP2PK(Script)}.
*/
public static byte[] extractKeyFromP2PK(Script script) {
return script.chunks.get(0).data;
}
/**
* Returns true if this script is of the form {@code OP_0 <hash>}. This can either be a P2WPKH or P2WSH scriptPubKey. These
* two script types were introduced with segwit.
*/
public static boolean isP2WH(Script script) {
List<ScriptChunk> chunks = script.chunks;
if (chunks.size() != 2)
return false;
if (!chunks.get(0).equalsOpCode(OP_0))
return false;
byte[] chunk1data = chunks.get(1).data;
if (chunk1data == null)
return false;
if (chunk1data.length != SegwitAddress.WITNESS_PROGRAM_LENGTH_PKH
&& chunk1data.length != SegwitAddress.WITNESS_PROGRAM_LENGTH_SH)
return false;
return true;
}
/**
* Returns true if this script is of the form {@code OP_0 <hash>} and hash is 20 bytes long. This can only be a P2WPKH
* scriptPubKey. This script type was introduced with segwit.
*/
public static boolean isP2WPKH(Script script) {
if (!isP2WH(script))
return false;
List<ScriptChunk> chunks = script.chunks;
if (!chunks.get(0).equalsOpCode(OP_0))
return false;
byte[] chunk1data = chunks.get(1).data;
return chunk1data != null && chunk1data.length == SegwitAddress.WITNESS_PROGRAM_LENGTH_PKH;
}
/**
* Returns true if this script is of the form {@code OP_0 <hash>} and hash is 32 bytes long. This can only be a P2WSH
* scriptPubKey. This script type was introduced with segwit.
*/
public static boolean isP2WSH(Script script) {
if (!isP2WH(script))
return false;
List<ScriptChunk> chunks = script.chunks;
if (!chunks.get(0).equalsOpCode(OP_0))
return false;
byte[] chunk1data = chunks.get(1).data;
return chunk1data != null && chunk1data.length == SegwitAddress.WITNESS_PROGRAM_LENGTH_SH;
}
/**
* Extract the pubkey hash from a P2WPKH or the script hash from a P2WSH scriptPubKey. It's important that the
* script is in the correct form, so you will want to guard calls to this method with
* {@link #isP2WH(Script)}.
*/
public static byte[] extractHashFromP2WH(Script script) {
return script.chunks.get(1).data;
}
/**
* Returns whether this script matches the format used for m-of-n multisig outputs:
* {@code [m] [keys...] [n] CHECKMULTISIG}
*/
public static boolean isSentToMultisig(Script script) {
List<ScriptChunk> chunks = script.chunks;
if (chunks.size() < 4) return false;
ScriptChunk chunk = chunks.get(chunks.size() - 1);
// Must end in OP_CHECKMULTISIG[VERIFY].
if (!chunk.isOpCode()) return false;
if (!(chunk.equalsOpCode(OP_CHECKMULTISIG) || chunk.equalsOpCode(OP_CHECKMULTISIGVERIFY))) return false;
try {
// Second to last chunk must be an OP_N opcode and there should be that many data chunks (keys).
ScriptChunk m = chunks.get(chunks.size() - 2);
if (!m.isOpCode()) return false;
int numKeys = decodeFromOpN(m.opcode);
if (numKeys < 1 || chunks.size() != 3 + numKeys) return false;
for (int i = 1; i < chunks.size() - 2; i++) {
if (chunks.get(i).isOpCode()) return false;
}
// First chunk must be an OP_N opcode too.
if (decodeFromOpN(chunks.get(0).opcode) < 1) return false;
} catch (IllegalStateException e) {
return false; // Not an OP_N opcode.
}
return true;
}
/**
* Returns whether this script matches the format used for LOCKTIMEVERIFY transactions.
*/
public static boolean isSentToCltvPaymentChannel(Script script) {
List<ScriptChunk> chunks = script.chunks;
if (chunks.size() != 10) return false;
// Check that opcodes match the pre-determined format.
if (!chunks.get(0).equalsOpCode(OP_IF)) return false;
// chunk[1] = recipient pubkey
if (!chunks.get(2).equalsOpCode(OP_CHECKSIGVERIFY)) return false;
if (!chunks.get(3).equalsOpCode(OP_ELSE)) return false;
// chunk[4] = locktime
if (!chunks.get(5).equalsOpCode(OP_CHECKLOCKTIMEVERIFY)) return false;
if (!chunks.get(6).equalsOpCode(OP_DROP)) return false;
if (!chunks.get(7).equalsOpCode(OP_ENDIF)) return false;
// chunk[8] = sender pubkey
if (!chunks.get(9).equalsOpCode(OP_CHECKSIG)) return false;
return true;
}
/**
* Retrieves the public key of the sender from a LOCKTIMEVERIFY transaction. It's important that the script is in
* the correct form, so you will want to guard calls to this method with
* {@link #isSentToCltvPaymentChannel(Script)}.
*/
public static byte[] extractSenderPubKeyFromCltvPaymentChannel(Script script) {
return script.chunks.get(8).data;
}
/**
* Retrieves the public key of the recipient from a LOCKTIMEVERIFY transaction. It's important that the script is in
* the correct form, so you will want to guard calls to this method with
* {@link #isSentToCltvPaymentChannel(Script)}.
*/
public static byte[] extractRecipientPubKeyFromCltvPaymentChannel(Script script) {
return script.chunks.get(1).data;
}
/**
* Retrieves the locktime from a LOCKTIMEVERIFY transaction. It's important that the script is in the correct form,
* so you will want to guard calls to this method with {@link #isSentToCltvPaymentChannel(Script)}.
*/
public static BigInteger extractExpiryFromCltvPaymentChannel(Script script) {
return Script.castToBigInteger(script.chunks.get(4).data, 5, false);
}
/**
* Returns whether this script is using OP_RETURN to store arbitrary data.
*/
public static boolean isOpReturn(Script script) {
List<ScriptChunk> chunks = script.chunks;
return chunks.size() > 0 && chunks.get(0).equalsOpCode(ScriptOpCodes.OP_RETURN);
}
private static final byte[] SEGWIT_COMMITMENT_HEADER = Utils.HEX.decode("aa21a9ed");
/**
* Returns whether this script matches the pattern for a segwit commitment (in an output of the coinbase
* transaction).
*/
public static boolean isWitnessCommitment(Script script) {
List<ScriptChunk> chunks = script.chunks;
if (chunks.size() < 2)
return false;
if (!chunks.get(0).equalsOpCode(ScriptOpCodes.OP_RETURN))
return false;
byte[] chunkData = chunks.get(1).data;
if (chunkData == null || chunkData.length != 36)
return false;
if (!Arrays.equals(Arrays.copyOfRange(chunkData, 0, 4), SEGWIT_COMMITMENT_HEADER))
return false;
return true;
}
/**
* Retrieves the hash from a segwit commitment (in an output of the coinbase transaction).
*/
public static Sha256Hash extractWitnessCommitmentHash(Script script) {
return Sha256Hash.wrap(Arrays.copyOfRange(script.chunks.get(1).data, 4, 36));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.RegionMetrics;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Size;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UserMetrics;
import org.apache.hadoop.hbase.client.CompactionState;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionStatesCount;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.replication.ReplicationLoadSink;
import org.apache.hadoop.hbase.replication.ReplicationLoadSource;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test for RegionsRecoveryChore
*/
@Category({MasterTests.class, SmallTests.class})
public class TestRegionsRecoveryChore {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestRegionsRecoveryChore.class);
private static final Logger LOG = LoggerFactory.getLogger(TestRegionsRecoveryChore.class);
private static final HBaseTestingUtility HBASE_TESTING_UTILITY = new HBaseTestingUtility();
private static final String UTF_8_CHARSET = StandardCharsets.UTF_8.name();
private HMaster hMaster;
private AssignmentManager assignmentManager;
private RegionsRecoveryChore regionsRecoveryChore;
private static int regionNo;
public static final byte[][] REGION_NAME_LIST = new byte[][]{
new byte[]{114, 101, 103, 105, 111, 110, 50, 49, 95, 51},
new byte[]{114, 101, 103, 105, 111, 110, 50, 53, 95, 51},
new byte[]{114, 101, 103, 105, 111, 110, 50, 54, 95, 52},
new byte[]{114, 101, 103, 105, 111, 110, 51, 50, 95, 53},
new byte[]{114, 101, 103, 105, 111, 110, 51, 49, 95, 52},
new byte[]{114, 101, 103, 105, 111, 110, 51, 48, 95, 51},
new byte[]{114, 101, 103, 105, 111, 110, 50, 48, 95, 50},
new byte[]{114, 101, 103, 105, 111, 110, 50, 52, 95, 50},
new byte[]{114, 101, 103, 105, 111, 110, 50, 57, 95, 50},
new byte[]{114, 101, 103, 105, 111, 110, 51, 53, 95, 50},
new byte[]{114, 101, 103, 105, 111, 110, 49, 48, 56, 95, 49, 49}
};
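// Test configuration with the regions recovery chore check interval lowered to 100.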
private Configuration getCustomConf() {
Configuration conf = HBASE_TESTING_UTILITY.getConfiguration();
conf.setInt("hbase.master.regions.recovery.check.interval", 100);
return conf;
}
@Before
public void setUp() throws Exception {
this.hMaster = Mockito.mock(HMaster.class);
this.assignmentManager = Mockito.mock(AssignmentManager.class);
}
@After
public void tearDown() throws Exception {
Mockito.verifyNoMoreInteractions(this.hMaster);
Mockito.verifyNoMoreInteractions(this.assignmentManager);
}
@Test
public void testRegionReopensWithStoreRefConfig() throws Exception {
regionNo = 0;
ClusterMetrics clusterMetrics = TestRegionsRecoveryChore.getClusterMetrics(4);
final Map<ServerName, ServerMetrics> serverMetricsMap =
clusterMetrics.getLiveServerMetrics();
LOG.debug("All Region Names with refCount....");
for (ServerMetrics serverMetrics : serverMetricsMap.values()) {
Map<byte[], RegionMetrics> regionMetricsMap = serverMetrics.getRegionMetrics();
for (RegionMetrics regionMetrics : regionMetricsMap.values()) {
LOG.debug("name: " + new String(regionMetrics.getRegionName()) + " refCount: " +
regionMetrics.getStoreRefCount());
}
}
Mockito.when(hMaster.getClusterMetrics()).thenReturn(clusterMetrics);
Mockito.when(hMaster.getAssignmentManager()).thenReturn(assignmentManager);
for (byte[] regionName : REGION_NAME_LIST) {
Mockito.when(assignmentManager.getRegionInfo(regionName))
.thenReturn(TestRegionsRecoveryChore.getRegionInfo(regionName));
}
Stoppable stoppable = new StoppableImplementation();
Configuration configuration = getCustomConf();
configuration.setInt("hbase.regions.recovery.store.file.ref.count", 300);
regionsRecoveryChore = new RegionsRecoveryChore(stoppable, configuration, hMaster);
regionsRecoveryChore.chore();
// Verify that we need to reopen regions of 2 tables
Mockito.verify(hMaster, Mockito.times(2)).reopenRegions(Mockito.any(), Mockito.anyList(),
Mockito.anyLong(), Mockito.anyLong());
Mockito.verify(hMaster, Mockito.times(1)).getClusterMetrics();
// Verify that we need to reopen total 3 regions that have refCount > 300
Mockito.verify(hMaster, Mockito.times(3)).getAssignmentManager();
Mockito.verify(assignmentManager, Mockito.times(3))
.getRegionInfo(Mockito.any());
}
@Test
public void testRegionReopensWithLessThreshold() throws Exception {
regionNo = 0;
ClusterMetrics clusterMetrics = TestRegionsRecoveryChore.getClusterMetrics(4);
final Map<ServerName, ServerMetrics> serverMetricsMap =
clusterMetrics.getLiveServerMetrics();
LOG.debug("All Region Names with refCount....");
for (ServerMetrics serverMetrics : serverMetricsMap.values()) {
Map<byte[], RegionMetrics> regionMetricsMap = serverMetrics.getRegionMetrics();
for (RegionMetrics regionMetrics : regionMetricsMap.values()) {
LOG.debug("name: " + new String(regionMetrics.getRegionName()) + " refCount: " +
regionMetrics.getStoreRefCount());
}
}
Mockito.when(hMaster.getClusterMetrics()).thenReturn(clusterMetrics);
Mockito.when(hMaster.getAssignmentManager()).thenReturn(assignmentManager);
for (byte[] regionName : REGION_NAME_LIST) {
Mockito.when(assignmentManager.getRegionInfo(regionName))
.thenReturn(TestRegionsRecoveryChore.getRegionInfo(regionName));
}
Stoppable stoppable = new StoppableImplementation();
Configuration configuration = getCustomConf();
configuration.setInt("hbase.regions.recovery.store.file.ref.count", 400);
regionsRecoveryChore = new RegionsRecoveryChore(stoppable, configuration, hMaster);
regionsRecoveryChore.chore();
// Verify that we need to reopen regions of only 1 table
Mockito.verify(hMaster, Mockito.times(1)).reopenRegions(Mockito.any(), Mockito.anyList(),
Mockito.anyLong(), Mockito.anyLong());
Mockito.verify(hMaster, Mockito.times(1)).getClusterMetrics();
// Verify that we need to reopen only 1 region with refCount > 400
Mockito.verify(hMaster, Mockito.times(1)).getAssignmentManager();
Mockito.verify(assignmentManager, Mockito.times(1))
.getRegionInfo(Mockito.any());
}
@Test
public void testRegionReopensWithoutStoreRefConfig() throws Exception {
regionNo = 0;
ClusterMetrics clusterMetrics = TestRegionsRecoveryChore.getClusterMetrics(10);
final Map<ServerName, ServerMetrics> serverMetricsMap =
clusterMetrics.getLiveServerMetrics();
LOG.debug("All Region Names with refCount....");
for (ServerMetrics serverMetrics : serverMetricsMap.values()) {
Map<byte[], RegionMetrics> regionMetricsMap = serverMetrics.getRegionMetrics();
for (RegionMetrics regionMetrics : regionMetricsMap.values()) {
LOG.debug("name: " + new String(regionMetrics.getRegionName()) + " refCount: " +
regionMetrics.getStoreRefCount());
}
}
Mockito.when(hMaster.getClusterMetrics()).thenReturn(clusterMetrics);
Mockito.when(hMaster.getAssignmentManager()).thenReturn(assignmentManager);
for (byte[] regionName : REGION_NAME_LIST) {
Mockito.when(assignmentManager.getRegionInfo(regionName))
.thenReturn(TestRegionsRecoveryChore.getRegionInfo(regionName));
}
Stoppable stoppable = new StoppableImplementation();
Configuration configuration = getCustomConf();
configuration.unset("hbase.regions.recovery.store.file.ref.count");
regionsRecoveryChore = new RegionsRecoveryChore(stoppable, configuration, hMaster);
regionsRecoveryChore.chore();
// Verify that by default the feature is turned off so no regions
// should be reopened
Mockito.verify(hMaster, Mockito.times(0)).reopenRegions(Mockito.any(), Mockito.anyList(),
Mockito.anyLong(), Mockito.anyLong());
// default maxCompactedStoreFileRefCount is -1 (no regions to be reopened using AM)
Mockito.verify(hMaster, Mockito.times(0)).getAssignmentManager();
Mockito.verify(assignmentManager, Mockito.times(0))
.getRegionInfo(Mockito.any());
}
private static ClusterMetrics getClusterMetrics(int noOfLiveServer) {
ClusterMetrics clusterMetrics = new ClusterMetrics() {
@Nullable
@Override
public String getHBaseVersion() {
return null;
}
@Override
public List<ServerName> getDeadServerNames() {
return null;
}
@Override
public Map<ServerName, ServerMetrics> getLiveServerMetrics() {
Map<ServerName, ServerMetrics> liveServerMetrics = new HashMap<>();
for (int i = 0; i < noOfLiveServer; i++) {
ServerName serverName = ServerName.valueOf("rs_" + i, 16010, 12345);
liveServerMetrics.put(serverName, TestRegionsRecoveryChore.getServerMetrics(i + 3));
}
return liveServerMetrics;
}
@Nullable
@Override
public ServerName getMasterName() {
return null;
}
@Override
public List<ServerName> getBackupMasterNames() {
return null;
}
@Override
public List<RegionState> getRegionStatesInTransition() {
return null;
}
@Nullable
@Override
public String getClusterId() {
return null;
}
@Override
public List<String> getMasterCoprocessorNames() {
return null;
}
@Nullable
@Override
public Boolean getBalancerOn() {
return null;
}
@Override
public int getMasterInfoPort() {
return 0;
}
@Override
public List<ServerName> getServersName() {
return null;
}
@Override
public Map<TableName, RegionStatesCount> getTableRegionStatesCount() {
return null;
}
};
return clusterMetrics;
}
private static ServerMetrics getServerMetrics(int noOfRegions) {
ServerMetrics serverMetrics = new ServerMetrics() {
@Override
public ServerName getServerName() {
return null;
}
@Override
public long getRequestCountPerSecond() {
return 0;
}
@Override
public long getRequestCount() {
return 0;
}
@Override
public Size getUsedHeapSize() {
return null;
}
@Override
public Size getMaxHeapSize() {
return null;
}
@Override
public int getInfoServerPort() {
return 0;
}
@Override
public List<ReplicationLoadSource> getReplicationLoadSourceList() {
return null;
}
@Override
public Map<String, List<ReplicationLoadSource>> getReplicationLoadSourceMap() {
return null;
}
@Nullable
@Override
public ReplicationLoadSink getReplicationLoadSink() {
return null;
}
@Override
public Map<byte[], RegionMetrics> getRegionMetrics() {
Map<byte[], RegionMetrics> regionMetricsMap = new HashMap<>();
for (int i = 0; i < noOfRegions; i++) {
byte[] regionName = Bytes.toBytes("region" + regionNo + "_" + i);
regionMetricsMap.put(regionName,
TestRegionsRecoveryChore.getRegionMetrics(regionName, 100 * i));
++regionNo;
}
return regionMetricsMap;
}
@Override
public Map<byte[], UserMetrics> getUserMetrics() {
return new HashMap<>();
}
@Override
public Set<String> getCoprocessorNames() {
return null;
}
@Override
public long getReportTimestamp() {
return 0;
}
@Override
public long getLastReportTimestamp() {
return 0;
}
};
return serverMetrics;
}
private static RegionMetrics getRegionMetrics(byte[] regionName, int compactedStoreRefCount) {
RegionMetrics regionMetrics = new RegionMetrics() {
@Override
public byte[] getRegionName() {
return regionName;
}
@Override
public int getStoreCount() {
return 0;
}
@Override
public int getStoreFileCount() {
return 0;
}
@Override
public Size getStoreFileSize() {
return null;
}
@Override
public Size getMemStoreSize() {
return null;
}
@Override
public long getReadRequestCount() {
return 0;
}
@Override
public long getWriteRequestCount() {
return 0;
}
@Override
public long getFilteredReadRequestCount() {
return 0;
}
@Override
public Size getStoreFileIndexSize() {
return null;
}
@Override
public Size getStoreFileRootLevelIndexSize() {
return null;
}
@Override
public Size getStoreFileUncompressedDataIndexSize() {
return null;
}
@Override
public Size getBloomFilterSize() {
return null;
}
@Override
public long getCompactingCellCount() {
return 0;
}
@Override
public long getCompactedCellCount() {
return 0;
}
@Override
public long getCompletedSequenceId() {
return 0;
}
@Override
public Map<byte[], Long> getStoreSequenceId() {
return null;
}
@Override
public Size getUncompressedStoreFileSize() {
return null;
}
@Override
public float getDataLocality() {
return 0;
}
@Override
public long getLastMajorCompactionTimestamp() {
return 0;
}
@Override
public int getStoreRefCount() {
return compactedStoreRefCount;
}
@Override
public int getMaxCompactedStoreFileRefCount() {
return compactedStoreRefCount;
}
@Override
public float getDataLocalityForSsd() {
return 0;
}
@Override
public long getBlocksLocalWeight() {
return 0;
}
@Override
public long getBlocksLocalWithSsdWeight() {
return 0;
}
@Override
public long getBlocksTotalWeight() {
return 0;
}
@Override
public CompactionState getCompactionState() {
return null;
}
};
return regionMetrics;
}
private static RegionInfo getRegionInfo(byte[] regionNameBytes) {
RegionInfo regionInfo = new RegionInfo() {
@Override
public String getShortNameToLog() {
return null;
}
@Override
public long getRegionId() {
return 0;
}
@Override
public byte[] getRegionName() {
return new byte[0];
}
@Override
public String getRegionNameAsString() {
try {
return new String(regionNameBytes, UTF_8_CHARSET);
} catch (UnsupportedEncodingException e) {
return "";
}
}
@Override
public String getEncodedName() {
return null;
}
@Override
public byte[] getEncodedNameAsBytes() {
return new byte[0];
}
@Override
public byte[] getStartKey() {
return new byte[0];
}
@Override
public byte[] getEndKey() {
return new byte[0];
}
@Override
public TableName getTable() {
String regionName;
try {
regionName = new String(regionNameBytes, UTF_8_CHARSET);
} catch (UnsupportedEncodingException e) {
regionName = "";
}
int regionNo = Integer.parseInt(regionName.split("_")[1]);
TableName tableName = TableName.valueOf("table_" + regionNo % 3);
return tableName;
}
@Override
public int getReplicaId() {
return 0;
}
@Override
public boolean isSplit() {
return false;
}
@Override
public boolean isOffline() {
return false;
}
@Override
public boolean isSplitParent() {
return false;
}
@Override
public boolean isMetaRegion() {
return false;
}
@Override
public boolean containsRange(byte[] rangeStartKey, byte[] rangeEndKey) {
return false;
}
@Override
public boolean containsRow(byte[] row) {
return false;
}
};
return regionInfo;
}
/**
* Simple helper class that just keeps track of whether or not it has been stopped.
*/
private static class StoppableImplementation implements Stoppable {
private volatile boolean stop = false;
@Override
public void stop(String why) {
this.stop = true;
}
@Override
public boolean isStopped() {
return this.stop;
}
}
}
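/*
 * A minimal sketch, outside the test above, of how the two properties it exercises would be set
 * on a real Configuration. HBaseConfiguration.create() and the chosen values are illustrative
 * assumptions; only the property names come from the test itself.
 *
 *   Configuration conf = HBaseConfiguration.create();
 *   // how often the master-side chore wakes up and scans region metrics (milliseconds)
 *   conf.setInt("hbase.master.regions.recovery.check.interval", 1200000);
 *   // reopen a region once its compacted store file ref count crosses this threshold;
 *   // leaving it unset keeps the feature off, as testRegionReopensWithoutStoreRefConfig shows
 *   conf.setInt("hbase.regions.recovery.store.file.ref.count", 256);
 */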
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Represents the request to reset a user's password.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ForgotPassword" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ForgotPasswordRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the client associated with the user pool.
* </p>
*/
private String clientId;
/**
* <p>
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
* </p>
*/
private String secretHash;
/**
* <p>
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
* unexpected event by Amazon Cognito advanced security.
* </p>
*/
private UserContextDataType userContextData;
/**
* <p>
* The user name of the user for whom you want to enter a code to reset a forgotten password.
* </p>
*/
private String username;
/**
* <p>
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
* </p>
*/
private AnalyticsMetadataType analyticsMetadata;
/**
* <p>
* The ID of the client associated with the user pool.
* </p>
*
* @param clientId
* The ID of the client associated with the user pool.
*/
public void setClientId(String clientId) {
this.clientId = clientId;
}
/**
* <p>
* The ID of the client associated with the user pool.
* </p>
*
* @return The ID of the client associated with the user pool.
*/
public String getClientId() {
return this.clientId;
}
/**
* <p>
* The ID of the client associated with the user pool.
* </p>
*
* @param clientId
* The ID of the client associated with the user pool.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ForgotPasswordRequest withClientId(String clientId) {
setClientId(clientId);
return this;
}
/**
* <p>
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
* </p>
*
* @param secretHash
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
*/
public void setSecretHash(String secretHash) {
this.secretHash = secretHash;
}
/**
* <p>
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
* </p>
*
* @return A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
*/
public String getSecretHash() {
return this.secretHash;
}
/**
* <p>
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
* </p>
*
* @param secretHash
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ForgotPasswordRequest withSecretHash(String secretHash) {
setSecretHash(secretHash);
return this;
}
/**
* <p>
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
* unexpected event by Amazon Cognito advanced security.
* </p>
*
* @param userContextData
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the
* risk of an unexpected event by Amazon Cognito advanced security.
*/
public void setUserContextData(UserContextDataType userContextData) {
this.userContextData = userContextData;
}
/**
* <p>
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
* unexpected event by Amazon Cognito advanced security.
* </p>
*
* @return Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the
* risk of an unexpected event by Amazon Cognito advanced security.
*/
public UserContextDataType getUserContextData() {
return this.userContextData;
}
/**
* <p>
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
* unexpected event by Amazon Cognito advanced security.
* </p>
*
* @param userContextData
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the
* risk of an unexpected event by Amazon Cognito advanced security.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ForgotPasswordRequest withUserContextData(UserContextDataType userContextData) {
setUserContextData(userContextData);
return this;
}
/**
* <p>
* The user name of the user for whom you want to enter a code to reset a forgotten password.
* </p>
*
* @param username
* The user name of the user for whom you want to enter a code to reset a forgotten password.
*/
public void setUsername(String username) {
this.username = username;
}
/**
* <p>
* The user name of the user for whom you want to enter a code to reset a forgotten password.
* </p>
*
* @return The user name of the user for whom you want to enter a code to reset a forgotten password.
*/
public String getUsername() {
return this.username;
}
/**
* <p>
* The user name of the user for whom you want to enter a code to reset a forgotten password.
* </p>
*
* @param username
* The user name of the user for whom you want to enter a code to reset a forgotten password.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ForgotPasswordRequest withUsername(String username) {
setUsername(username);
return this;
}
/**
* <p>
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
* </p>
*
* @param analyticsMetadata
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
*/
public void setAnalyticsMetadata(AnalyticsMetadataType analyticsMetadata) {
this.analyticsMetadata = analyticsMetadata;
}
/**
* <p>
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
* </p>
*
* @return The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
*/
public AnalyticsMetadataType getAnalyticsMetadata() {
return this.analyticsMetadata;
}
/**
* <p>
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
* </p>
*
* @param analyticsMetadata
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>ForgotPassword</code> calls.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ForgotPasswordRequest withAnalyticsMetadata(AnalyticsMetadataType analyticsMetadata) {
setAnalyticsMetadata(analyticsMetadata);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getClientId() != null)
sb.append("ClientId: ").append("***Sensitive Data Redacted***").append(",");
if (getSecretHash() != null)
sb.append("SecretHash: ").append("***Sensitive Data Redacted***").append(",");
if (getUserContextData() != null)
sb.append("UserContextData: ").append(getUserContextData()).append(",");
if (getUsername() != null)
sb.append("Username: ").append("***Sensitive Data Redacted***").append(",");
if (getAnalyticsMetadata() != null)
sb.append("AnalyticsMetadata: ").append(getAnalyticsMetadata());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ForgotPasswordRequest == false)
return false;
ForgotPasswordRequest other = (ForgotPasswordRequest) obj;
if (other.getClientId() == null ^ this.getClientId() == null)
return false;
if (other.getClientId() != null && other.getClientId().equals(this.getClientId()) == false)
return false;
if (other.getSecretHash() == null ^ this.getSecretHash() == null)
return false;
if (other.getSecretHash() != null && other.getSecretHash().equals(this.getSecretHash()) == false)
return false;
if (other.getUserContextData() == null ^ this.getUserContextData() == null)
return false;
if (other.getUserContextData() != null && other.getUserContextData().equals(this.getUserContextData()) == false)
return false;
if (other.getUsername() == null ^ this.getUsername() == null)
return false;
if (other.getUsername() != null && other.getUsername().equals(this.getUsername()) == false)
return false;
if (other.getAnalyticsMetadata() == null ^ this.getAnalyticsMetadata() == null)
return false;
if (other.getAnalyticsMetadata() != null && other.getAnalyticsMetadata().equals(this.getAnalyticsMetadata()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getClientId() == null) ? 0 : getClientId().hashCode());
hashCode = prime * hashCode + ((getSecretHash() == null) ? 0 : getSecretHash().hashCode());
hashCode = prime * hashCode + ((getUserContextData() == null) ? 0 : getUserContextData().hashCode());
hashCode = prime * hashCode + ((getUsername() == null) ? 0 : getUsername().hashCode());
hashCode = prime * hashCode + ((getAnalyticsMetadata() == null) ? 0 : getAnalyticsMetadata().hashCode());
return hashCode;
}
@Override
public ForgotPasswordRequest clone() {
return (ForgotPasswordRequest) super.clone();
}
}
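/*
 * A minimal usage sketch, not part of the generated model above. It assumes the standard
 * AWSCognitoIdentityProvider client from the same SDK; the client id and username are
 * placeholder values.
 *
 *   AWSCognitoIdentityProvider cognito = AWSCognitoIdentityProviderClientBuilder.defaultClient();
 *   ForgotPasswordRequest request = new ForgotPasswordRequest()
 *       .withClientId("exampleClientId")   // user pool app client id (placeholder)
 *       .withUsername("exampleUser");      // user who should receive the reset code (placeholder)
 *   ForgotPasswordResult result = cognito.forgotPassword(request);
 *   // result.getCodeDeliveryDetails() describes where the confirmation code was delivered
 */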
|
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.test;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.analysis.config.BinTools;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.exec.SymlinkTreeHelper;
import com.google.devtools.build.lib.profiler.Profiler;
import com.google.devtools.build.lib.profiler.ProfilerTask;
import com.google.devtools.build.lib.util.OS;
import com.google.devtools.build.lib.util.ShellEscaper;
import com.google.devtools.build.lib.util.io.FileWatcher;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.SearchPath;
import com.google.devtools.build.lib.view.test.TestStatus.TestCase;
import com.google.devtools.common.options.Converters.RangeConverter;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.OptionsClassProvider;
import com.google.devtools.common.options.OptionsParsingException;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
/**
* A strategy for executing a {@link TestRunnerAction}.
*/
public abstract class TestStrategy implements TestActionContext {
public static final String TEST_SETUP_BASENAME = "test-setup.sh";
/**
* Returns true if coverage data should be gathered.
*/
protected static boolean isCoverageMode(TestRunnerAction action) {
return action.getCoverageData() != null;
}
/**
* Converter for the --flaky_test_attempts option.
*/
public static class TestAttemptsConverter extends RangeConverter {
public TestAttemptsConverter() {
super(1, 10);
}
@Override
public Integer convert(String input) throws OptionsParsingException {
if ("default".equals(input)) {
return -1;
} else {
return super.convert(input);
}
}
@Override
public String getTypeDescription() {
return super.getTypeDescription() + " or the string \"default\"";
}
}
public enum TestOutputFormat {
SUMMARY, // Provide summary output only.
ERRORS, // Print output from failed tests to the stderr after the test failure.
ALL, // Print output from all tests to the stderr after the test completion.
STREAMED; // Stream output for each test.
/**
* Converts to {@link TestOutputFormat}.
*/
public static class Converter extends EnumConverter<TestOutputFormat> {
public Converter() {
super(TestOutputFormat.class, "test output");
}
}
}
public enum TestSummaryFormat {
SHORT, // Print information only about tests.
TERSE, // Like "SHORT", but even shorter: Do not print PASSED tests.
DETAILED, // Print information only about failed test cases.
NONE; // Do not print summary.
/**
* Converts to {@link TestSummaryFormat}.
*/
public static class Converter extends EnumConverter<TestSummaryFormat> {
public Converter() {
super(TestSummaryFormat.class, "test summary");
}
}
}
public static final PathFragment TEST_TMP_ROOT = new PathFragment("_tmp");
// Used for selecting subset of testcase / testmethods.
private static final String TEST_BRIDGE_TEST_FILTER_ENV = "TESTBRIDGE_TEST_ONLY";
// Used for generating unique temporary directory names.
private final AtomicInteger tmpIndex = new AtomicInteger(0);
protected final ImmutableMap<String, String> clientEnv;
protected final ExecutionOptions executionOptions;
protected final BinTools binTools;
public TestStrategy(
OptionsClassProvider requestOptionsProvider,
BinTools binTools,
Map<String, String> clientEnv) {
this.executionOptions = requestOptionsProvider.getOptions(ExecutionOptions.class);
this.binTools = binTools;
this.clientEnv = ImmutableMap.copyOf(clientEnv);
}
@Override
public abstract void exec(TestRunnerAction action, ActionExecutionContext actionExecutionContext)
throws ExecException, InterruptedException;
/**
* Returns a mutable map of the default testing shell environment. By itself it is incomplete and is
* modified further by the specific test strategy implementations (mostly because the environments
* used locally and remotely are different).
*/
protected Map<String, String> getDefaultTestEnvironment(TestRunnerAction action) {
Map<String, String> env = new HashMap<>();
env.putAll(action.getConfiguration().getLocalShellEnvironment());
env.remove("LANG");
env.put("TZ", "UTC");
env.put("TEST_SIZE", action.getTestProperties().getSize().toString());
env.put("TEST_TIMEOUT", Integer.toString(getTimeout(action)));
if (action.isSharded()) {
env.put("TEST_SHARD_INDEX", Integer.toString(action.getShardNum()));
env.put("TEST_TOTAL_SHARDS",
Integer.toString(action.getExecutionSettings().getTotalShards()));
}
// When we run test multiple times, set different TEST_RANDOM_SEED values for each run.
if (action.getConfiguration().getRunsPerTestForLabel(action.getOwner().getLabel()) > 1) {
env.put("TEST_RANDOM_SEED", Integer.toString(action.getRunNumber() + 1));
}
String testFilter = action.getExecutionSettings().getTestFilter();
if (testFilter != null) {
env.put(TEST_BRIDGE_TEST_FILTER_ENV, testFilter);
}
return env;
}
/**
* Generates a command line to run for the test action, taking into account coverage
* and {@code --run_under} settings.
*
* @param testScript the setup script that invokes the test
* @param coverageScript a script interjected between setup script and rest of command line
* to collect coverage data. If this is an empty string, it is ignored.
* @param testAction The test action.
* @return the command line as a string list.
*/
protected List<String> getArgs(
String testScript, String coverageScript, TestRunnerAction testAction) {
List<String> args = Lists.newArrayList();
if (OS.getCurrent() == OS.WINDOWS) {
args.add(testAction.getShExecutable().getPathString());
args.add("-c");
args.add("$0 $*");
}
args.add(testScript);
TestTargetExecutionSettings execSettings = testAction.getExecutionSettings();
List<String> execArgs = new ArrayList<>();
if (!coverageScript.isEmpty() && isCoverageMode(testAction)) {
execArgs.add(coverageScript);
}
// Execute the test using the alias in the runfiles tree, as mandated by
// the Test Encyclopedia.
execArgs.add(execSettings.getExecutable().getRootRelativePath().getPathString());
execArgs.addAll(execSettings.getArgs());
// Insert the command prefix specified by the "--run_under=<command-prefix>" option,
// if any.
if (execSettings.getRunUnder() == null) {
args.addAll(execArgs);
} else if (execSettings.getRunUnderExecutable() != null) {
args.add(execSettings.getRunUnderExecutable().getRootRelativePath().getPathString());
args.addAll(execSettings.getRunUnder().getOptions());
args.addAll(execArgs);
} else {
args.add(testAction.getConfiguration().getShExecutable().getPathString());
args.add("-c");
String runUnderCommand = ShellEscaper.escapeString(execSettings.getRunUnder().getCommand());
Path fullySpecified =
SearchPath.which(
SearchPath.parse(
testAction.getTestLog().getPath().getFileSystem(), clientEnv.get("PATH")),
runUnderCommand);
if (fullySpecified != null) {
runUnderCommand = fullySpecified.toString();
}
args.add(
runUnderCommand
+ ' '
+ ShellEscaper.escapeJoinAll(
Iterables.concat(execSettings.getRunUnder().getOptions(), execArgs)));
}
return args;
}
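// Rough illustration of the three command-line shapes produced above (non-Windows case, no
// coverage script); the exact paths depend on the configuration:
//   no --run_under:                 test-setup.sh <test-binary> <test-args...>
//   --run_under=<label>:            test-setup.sh <run-under-binary> <run-under-opts...> <test-binary> <test-args...>
//   --run_under="<shell command>":  test-setup.sh <sh> -c '<shell command> <test-binary> <test-args...>'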
/**
* Returns the number of attempts a specific test action can be retried.
*
* <p>For rules with the "flaky = 1" attribute, this method will return 3 unless the
* --flaky_test_attempts option is given and specifies another value.
*/
@VisibleForTesting /* protected */
public int getTestAttempts(TestRunnerAction action) {
if (executionOptions.testAttempts == -1) {
return action.getTestProperties().isFlaky() ? 3 : 1;
} else {
return executionOptions.testAttempts;
}
}
/**
* Returns timeout value in seconds that should be used for the given test action. We always use
* the "categorical timeouts" which are based on the --test_timeout flag. A rule picks its timeout
* but ends up with the same effective value as all other rules in that bucket.
*/
protected final int getTimeout(TestRunnerAction testAction) {
return executionOptions.testTimeout.get(testAction.getTestProperties().getTimeout());
}
/**
* Returns a subset of the environment from the current shell.
*
* <p>Warning: Since these variables are not part of the configuration's fingerprint, they
* MUST NOT be used by any rule or action in such a way as to affect the semantics of that
* build step.
*/
public Map<String, String> getAdmissibleShellEnvironment(Iterable<String> variables) {
return getMapping(variables, clientEnv);
}
/**
* Finalize test run: persist the result, and post on the event bus.
*/
protected void postTestResult(Executor executor, TestResult result) throws IOException {
result.getTestAction().saveCacheStatus(result.getData());
executor.getEventBus().post(result);
}
/**
* Returns a unique name for a temporary directory a test could use.
*
* <p>Since each test within a single Blaze run must have a unique TEST_TMPDIR,
* we use the rule name and a number that is unique within a single Blaze request
* to generate the directory name.</p>
*
* <p>This does not create the directory.</p>
*/
protected String getTmpDirName(PathFragment execPath) {
return execPath.getBaseName() + "_" + tmpIndex.incrementAndGet();
}
/**
* Parse a test result XML file into a {@link TestCase}.
*/
@Nullable
protected TestCase parseTestResult(Path resultFile) {
/* Only the DETAILED summary format needs the parsed XML files. We avoid parsing them
unnecessarily, since test results can potentially consume a large amount of memory. */
if (executionOptions.testSummary != TestSummaryFormat.DETAILED) {
return null;
}
try (InputStream fileStream = resultFile.getInputStream()) {
return new TestXmlOutputParser().parseXmlIntoTestResult(fileStream);
} catch (IOException | TestXmlOutputParserException e) {
return null;
}
}
/**
* Returns a temporary directory for all tests in a workspace to use. Individual tests should
* create child directories to actually use.
*
* <p>This either dynamically generates a directory name or uses the directory specified by
* --test_tmpdir. This does not create the directory.</p>
*/
public static Path getTmpRoot(Path workspace, Path execRoot, ExecutionOptions executionOptions) {
return executionOptions.testTmpDir != null
? workspace.getRelative(executionOptions.testTmpDir).getRelative(TEST_TMP_ROOT)
: execRoot.getRelative(TEST_TMP_ROOT);
}
/**
* For a given environment, returns a subset containing all variables in the given list if they
* are defined in the given environment.
*/
@VisibleForTesting
public static Map<String, String> getMapping(Iterable<String> variables,
Map<String, String> environment) {
Map<String, String> result = new HashMap<>();
for (String var : variables) {
if (environment.containsKey(var)) {
result.put(var, environment.get(var));
}
}
return result;
}
/**
* Returns the runfiles directory associated with the test executable,
* creating/updating it if necessary and --build_runfile_links is specified.
*/
protected static Path getLocalRunfilesDirectory(
TestRunnerAction testAction,
ActionExecutionContext actionExecutionContext,
BinTools binTools,
ImmutableMap<String, String> shellEnvironment,
boolean enableRunfiles)
throws ExecException, InterruptedException {
TestTargetExecutionSettings execSettings = testAction.getExecutionSettings();
// If the symlink farm is already created then return the existing directory. If not we
// need to explicitly build it. This can happen when --nobuild_runfile_links is supplied
// as a flag to the build.
if (execSettings.getRunfilesSymlinksCreated()) {
return execSettings.getRunfilesDir();
}
// TODO(bazel-team): Should we be using TestTargetExecutionSettings#getRunfilesDir() here over
// generating the directory ourselves?
Path program = execSettings.getExecutable().getPath();
Path runfilesDir = program.getParentDirectory().getChild(program.getBaseName() + ".runfiles");
// Synchronize runfiles tree generation on the runfiles manifest artifact.
// This is necessary, because we might end up with multiple test runner actions
// trying to generate same runfiles tree in case of --runs_per_test > 1 or
// local test sharding.
long startTime = Profiler.nanoTimeMaybe();
synchronized (execSettings.getInputManifest()) {
Profiler.instance().logSimpleTask(startTime, ProfilerTask.WAIT, testAction);
updateLocalRunfilesDirectory(
testAction,
runfilesDir,
actionExecutionContext,
binTools,
shellEnvironment,
enableRunfiles);
}
return runfilesDir;
}
/**
* Ensure the runfiles tree exists and is consistent with the TestAction's manifest
* ($0.runfiles_manifest), bringing it into consistency if not. The contents of the output file
$0.runfiles/MANIFEST, if it exists, are used as a proxy for the set of existing symlinks, to avoid
* the need for recursion.
*/
private static void updateLocalRunfilesDirectory(
TestRunnerAction testAction,
Path runfilesDir,
ActionExecutionContext actionExecutionContext,
BinTools binTools,
ImmutableMap<String, String> shellEnvironment,
boolean enableRunfiles)
throws ExecException, InterruptedException {
Executor executor = actionExecutionContext.getExecutor();
TestTargetExecutionSettings execSettings = testAction.getExecutionSettings();
try {
// Avoid rebuilding the runfiles directory if the manifest in it matches the input manifest,
// implying the symlinks exist and are already up to date.
if (Arrays.equals(runfilesDir.getRelative("MANIFEST").getMD5Digest(),
execSettings.getInputManifest().getPath().getMD5Digest())) {
return;
}
} catch (IOException e1) {
// Ignore it - we will just try to create runfiles directory.
}
executor.getEventHandler().handle(Event.progress(
"Building runfiles directory for '" + execSettings.getExecutable().prettyPrint() + "'."));
new SymlinkTreeHelper(
execSettings.getInputManifest().getPath(),
runfilesDir,
false)
.createSymlinks(
testAction,
actionExecutionContext,
binTools,
shellEnvironment,
enableRunfiles);
executor.getEventHandler().handle(Event.progress(testAction.getProgressMessage()));
}
/**
* In rare cases, we might write something to stderr. Append it to the real test.log.
*/
protected static void appendStderr(Path stdOut, Path stdErr) throws IOException {
FileStatus stat = stdErr.statNullable();
OutputStream out = null;
InputStream in = null;
if (stat != null) {
try {
if (stat.getSize() > 0) {
if (stdOut.exists()) {
stdOut.setWritable(true);
}
out = stdOut.getOutputStream(true);
in = stdErr.getInputStream();
ByteStreams.copy(in, out);
}
} finally {
Closeables.close(out, true);
Closeables.close(in, true);
stdErr.delete();
}
}
}
/**
* Implements the --test_output=streamed option.
*/
protected static class StreamedTestOutput implements Closeable {
private final TestLogHelper.FilterTestHeaderOutputStream headerFilter;
private final FileWatcher watcher;
private final Path testLogPath;
private final OutErr outErr;
public StreamedTestOutput(OutErr outErr, Path testLogPath) throws IOException {
this.testLogPath = testLogPath;
this.outErr = outErr;
this.headerFilter = TestLogHelper.getHeaderFilteringOutputStream(outErr.getOutputStream());
this.watcher = new FileWatcher(testLogPath, OutErr.create(headerFilter, headerFilter), false);
watcher.start();
}
@Override
public void close() throws IOException {
watcher.stopPumping();
try {
// The watcher thread might leak if the following call is interrupted.
// This is a relatively minor issue since the worst it could do is
// write one additional line from the test.log to the console later on
// in the build.
watcher.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!headerFilter.foundHeader()) {
try (InputStream input = testLogPath.getInputStream()) {
ByteStreams.copy(input, outErr.getOutputStream());
}
}
}
}
}
|
|
/**
*/
package Variation_Diff.impl;
import Variation_Diff.AssociationType;
import Variation_Diff.Variation_DiffPackage;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Association Type</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link Variation_Diff.impl.AssociationTypeImpl#getLowerBound <em>Lower Bound</em>}</li>
* <li>{@link Variation_Diff.impl.AssociationTypeImpl#getUpperBound <em>Upper Bound</em>}</li>
* </ul>
*
* @generated
*/
public abstract class AssociationTypeImpl extends FieldTypeImpl implements AssociationType
{
/**
* The default value of the '{@link #getLowerBound() <em>Lower Bound</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLowerBound()
* @generated
* @ordered
*/
protected static final int LOWER_BOUND_EDEFAULT = 0;
/**
* The cached value of the '{@link #getLowerBound() <em>Lower Bound</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getLowerBound()
* @generated
* @ordered
*/
protected int lowerBound = LOWER_BOUND_EDEFAULT;
/**
* The default value of the '{@link #getUpperBound() <em>Upper Bound</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getUpperBound()
* @generated
* @ordered
*/
protected static final int UPPER_BOUND_EDEFAULT = 0;
/**
* The cached value of the '{@link #getUpperBound() <em>Upper Bound</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getUpperBound()
* @generated
* @ordered
*/
protected int upperBound = UPPER_BOUND_EDEFAULT;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected AssociationTypeImpl()
{
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass()
{
return Variation_DiffPackage.Literals.ASSOCIATION_TYPE;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getLowerBound()
{
return lowerBound;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setLowerBound(int newLowerBound)
{
int oldLowerBound = lowerBound;
lowerBound = newLowerBound;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, Variation_DiffPackage.ASSOCIATION_TYPE__LOWER_BOUND, oldLowerBound, lowerBound));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getUpperBound()
{
return upperBound;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setUpperBound(int newUpperBound)
{
int oldUpperBound = upperBound;
upperBound = newUpperBound;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, Variation_DiffPackage.ASSOCIATION_TYPE__UPPER_BOUND, oldUpperBound, upperBound));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType)
{
switch (featureID)
{
case Variation_DiffPackage.ASSOCIATION_TYPE__LOWER_BOUND:
return getLowerBound();
case Variation_DiffPackage.ASSOCIATION_TYPE__UPPER_BOUND:
return getUpperBound();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue)
{
switch (featureID)
{
case Variation_DiffPackage.ASSOCIATION_TYPE__LOWER_BOUND:
setLowerBound((Integer)newValue);
return;
case Variation_DiffPackage.ASSOCIATION_TYPE__UPPER_BOUND:
setUpperBound((Integer)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID)
{
switch (featureID)
{
case Variation_DiffPackage.ASSOCIATION_TYPE__LOWER_BOUND:
setLowerBound(LOWER_BOUND_EDEFAULT);
return;
case Variation_DiffPackage.ASSOCIATION_TYPE__UPPER_BOUND:
setUpperBound(UPPER_BOUND_EDEFAULT);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID)
{
switch (featureID)
{
case Variation_DiffPackage.ASSOCIATION_TYPE__LOWER_BOUND:
return lowerBound != LOWER_BOUND_EDEFAULT;
case Variation_DiffPackage.ASSOCIATION_TYPE__UPPER_BOUND:
return upperBound != UPPER_BOUND_EDEFAULT;
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString()
{
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (lowerBound: ");
result.append(lowerBound);
result.append(", upperBound: ");
result.append(upperBound);
result.append(')');
return result.toString();
}
} //AssociationTypeImpl
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQueryBuilder> {
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";
boolean requiresRewrite = false;
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mapperService.merge("just_a_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
}
/**
* @return a {@link HasParentQueryBuilder} with random values all over the place
*/
@Override
protected HasParentQueryBuilder doCreateTestQueryBuilder() {
QueryBuilder innerQueryBuilder = RandomQueryBuilder.createQuery(random());
if (randomBoolean()) {
requiresRewrite = true;
innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
}
HasParentQueryBuilder hqb = new HasParentQueryBuilder(PARENT_TYPE, innerQueryBuilder, randomBoolean());
hqb.ignoreUnmapped(randomBoolean());
if (randomBoolean()) {
hqb.innerHit(new InnerHitBuilder()
.setName(randomAsciiOfLengthBetween(1, 10))
.setSize(randomIntBetween(0, 100))
.addSort(new FieldSortBuilder(STRING_FIELD_NAME_2).order(SortOrder.ASC)), hqb.ignoreUnmapped());
}
return hqb;
}
@Override
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
if (queryBuilder.innerHit() != null) {
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
// doCreateTestQueryBuilder)
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
assertNotNull(searchContext);
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
for (InnerHitBuilder builder : innerHitBuilders.values()) {
builder.build(searchContext, searchContext.innerHits());
}
assertNotNull(searchContext.innerHits());
assertEquals(1, searchContext.innerHits().getInnerHits().size());
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits()
.getInnerHits().get(queryBuilder.innerHit().getName());
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
assertEquals(innerHits.sort().sort.getSort().length, 1);
assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2);
}
}
public void testIllegalValues() throws IOException {
QueryBuilder query = RandomQueryBuilder.createQuery(random());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> QueryBuilders.hasParentQuery(null, query, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'type' field"));
e = expectThrows(IllegalArgumentException.class,
() -> QueryBuilders.hasParentQuery("foo", null, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field"));
QueryShardContext context = createShardContext();
HasParentQueryBuilder qb = QueryBuilders.hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false);
QueryShardException qse = expectThrows(QueryShardException.class, () -> qb.doToQuery(context));
assertThat(qse.getMessage(), equalTo("[has_parent] no child types found for type [just_a_type]"));
}
public void testDeprecatedXContent() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
builder.startObject("has_parent");
builder.field("query");
new TermQueryBuilder("a", "a").toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.field("type", "foo"); // deprecated
builder.endObject();
builder.endObject();
HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string());
assertEquals("foo", queryBuilder.type());
assertWarnings("Deprecated field [type] used, expected [parent_type] instead");
}
public void testToQueryInnerQueryType() throws IOException {
String[] searchTypes = new String[]{CHILD_TYPE};
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_TYPE, new IdsQueryBuilder().addIds("id"),
false);
Query query = hasParentQueryBuilder.toQuery(shardContext);
//verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_TYPE, "id");
}
@Override
public void testMustRewrite() throws IOException {
try {
super.testMustRewrite();
} catch (UnsupportedOperationException e) {
if (requiresRewrite == false) {
throw e;
}
}
}
public void testFromJson() throws IOException {
String json =
"{\n" +
" \"has_parent\" : {\n" +
" \"query\" : {\n" +
" \"term\" : {\n" +
" \"tag\" : {\n" +
" \"value\" : \"something\",\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
" },\n" +
" \"parent_type\" : \"blog\",\n" +
" \"score\" : true,\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
"}";
HasParentQueryBuilder parsed = (HasParentQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, "blog", parsed.type());
assertEquals(json, "something", ((TermQueryBuilder) parsed.query()).value());
}
public void testIgnoreUnmapped() throws IOException {
final HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
queryBuilder.ignoreUnmapped(true);
Query query = queryBuilder.toQuery(createShardContext());
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
final HasParentQueryBuilder failingQueryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
failingQueryBuilder.ignoreUnmapped(false);
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
assertThat(e.getMessage(),
containsString("[" + HasParentQueryBuilder.NAME + "] query configured 'parent_type' [unmapped] is not a valid type"));
}
public void testIgnoreUnmappedWithRewrite() throws IOException {
// WrapperQueryBuilder makes sure we always rewrite
final HasParentQueryBuilder queryBuilder =
new HasParentQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false);
queryBuilder.ignoreUnmapped(true);
QueryShardContext queryShardContext = createShardContext();
Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext);
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
}
}
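/*
 * A minimal sketch, not part of the test class above, of building the same query as
 * testFromJson() programmatically; "blog", "tag" and "something" are simply the values used in
 * that test's JSON.
 *
 *   HasParentQueryBuilder builder =
 *       QueryBuilders.hasParentQuery("blog", QueryBuilders.termQuery("tag", "something"), true);
 *   builder.ignoreUnmapped(false);
 */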
|
|
package com.picmonic.selenium.tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.openqa.selenium.Alert;
import org.openqa.selenium.By;
import org.openqa.selenium.NoAlertPresentException;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.support.ui.Select;
import com.picmonic.selenium.utils.ApplicationConstants;
public class Library {
private static WebDriver driver;
private static String baseUrl;
private boolean acceptNextAlert = true;
private static StringBuffer verificationErrors = new StringBuffer();
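// Logs in once for the whole class and opens the navigation menu; the individual tests below
// then navigate to the Library page and exercise each "Sort:" option in turn.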
@BeforeClass
public static void classSetup() {
driver = new FirefoxDriver();
baseUrl = ApplicationConstants.BASEURL;
driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
// Login code
driver.get(baseUrl + "/login");
driver.findElement(By.name("credentials[email]")).clear();
driver.findElement(By.name("credentials[email]")).sendKeys(ApplicationConstants.EMAIL);
driver.findElement(By.name("credentials[password]")).clear();
driver.findElement(By.name("credentials[password]")).sendKeys(ApplicationConstants.PASSWORD);
driver.findElement(By.xpath("//button[@type='submit']")).click();
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Before
public void setUp() throws Exception {
}
@Test
public void testAscendingOrderOfLibrary() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Title A-Z");
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testBestMatchInSorting() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Best Match");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testDescendingOrderOfLibrary() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Title Z-A");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testMostRecentInSorting() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Most Recent Viewed");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testNewestInSorting() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Newest");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testRatingOrder15InSorting() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Rating 1-5");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testRatingOrder51InSorting() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Rating 5-1");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@Test
public void testStackInSorting() throws Exception {
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
driver.findElement(By.xpath("//a[@href='/library']/div[@class='nav-item']")).click();
new Select(driver.findElement(By.id("sort-library"))).selectByVisibleText("Stack");
try {
assertEquals("Picmonic Learning System :: Picmonic", driver.getTitle());
} catch (Error e) {
verificationErrors.append(e.toString());
}
assertTrue(isElementPresent(By.id("sort-library")));
try {
assertEquals("Sort:", driver.findElement(By.id("sort-label")).getText());
} catch (Error e) {
verificationErrors.append(e.toString());
}
driver.findElement(By.xpath("//button//i[@class='icon-bars']")).click();
}
@After
public void tearDown() throws Exception {
}
@AfterClass
public static void classTeardown() {
driver.findElement(By.xpath("//div[@class='nav-item'][contains(text(),'Sign Out')]")).click();
driver.quit();
String verificationErrorString = verificationErrors.toString();
if (!"".equals(verificationErrorString)) {
fail(verificationErrorString);
}
}
private boolean isElementPresent(By by) {
try {
driver.findElement(by);
return true;
} catch (NoSuchElementException e) {
return false;
}
}
private boolean isAlertPresent() {
try {
driver.switchTo().alert();
return true;
} catch (NoAlertPresentException e) {
return false;
}
}
private String closeAlertAndGetItsText() {
try {
Alert alert = driver.switchTo().alert();
String alertText = alert.getText();
if (acceptNextAlert) {
alert.accept();
} else {
alert.dismiss();
}
return alertText;
} finally {
acceptNextAlert = true;
}
}
}
|
|
package de.eorg.cumulusgenius.server.db.dao;
import java.util.Random;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Transaction;
import com.google.appengine.api.memcache.MemcacheService;
import com.google.appengine.api.memcache.MemcacheService.SetPolicy;
import com.google.appengine.api.memcache.MemcacheServiceFactory;
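/**
* A counter whose value is spread across multiple datastore entities ("shards") so that
* concurrent increments do not contend on a single entity group. Reads sum all the shards
* and cache the result in memcache.
*
* Illustrative usage (a sketch; a configured App Engine datastore/memcache environment and
* the counter name below are assumptions, not part of this class):
* <pre>{@code
* ShardedCounter visits = new ShardedCounter("page-visits");
* visits.addShards(5);              // grow write capacity; the shard count never shrinks
* visits.increment();               // bumps one randomly chosen shard in a transaction
* long total = visits.getCount();   // sums all shards, memcache-backed
* }</pre>
*/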
public class ShardedCounter {
private static final class Counter {
/**
* Entity kind representing a named sharded counter.
*/
private static final String KIND = "Counter";
/**
* Property to store the number of shards in a given {@value #KIND}
* named sharded counter.
*/
private static final String SHARD_COUNT = "shard_count";
}
/**
* Convenience class which contains constants related to the counter shards.
* The shard number (as a String) is used as the entity key.
*/
private static final class CounterShard {
/**
* Entity kind prefix, which is concatenated with the counter name to
* form the final entity kind, which represents counter shards.
*/
private static final String KIND_PREFIX = "CounterShard_";
/**
* Property to store the current count within a counter shard.
*/
private static final String COUNT = "count";
}
private static final DatastoreService ds = DatastoreServiceFactory.getDatastoreService();
/**
* Default number of shards.
*/
private static final int INITIAL_SHARDS = 5;
/**
* The name of this counter.
*/
private final String counterName;
/**
* A random number generator, used to distribute writes across shards.
*/
private final Random generator = new Random();
/**
* The counter shard kind for this counter.
*/
private String kind;
private final MemcacheService mc = MemcacheServiceFactory.getMemcacheService();
/**
* Constructor which creates a sharded counter using the provided counter
* name.
*
* @param counterName
* name of the sharded counter
*/
public ShardedCounter (String counterName) {
this.counterName = counterName;
kind = CounterShard.KIND_PREFIX + counterName;
}
/**
* Increase the number of shards for a given sharded counter. Will never
* decrease the number of shards.
*
* @param count
* Number of new shards to build and store
*/
public void addShards(int count) {
Key counterKey = KeyFactory.createKey(Counter.KIND, counterName);
incrementPropertyTx(counterKey, Counter.SHARD_COUNT, count,
INITIAL_SHARDS + count);
}
/**
* Retrieve the value of this sharded counter.
*
* @return Summed total of all shards' counts
*/
public long getCount() {
Long value = (Long) mc.get(kind);
if (value != null) {
return value;
}
long sum = 0;
Query query = new Query(kind);
for (Entity shard : ds.prepare(query).asIterable()) {
sum += (Long) shard.getProperty(CounterShard.COUNT);
}
mc.put(kind, sum, null, SetPolicy.ADD_ONLY_IF_NOT_PRESENT);
return sum;
}
/**
* Initialize the value of this sharded counter, setting every shard and the cached total to zero.
*
*/
public void initiate() {
int shards = getShardCount();
for (int i = 0; i < shards; i++) {
Key shardKey = KeyFactory.createKey(kind, Integer.toString(i));
initiatePropertyTx(shardKey, CounterShard.COUNT);
}
mc.put(kind, 0L, null, SetPolicy.SET_ALWAYS);
}
/**
* Reset the value of this sharded counter to the given count, distributing it across the shards.
*
* @param count the new total value
*/
public void reset(long count) {
int shards = getShardCount();
boolean firstShard = true;
for (int i = 0; i < shards; i++) {
Key shardKey = KeyFactory.createKey(kind, Integer.toString(i));
long shardCount = (count / shards) + (firstShard ? count % shards : 0L);
resetPropertyTx(shardKey, CounterShard.COUNT, shardCount);
firstShard = false;
}
mc.put(kind, count, null, SetPolicy.SET_ALWAYS);
}
/**
* Increment the value of this sharded counter.
*/
public void increment() {
// Find how many shards are in this counter.
int numShards = getShardCount();
// Choose the shard randomly from the available shards.
long shardNum = generator.nextInt(numShards);
Key shardKey = KeyFactory.createKey(kind, Long.toString(shardNum));
incrementPropertyTx(shardKey, CounterShard.COUNT, 1, 1);
mc.increment(kind, 1);
}
/**
* Get the number of shards in this counter.
*
* @return shard count
*/
private int getShardCount() {
try {
Key counterKey = KeyFactory.createKey(Counter.KIND, counterName);
Entity counter = ds.get(counterKey);
Long shardCount = (Long) counter.getProperty(Counter.SHARD_COUNT);
return shardCount.intValue();
} catch (EntityNotFoundException ignore) {
return INITIAL_SHARDS;
}
}
/**
* Increment datastore property value inside a transaction. If the entity
* with the provided key does not exist, instead create an entity with the
* supplied initial property value.
*
* @param key
* the entity key to update or create
* @param prop
* the property name to be incremented
* @param increment
* the amount by which to increment
* @param initialValue
* the value to use if the entity does not exist
*/
private void incrementPropertyTx(Key key, String prop, long increment,long initialValue) {
Transaction tx = ds.beginTransaction();
Entity thing;
long value;
try {
thing = ds.get(tx, key);
value = (Long) thing.getProperty(prop) + increment;
} catch (EntityNotFoundException e) {
thing = new Entity(key);
value = initialValue;
}
thing.setUnindexedProperty(prop, value);
ds.put(tx, thing);
tx.commit();
}
/**
* Reset a datastore property value to zero inside a transaction. If the entity
* with the provided key does not exist, create it first.
*
* @param key
* the entity key to update or create
* @param prop
* the property name to reset to zero
*
*/
private void initiatePropertyTx(Key key, String prop) {
Transaction tx = ds.beginTransaction();
Entity thing;
try {
thing = ds.get(tx, key);
} catch (EntityNotFoundException e) {
thing = new Entity(key);
}
thing.setUnindexedProperty(prop, 0L);
ds.put(tx, thing);
tx.commit();
}
/**
* Set a datastore property to the supplied value inside a transaction. If the
* entity with the provided key does not exist, create it first.
*
* @param key
* the entity key to update or create
* @param prop
* the property name to set
* @param initialValue
* the value to store in the property
*/
private void resetPropertyTx(Key key, String prop, long initialValue) {
Transaction tx = ds.beginTransaction();
Entity thing;
try {
thing = ds.get(tx, key);
} catch (EntityNotFoundException e) {
thing = new Entity(key);
}
thing.setUnindexedProperty(prop, initialValue);
ds.put(tx, thing);
tx.commit();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.NodeHealthScriptRunner;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestNMWebServer {
private static final File testRootDir = new File("target",
TestNMWebServer.class.getSimpleName());
private static File testLogDir = new File("target",
TestNMWebServer.class.getSimpleName() + "LogDir");
@Before
public void setup() {
testRootDir.mkdirs();
testLogDir.mkdir();
}
@After
public void tearDown() {
FileUtil.fullyDelete(testRootDir);
FileUtil.fullyDelete(testLogDir);
}
private NodeHealthCheckerService createNodeHealthCheckerService(Configuration conf) {
NodeHealthScriptRunner scriptRunner = NodeManager.getNodeHealthScriptRunner(conf);
LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService();
return new NodeHealthCheckerService(scriptRunner, dirsHandler);
}
private int startNMWebAppServer(String webAddr) {
Context nmContext = new NodeManager.NMContext(null, null, null, null,
null);
ResourceView resourceView = new ResourceView() {
@Override
public long getVmemAllocatedForContainers() {
return 0;
}
@Override
public long getPmemAllocatedForContainers() {
return 0;
}
@Override
public long getVCoresAllocatedForContainers() {
return 0;
}
@Override
public boolean isVmemCheckEnabled() {
return true;
}
@Override
public boolean isPmemCheckEnabled() {
return true;
}
};
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath());
NodeHealthCheckerService healthChecker = createNodeHealthCheckerService(conf);
healthChecker.init(conf);
LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler();
conf.set(YarnConfiguration.NM_WEBAPP_ADDRESS, webAddr);
WebServer server = new WebServer(nmContext, resourceView,
new ApplicationACLsManager(conf), dirsHandler);
try {
server.init(conf);
server.start();
return server.getPort();
} finally {
server.stop();
healthChecker.stop();
}
}
@Test
public void testNMWebAppWithOutPort() throws IOException {
int port = startNMWebAppServer("0.0.0.0");
validatePortVal(port);
}
private void validatePortVal(int portVal) {
Assert.assertTrue("Port is not updated", portVal > 0);
Assert.assertTrue("Port is default "+ YarnConfiguration.DEFAULT_NM_PORT,
portVal !=YarnConfiguration.DEFAULT_NM_PORT);
}
@Test
public void testNMWebAppWithEphemeralPort() throws IOException {
int port = startNMWebAppServer("0.0.0.0:0");
validatePortVal(port);
}
@Test
public void testNMWebApp() throws IOException, YarnException {
Context nmContext = new NodeManager.NMContext(null, null, null, null,
null);
ResourceView resourceView = new ResourceView() {
@Override
public long getVmemAllocatedForContainers() {
return 0;
}
@Override
public long getPmemAllocatedForContainers() {
return 0;
}
@Override
public long getVCoresAllocatedForContainers() {
return 0;
}
@Override
public boolean isVmemCheckEnabled() {
return true;
}
@Override
public boolean isPmemCheckEnabled() {
return true;
}
};
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath());
NodeHealthCheckerService healthChecker = createNodeHealthCheckerService(conf);
healthChecker.init(conf);
LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler();
WebServer server = new WebServer(nmContext, resourceView,
new ApplicationACLsManager(conf), dirsHandler);
server.init(conf);
server.start();
// Add an application and the corresponding containers
RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(conf);
Dispatcher dispatcher = new AsyncDispatcher();
String user = "nobody";
long clusterTimeStamp = 1234;
ApplicationId appId =
BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp, 1);
Application app = mock(Application.class);
when(app.getUser()).thenReturn(user);
when(app.getAppId()).thenReturn(appId);
nmContext.getApplications().put(appId, app);
ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
appId, 1);
ContainerId container1 =
BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 0);
ContainerId container2 =
BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 1);
NodeManagerMetrics metrics = mock(NodeManagerMetrics.class);
NMStateStoreService stateStore = new NMNullStateStoreService();
for (ContainerId containerId : new ContainerId[] { container1,
container2}) {
// TODO: Use builder utils
ContainerLaunchContext launchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
long currentTime = System.currentTimeMillis();
Token containerToken =
BuilderUtils.newContainerToken(containerId, "127.0.0.1", 1234, user,
BuilderUtils.newResource(1024, 1), currentTime + 10000L, 123,
"password".getBytes(), currentTime);
Container container =
new ContainerImpl(conf, dispatcher, stateStore, launchContext,
null, metrics,
BuilderUtils.newContainerTokenIdentifier(containerToken)) {
@Override
public ContainerState getContainerState() {
return ContainerState.RUNNING;
};
};
nmContext.getContainers().put(containerId, container);
//TODO: Gross hack. Fix in code.
ApplicationId applicationId =
containerId.getApplicationAttemptId().getApplicationId();
nmContext.getApplications().get(applicationId).getContainers()
.put(containerId, container);
writeContainerLogs(nmContext, containerId, dirsHandler);
}
// TODO: Pull logs and test contents.
// Thread.sleep(1000000);
}
private void writeContainerLogs(Context nmContext,
ContainerId containerId, LocalDirsHandlerService dirsHandler)
throws IOException, YarnException {
// ContainerLogDir should be created
File containerLogDir =
ContainerLogsUtils.getContainerLogDirs(containerId,
dirsHandler).get(0);
containerLogDir.mkdirs();
for (String fileType : new String[] { "stdout", "stderr", "syslog" }) {
Writer writer = new FileWriter(new File(containerLogDir, fileType));
writer.write(ConverterUtils.toString(containerId) + "\n Hello "
+ fileType + "!");
writer.close();
}
}
}
|
|
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.core.schema;
import com.netflix.hollow.api.error.IncompatibleSchemaException;
import com.netflix.hollow.core.read.filter.HollowFilterConfig;
import com.netflix.hollow.core.schema.HollowObjectSchema.FieldType;
import org.junit.Assert;
import org.junit.Test;
public class HollowObjectSchemaTest {
@Test
public void findsCommonSchemas() {
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F2");
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F2");
s2.addField("F2", FieldType.LONG);
s2.addField("F3", FieldType.STRING);
HollowObjectSchema commonSchema = s1.findCommonSchema(s2);
Assert.assertEquals(1, commonSchema.numFields());
Assert.assertEquals("F2", commonSchema.getFieldName(0));
Assert.assertEquals(FieldType.LONG, commonSchema.getFieldType(0));
Assert.assertEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
Assert.assertEquals(s1.getPrimaryKey(), commonSchema.getPrimaryKey());
{
HollowObjectSchema s3 = new HollowObjectSchema("Test", 2, "F3");
s3.addField("F2", FieldType.LONG);
s3.addField("F3", FieldType.STRING);
HollowObjectSchema c3 = s1.findCommonSchema(s3);
Assert.assertNotEquals(s1.getPrimaryKey(), s3.getPrimaryKey());
Assert.assertNotEquals(s1.getPrimaryKey(), c3.getPrimaryKey());
Assert.assertNull(c3.getPrimaryKey());
}
}
@Test
public void findCommonSchema_incompatible() {
try {
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F1");
s1.addField("F1", FieldType.INT);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F1");
s2.addField("F1", FieldType.STRING);
s1.findCommonSchema(s2);
Assert.fail("Expected IncompatibleSchemaException");
} catch (IncompatibleSchemaException e) {
Assert.assertEquals("Test", e.getTypeName());
Assert.assertEquals("F1", e.getFieldName());
}
}
@Test
public void findsUnionSchemas() {
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F2");
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F2");
s2.addField("F2", FieldType.LONG);
s2.addField("F3", FieldType.STRING);
HollowObjectSchema unionSchema = s1.findUnionSchema(s2);
Assert.assertEquals(3, unionSchema.numFields());
Assert.assertEquals("F1", unionSchema.getFieldName(0));
Assert.assertEquals(FieldType.INT, unionSchema.getFieldType(0));
Assert.assertEquals("F2", unionSchema.getFieldName(1));
Assert.assertEquals(FieldType.LONG, unionSchema.getFieldType(1));
Assert.assertEquals("F3", unionSchema.getFieldName(2));
Assert.assertEquals(FieldType.STRING, unionSchema.getFieldType(2));
Assert.assertEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
Assert.assertEquals(s1.getPrimaryKey(), unionSchema.getPrimaryKey());
{
HollowObjectSchema s3 = new HollowObjectSchema("Test", 2, "F3");
s3.addField("F2", FieldType.LONG);
s3.addField("F3", FieldType.STRING);
HollowObjectSchema u3 = s1.findUnionSchema(s3);
Assert.assertNotEquals(s1.getPrimaryKey(), s3.getPrimaryKey());
Assert.assertNotEquals(s1.getPrimaryKey(), u3.getPrimaryKey());
Assert.assertNull(u3.getPrimaryKey());
}
}
@Test
public void filterSchema() {
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F2");
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
Assert.assertEquals(2, s1.numFields());
HollowFilterConfig filter = new HollowFilterConfig();
filter.addField("Test", "F2");
HollowObjectSchema s2 = s1.filterSchema(filter);
Assert.assertEquals(1, s2.numFields());
Assert.assertEquals("F2", s2.getFieldName(0));
Assert.assertEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
}
@Test
public void testEquals() {
{
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2);
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2);
s2.addField("F1", FieldType.INT);
s2.addField("F2", FieldType.LONG);
Assert.assertEquals(s1, s2);
}
{
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2);
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 1);
s2.addField("F1", FieldType.INT);
Assert.assertNotEquals(s1, s2);
}
}
@Test
public void testEqualsWithPrimaryKey() {
{
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F2");
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F2");
s2.addField("F1", FieldType.INT);
s2.addField("F2", FieldType.LONG);
Assert.assertEquals(s1, s2);
Assert.assertEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
}
{
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F1", "F2");
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F1", "F2");
s2.addField("F1", FieldType.INT);
s2.addField("F2", FieldType.LONG);
Assert.assertEquals(s1, s2);
Assert.assertEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
}
{
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2, "F1", "F2");
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F1");
s2.addField("F1", FieldType.INT);
s2.addField("F2", FieldType.LONG);
Assert.assertNotEquals(s1, s2);
Assert.assertNotEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
}
{
HollowObjectSchema s1 = new HollowObjectSchema("Test", 2);
s1.addField("F1", FieldType.INT);
s1.addField("F2", FieldType.LONG);
HollowObjectSchema s2 = new HollowObjectSchema("Test", 2, "F1");
s2.addField("F1", FieldType.INT);
s2.addField("F2", FieldType.LONG);
Assert.assertNotEquals(s1, s2);
Assert.assertNotEquals(s1.getPrimaryKey(), s2.getPrimaryKey());
}
}
}
|
|
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.IOException;
import javax.swing.JOptionPane;
import javax.swing.JTabbedPane;
import java.util.List;
import java.util.Vector;
public class GraphDisplay extends JTabbedPane
{
GraphText graphText;
GraphResults graphResults;
GraphVisualization graphVisualization;
public static final String dataDir = "./graphs/";
public GraphDisplay()
{
super();
graphText = new GraphText();
graphResults = new GraphResults();
graphVisualization = new GraphVisualization();
add("Visualization", graphVisualization);
add("Results", graphResults);
add("Input File", graphText);
setEnabledAt(0, false);
setEnabledAt(1, false);
setEnabledAt(2, false);
}
public String getGraphFileName()
{
return graphText.getFileName();
}
public File getGraphFile()
{
return graphText.getFile();
}
public boolean hasChangedGraph()
{
return graphText.hasChanged();
}
public boolean newGraph()
{
boolean result = false;
try
{
result = graphText.newGraph();
if(result)
{
setEnabledAt(indexOfComponent(graphText), true);
setSelectedIndex(indexOfComponent(graphText));
closeResults();
closeVisualization();
}
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Saving File", JOptionPane.ERROR_MESSAGE);
}
return result;
}
public boolean saveAsGraph()
{
boolean result = false;
try
{
result = graphText.saveAs();
if(result)
{
setEnabledAt(indexOfComponent(graphText), true);
closeResults();
closeVisualization();
try
{
String dotName = createDotFile(graphText.getFile().toString());
graphVisualization.open(dotName);
setEnabledAt(indexOfComponent(graphVisualization), true);
setSelectedIndex(indexOfComponent(graphVisualization));
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Running Dot.", JOptionPane.ERROR_MESSAGE);
setSelectedIndex(indexOfComponent(graphText));
}
}
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Saving File", JOptionPane.ERROR_MESSAGE);
}
return result;
}
public boolean saveGraph()
{
boolean result = false;
try
{
result = graphText.save();
if(result)
{
setEnabledAt(indexOfComponent(graphText), true);
closeResults();
closeVisualization();
try
{
String dotName = createDotFile(graphText.getFile().toString());
graphVisualization.open(dotName);
setEnabledAt(indexOfComponent(graphVisualization), true);
setSelectedIndex(indexOfComponent(graphVisualization));
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Running Dot.", JOptionPane.ERROR_MESSAGE);
setSelectedIndex(indexOfComponent(graphText));
}
}
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Saving File", JOptionPane.ERROR_MESSAGE);
}
return result;
}
public boolean askToSaveGraph(String question)
{
boolean result = false;
try
{
result = graphText.askToSave(question);
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Saving File", JOptionPane.ERROR_MESSAGE);
}
return result;
}
public void openGraphHelper()
{
closeResults();
closeVisualization();
setEnabledAt(indexOfComponent(graphText), true);
try
{
String dotName = createDotFile(graphText.getFile().toString());
graphVisualization.open(dotName);
setEnabledAt(indexOfComponent(graphVisualization), true);
setSelectedIndex(indexOfComponent(graphVisualization));
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Running Dot.", JOptionPane.ERROR_MESSAGE);
setSelectedIndex(indexOfComponent(graphText));
}
}
public void openGraph()
{
if(askToSaveGraph("Do you want to save the changes to " + graphText.getFileName() + " before opening a new graph?"))
{
try
{
if(graphText.open())
{
openGraphHelper();
}
}
catch(FileNotFoundException fnfe)
{
JOptionPane.showMessageDialog(this, fnfe.getMessage(), "Error Opening File", JOptionPane.ERROR_MESSAGE);
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Opening File", JOptionPane.ERROR_MESSAGE);
}
}
}
public void openGraph(String fileName)
{
if(askToSaveGraph("Do you want to save the changes to " + graphText.getFileName() + " before opening a new graph?"))
{
try
{
if(graphText.open(new File(fileName)))
{
openGraphHelper();
}
}
catch(FileNotFoundException fnfe)
{
JOptionPane.showMessageDialog(this, fnfe.getMessage(), "Error Opening File", JOptionPane.ERROR_MESSAGE);
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Opening File", JOptionPane.ERROR_MESSAGE);
}
}
}
private void openResultsHelper()
{
closeText();
closeVisualization();
setSelectedIndex(indexOfComponent(graphResults));
setEnabledAt(indexOfComponent(graphResults), true);
String error = "";
String graphFile = graphResults.getGraphFileName();
String dotFile = graphResults.getDotFileName();
if(dotFile != null)
{
graphVisualization.open(graphResults.getDotFileName());
setEnabledAt(indexOfComponent(graphVisualization), true);
}
else
{
if(!error.equals(""))
{
error += "\n";
}
error += "Unable to determine the name of the dot output file. ";
}
if(graphFile != null)
{
System.out.println("Try to open " + graphFile);
try
{
graphText.open(new File(graphFile));
setEnabledAt(indexOfComponent(graphText), true);
}
catch(Exception e)
{
if(!error.equals(""))
{
error += "\n";
}
error += "Unable to open graph input file (" + e.getMessage() + "). ";
}
}
else
{
if(!error.equals(""))
{
error += "\n";
}
error += "Unable to determine the name of the graph input file. ";
}
if(!error.equals(""))
{
JOptionPane.showMessageDialog(this, error, "Error Reading Results.", JOptionPane.ERROR_MESSAGE);
}
}
public void openResults()
{
if(askToSaveGraph("Do you want to save the changes to " + graphText.getFileName() + " before opening results?"))
{
try
{
if(graphResults.open())
{
openResultsHelper();
}
}
catch(FileNotFoundException fnfe)
{
JOptionPane.showMessageDialog(this, fnfe.getMessage(), "Error Opening Results File", JOptionPane.ERROR_MESSAGE);
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Opening Results File", JOptionPane.ERROR_MESSAGE);
}
}
}
public void openResults(String resultFile)
{
if(askToSaveGraph("Do you want to save the changes to " + graphText.getFileName() + " before opening results?"))
{
try
{
if(graphResults.open(new File(resultFile)))
{
openResultsHelper();
}
}
catch(FileNotFoundException fnfe)
{
JOptionPane.showMessageDialog(this, fnfe.getMessage(), "Error Opening Results File", JOptionPane.ERROR_MESSAGE);
}
catch(IOException ioe)
{
JOptionPane.showMessageDialog(this, ioe.getMessage(), "Error Opening Results File", JOptionPane.ERROR_MESSAGE);
}
}
}
public void closeResults()
{
graphResults.close();
setEnabledAt(indexOfComponent(graphResults), false);
}
public void closeVisualization()
{
graphVisualization.close();
setEnabledAt(indexOfComponent(graphVisualization), false);
}
public void closeText()
{
graphText.close();
setEnabledAt(indexOfComponent(graphText), false);
}
private String createDotFile(String graphFileName) throws IOException
{
List<String> cmd = new Vector<String>();
String graph2dot;
try
{
// Look up the graph-to-dot converter from the system property; fall back to the
// compiled-in default if the property is unset or cannot be read.
graph2dot = System.getProperty(Preferences.GRAPH2DOT);
if(graph2dot == null)
{
throw new SecurityException();
}
}
catch(SecurityException se)
{
graph2dot = Preferences.GRAPH2DOT;
}
cmd.add(graph2dot);
cmd.add(graphFileName);
String dotName;
File graphFile = new File(graphFileName);
if(graphFile.getParent() == null)
{
dotName = "./results/";
try
{
(new File(dotName)).mkdirs();
}
catch(Exception ex)
{
// can not create directory, ignore
ex.printStackTrace();
}
}
else
{
dotName = graphFile.getParent() + "/results/";
try
{
(new File(dotName)).mkdirs();
}
catch(Exception ex)
{
// can not create directory, ignore
ex.printStackTrace();
}
}
dotName += graphFile.getName();
// debug
//dotName += "__" + GUI.getCurTime() + "__.dot";
// debug override for test - hardcoded
dotName += "test.dot";
cmd.add(dotName);
// run the conversion program
ProcessBuilder processBuilder = new ProcessBuilder(cmd);
Process process;
StringBuffer error = new StringBuffer();
try
{
process = processBuilder.start();
process.waitFor();
BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
String line = reader.readLine();
while(line != null)
{
error.append(line);
error.append("\n");
line = reader.readLine();
}
}
catch(IOException ioe)
{
throw new IOException("graph2dot: " + ioe.getMessage());
}
catch(InterruptedException ie)
{
ie.printStackTrace();
}
if(!(error.toString()).equals(""))
{
throw new IOException(error.toString());
}
return dotName;
}
}
|
|
/*
* Copyright (c) 2014 Oculus Info Inc.
* http://www.oculusinfo.com/
*
* Released under the MIT License.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oculusinfo.binning.util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author jandre
*
*/
public class JsonUtilities {
private static final Logger LOGGER = LoggerFactory.getLogger(JsonUtilities.class);
private static Object getJSONNull () {
try {
return new JSONObject("{a: null}").get("a");
} catch (JSONException e) {
LOGGER.error("Can't come up with JSON null value");
return null;
}
}
private static final Object JSON_NULL = getJSONNull();
/**
* Clone a JSON object and all its child objects
*/
public static JSONObject deepClone (JSONObject source) {
if (null == source) return null;
try {
JSONObject clone = new JSONObject();
String[] keys = JSONObject.getNames(source);
if (null != keys) {
for (String key: keys) {
Object value = source.get(key);
if (value instanceof JSONObject) {
JSONObject valueClone = deepClone((JSONObject) value);
clone.put(key, valueClone);
} else if (value instanceof JSONArray) {
JSONArray valueClone = deepClone((JSONArray) value);
clone.put(key, valueClone);
} else {
clone.put(key, value);
}
}
}
return clone;
} catch (JSONException e) {
LOGGER.error("Weird JSON exception cloning object", e);
return null;
}
}
/**
* Clone a JSON array and all its child objects
*/
public static JSONArray deepClone (JSONArray source) {
if (null == source) return null;
try {
JSONArray clone = new JSONArray();
for (int i=0; i<source.length(); ++i) {
Object value = source.get(i);
if (value instanceof JSONObject) {
JSONObject valueClone = deepClone((JSONObject) value);
clone.put(i, valueClone);
} else if (value instanceof JSONArray) {
JSONArray valueClone = deepClone((JSONArray) value);
clone.put(i, valueClone);
} else {
clone.put(i, value);
}
}
return clone;
} catch (JSONException e) {
LOGGER.error("Weird JSON exception cloning object", e);
return null;
}
}
/**
* Overlays one JSON object, in place, over another, deeply.
*
* @param base The object to alter
* @param overlay The object defining how the base will be altered.
* @return The base object, with the overlay now overlaid upon it.
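* <p>
* Illustrative sketch (the inputs are assumptions):
* <pre>{@code
* JSONObject base    = new JSONObject("{\"a\": 1, \"b\": {\"c\": 2}}");
* JSONObject overlay = new JSONObject("{\"b\": {\"d\": 3}}");
* JsonUtilities.overlayInPlace(base, overlay);
* // base is now {"a": 1, "b": {"c": 2, "d": 3}}
* }</pre>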
*/
public static JSONObject overlayInPlace (JSONObject base, JSONObject overlay) {
if (null == overlay) return base;
if (null == base) return deepClone(overlay);
try {
for (String key: JSONObject.getNames(overlay)) {
Object value = overlay.get(key);
if (value instanceof JSONObject) {
if (base.has(key) && base.get(key) instanceof JSONObject) {
overlayInPlace((JSONObject) base.get(key), (JSONObject) value);
} else {
base.put(key, deepClone((JSONObject) value));
}
} else if (value instanceof JSONArray) {
if (base.has(key) && base.get(key) instanceof JSONArray) {
base.put(key, overlay((JSONArray) base.get(key), (JSONArray) value));
} else {
base.put(key, deepClone((JSONArray) value));
}
} else {
base.put(key, value);
}
}
return base;
} catch (JSONException e) {
LOGGER.error("Weird JSON exception cloning object", e);
return null;
}
}
/**
* Overlays one JSON array over another, deeply. This does not work in
* place, but passes back a new array
*
* @param base
* The array to alter
* @param overlay
* The array defining how the base will be altered.
* @return The base array, with the overlay now overlaid upon it.
*/
public static JSONArray overlay (JSONArray base, JSONArray overlay) {
if (null == overlay) return base;
if (null == base) return deepClone(overlay);
try {
JSONArray result = new JSONArray();
// Overlay elements in both or just in the overlay
for (int i=0; i<overlay.length(); ++i) {
Object value = overlay.get(i);
if (JSON_NULL.equals(value)) {
// Null array element; ignore it, don't overlay
Object baseValue = base.get(i);
if (baseValue instanceof JSONObject) {
result.put(i, deepClone((JSONObject) baseValue));
} else if (baseValue instanceof JSONArray) {
result.put(i, deepClone((JSONArray) baseValue));
} else {
result.put(i, baseValue);
}
} else if (value instanceof JSONObject) {
if (base.length() > i && base.get(i) instanceof JSONObject) {
result.put(i, overlayInPlace((JSONObject) base.get(i), (JSONObject) value));
} else {
result.put(i, deepClone((JSONObject) value));
}
} else if (value instanceof JSONArray) {
if (base.length() > i && base.get(i) instanceof JSONArray) {
result.put(i, overlay((JSONArray) base.get(i), (JSONArray) value));
} else {
result.put(i, deepClone((JSONArray) value));
}
} else {
result.put(i, value);
}
}
return result;
} catch (JSONException e) {
LOGGER.error("Weird JSON exception cloning object", e);
return null;
}
}
/**
* Converts a {@link JSONObject} into a {@link Map} of key-value pairs.
* This iterates through the tree and converts all {@link JSONObject}s
* into their equivalent map, and converts {@link JSONArray}s into
* {@link List}s.
*
* @param jsonObj
* @return
* Returns a map with the same keys and values as the JSON object, with nested
* objects converted to maps and arrays converted to lists.
*/
public static Map<String, Object> jsonObjToMap(JSONObject jsonObj) {
Map<String, Object> map = new HashMap<String, Object>();
Iterator<?> keys = jsonObj.keys();
while (keys.hasNext()) {
String key = keys.next().toString();
Object obj = jsonObj.opt(key);
if (obj instanceof JSONObject) {
map.put(key, jsonObjToMap((JSONObject)obj));
}
else if (obj instanceof JSONArray) {
map.put(key, jsonArrayToList((JSONArray)obj));
}
else {
map.put(key, obj);
}
}
return map;
}
/**
* Converts a {@link JSONArray} into a {@link List} of values.
* @param jsonList
* @return
* Returns a list of values
*/
public static List<Object> jsonArrayToList(JSONArray jsonList) {
int numItems = jsonList.length();
List<Object> list = new ArrayList<Object>(numItems);
for (int i = 0; i < numItems; i++) {
Object obj = jsonList.opt(i);
if (obj instanceof JSONObject) {
list.add(jsonObjToMap((JSONObject)obj));
}
else if (obj instanceof JSONArray) {
list.add(jsonArrayToList((JSONArray)obj));
}
else {
list.add(obj);
}
}
return list;
}
/**
* Converts an object into a number.
* @return
* If the object is already a number then it just casts it.
* If the object is a string, then it parses it as a double.
* Otherwise the number returned is 0.
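* <p>
* For example, {@code getNumber("3.5")} yields 3.5, and {@code getNumber(null)} yields 0.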
*/
public static Number getNumber(Object o) {
Number val = 0;
if (o instanceof Number) {
val = (Number)o;
}
else if (o instanceof String) {
val = Double.valueOf((String)o);
}
else if (o instanceof JSONArray) {
//if the object is an array, then assume it only has one element that is the value
JSONArray arr = (JSONArray)o;
if (arr.length() == 1) {
try {
val = getNumber(arr.get(0));
}
catch (JSONException e) {
val = 0;
}
}
}
return val;
}
/**
* Gets a name to use from an object.
* If the object is a String, then it will treat the string as the name.
* If the object is a {@link JSONObject}, then the name must be a parameter within the object.
* If the object is a {@link JSONArray}, then there can only be a single element, which should
* contain the name.
*
* @param params
* @return
* Returns the name for the object, or null if none can be found.
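* <p>
* For example, {@code getName("foo")} yields "foo", and for a {@link JSONObject} the
* value of its "name" field is returned (or null if that field is absent).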
*/
public static String getName(Object params) {
String name = null;
if (params instanceof String) {
name = (String)params;
}
else if (params instanceof JSONObject) {
JSONObject transformObj = (JSONObject)params;
try {
name = (transformObj.has("name"))? transformObj.getString("name") : null;
}
catch (JSONException e) {
name = null;
}
}
else if (params instanceof JSONArray) {
//if the transform params is an array, then it should only have one parameter.
JSONArray vals = (JSONArray)params;
if (vals.length() == 1) {
try {
name = getName(vals.get(0));
}
catch (JSONException e) {
name = null;
}
}
else {
name = null;
}
}
return name;
}
/**
* Simple getter for a {@link JSONObject} that handles exception handling, and
* returns a default value in case there are any problems.
*
* @param obj
* The {@link JSONObject} to query
* @param keyName
* The String name to query from the json object.
* @param defaultVal
* The default value to use if there are any problems.
* @return
* Returns the double value for the key name, or the default value.
*/
public static double getDoubleOrElse(JSONObject obj, String keyName, double defaultVal) {
double val;
try {
val = (obj.has(keyName))? obj.getDouble(keyName) : defaultVal;
}
catch (JSONException e) {
val = defaultVal;
}
return val;
}
/**
* Transform a JSON object into a properties object, concatenating levels
* into keys using a period.
*
* @param jsonObj
* The JSON object to translate
* @return The same data, in properties form
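* <p>
* Illustrative sketch (the input is an assumption):
* <pre>{@code
* JSONObject json = new JSONObject("{\"db\": {\"port\": 5432, \"hosts\": [\"h1\", \"h2\"]}}");
* Properties props = JsonUtilities.jsonObjToProperties(json);
* // props now contains: db.port=5432, db.hosts.0=h1, db.hosts.1=h2
* }</pre>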
*/
public static Properties jsonObjToProperties (JSONObject jsonObj) {
Properties properties = new Properties();
addProperties(jsonObj, properties, null);
return properties;
}
private static void addProperties (JSONObject object, Properties properties, String keyBase) {
Iterator<?> keys = object.keys();
while (keys.hasNext()) {
String specificKey = keys.next().toString();
Object value = object.opt(specificKey);
String key = (null == keyBase ? "" : keyBase+".") + specificKey;
if (value instanceof JSONObject) {
addProperties((JSONObject) value, properties, key);
} else if (value instanceof JSONArray) {
addProperties((JSONArray) value, properties, key);
} else if (null != value) {
properties.setProperty(key, value.toString());
}
}
}
private static void addProperties (JSONArray array, Properties properties, String keyBase) {
for (int i=0; i<array.length(); ++i) {
String key = (null == keyBase ? "" : keyBase+".")+i;
Object value = array.opt(i);
if (value instanceof JSONObject) {
addProperties((JSONObject) value, properties, key);
} else if (value instanceof JSONArray) {
addProperties((JSONArray) value, properties, key);
} else if (null != value) {
properties.setProperty(key, value.toString());
}
}
}
/**
* Transform a properties object into a JSON object, splitting each period-delimited
* key into nested objects (numeric key segments become array indices).
*
* @param properties
* The properties object to translate
* @return The same data, in JSON form
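* <p>
* Illustrative sketch (the input is an assumption):
* <pre>{@code
* Properties props = new Properties();
* props.setProperty("db.hosts.0", "h1");
* props.setProperty("db.hosts.1", "h2");
* props.setProperty("db.port", "5432");
* JSONObject json = JsonUtilities.propertiesObjToJSON(props);
* // json is now {"db": {"hosts": ["h1", "h2"], "port": "5432"}}
* }</pre>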
*/
public static JSONObject propertiesObjToJSON (Properties properties) {
JSONObject json = new JSONObject();
for (Object keyObj: properties.keySet()) {
String key = keyObj.toString();
try {
addKey(json, key, properties.getProperty(key));
} catch (JSONException e) {
LOGGER.warn("Error transfering property {} from properties file to json", key, e);
}
}
return json;
}
private static void addKey (JSONObject json, String key, String value) throws JSONException {
int keyBreak = key.indexOf(".");
if (-1 == keyBreak) {
// At leaf object.
if (json.has(key)) {
throw new JSONException("Duplicate key "+key);
}
json.put(key, value);
} else {
String keyCAR = key.substring(0, keyBreak);
String keyCDR = key.substring(keyBreak+1);
String keyCADR;
int cdrBreak = keyCDR.indexOf(".");
if (-1 == cdrBreak) {
keyCADR = keyCDR;
} else {
keyCADR = keyCDR.substring(0, cdrBreak);
}
// See if our next element can be an array element.
boolean arrayOk;
try {
Integer.parseInt(keyCADR);
arrayOk = true;
} catch (NumberFormatException e) {
arrayOk = false;
}
if (json.has(keyCAR)) {
Object elt = json.get(keyCAR);
if (elt instanceof JSONArray) {
JSONArray arrayElt = (JSONArray) elt;
if (arrayOk) {
addKey(arrayElt, keyCDR, value);
} else {
JSONObject arrayTrans = new JSONObject();
for (int i=0; i<arrayElt.length(); ++i) {
arrayTrans.put(""+i, arrayElt.get(i));
}
json.put(keyCAR, arrayTrans);
addKey(arrayTrans, keyCDR, value);
}
} else if (elt instanceof JSONObject) {
addKey((JSONObject) elt, keyCDR, value);
} else {
throw new JSONException("Attempt to put both object and value in JSON object at key "+keyCAR);
}
} else {
if (arrayOk) {
JSONArray arrayElt = new JSONArray();
json.put(keyCAR, arrayElt);
addKey(arrayElt, keyCDR, value);
} else {
JSONObject elt = new JSONObject();
json.put(keyCAR, elt);
addKey(elt, keyCDR, value);
}
}
}
}
private static void addKey (JSONArray json, String key, String value) throws JSONException {
int keyBreak = key.indexOf(".");
if (-1 == keyBreak) {
// At leaf object.
int index = Integer.parseInt(key);
json.put(index, value);
} else {
String keyCAR = key.substring(0, keyBreak);
String keyCDR = key.substring(keyBreak+1);
String keyCADR;
int cdrBreak = keyCDR.indexOf(".");
if (-1 == cdrBreak) {
keyCADR = keyCDR;
} else {
keyCADR = keyCDR.substring(0, cdrBreak);
}
// See if our next element can be an array element.
boolean arrayOk;
try {
Integer.parseInt(keyCADR);
arrayOk = true;
} catch (NumberFormatException e) {
arrayOk = false;
}
int index = Integer.parseInt(keyCAR);
Object raw;
try {
raw = json.get(index);
} catch (JSONException e) {
raw = null;
}
if (raw instanceof JSONArray) {
JSONArray arrayElt = (JSONArray) raw;
if (arrayOk) {
addKey(arrayElt, keyCDR, value);
} else {
JSONObject arrayTrans = new JSONObject();
for (int i=0; i<arrayElt.length(); ++i) {
arrayTrans.put(""+i, arrayElt.get(i));
}
json.put(index, arrayTrans);
addKey(arrayTrans, keyCDR, value);
}
} else if (raw instanceof JSONObject) {
addKey((JSONObject) raw, keyCDR, value);
} else {
if (arrayOk) {
JSONArray arrayElt = new JSONArray();
json.put(index, arrayElt);
addKey(arrayElt, keyCDR, value);
} else {
JSONObject elt = new JSONObject();
json.put(index, elt);
addKey(elt, keyCDR, value);
}
}
}
}
}
|
|
package apple.authenticationservices;
import apple.NSObject;
import apple.authenticationservices.protocol.ASAuthorizationControllerDelegate;
import apple.authenticationservices.protocol.ASAuthorizationControllerPresentationContextProviding;
import apple.foundation.NSArray;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
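/**
* Binding for the AuthenticationServices ASAuthorizationController class.
*
* Illustrative usage (a sketch; the request array, delegate and presentation context
* provider below are assumptions, not part of this binding):
* <pre>{@code
* ASAuthorizationController controller =
*         ASAuthorizationController.alloc().initWithAuthorizationRequests(requests);
* controller.setDelegate(delegate);                            // receives success/failure callbacks
* controller.setPresentationContextProvider(contextProvider);  // supplies UI context when needed
* controller.performRequests();                                // starts the authorization flows
* }</pre>
*/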
@Generated
@Library("AuthenticationServices")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class ASAuthorizationController extends NSObject {
static {
NatJ.register();
}
@Generated
protected ASAuthorizationController(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
@Generated
@Owned
@Selector("alloc")
public static native ASAuthorizationController alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native ASAuthorizationController allocWithZone(VoidPtr zone);
/**
* Authorization requests that are being serviced by this controller
*/
@Generated
@Selector("authorizationRequests")
public native NSArray<? extends ASAuthorizationRequest> authorizationRequests();
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
/**
* This delegate will be invoked upon completion of the authorization indicating success or failure.
* Delegate is required to receive the results of authorization.
*/
@Generated
@Selector("delegate")
@MappedReturn(ObjCObjectMapper.class)
public native ASAuthorizationControllerDelegate delegate();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("init")
public native ASAuthorizationController init();
/**
* Initialize the controller with authorization requests.
*
* @param authorizationRequests At least one request should be provided. Requests of the same type may be honored in first-in, first-out order
*/
@Generated
@Selector("initWithAuthorizationRequests:")
public native ASAuthorizationController initWithAuthorizationRequests(
NSArray<? extends ASAuthorizationRequest> authorizationRequests);
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
@Generated
@Owned
@Selector("new")
public static native ASAuthorizationController new_objc();
/**
* Initiate the authorization flows. Upon completion, the delegate will be called with either success or failure.
* Certain authorization flows may require a presentation context; the presentationContextProvider will be called to provide it.
* <p>
* The instance will remain retained until the user completes the flow and the delegate callback is made.
*/
@Generated
@Selector("performRequests")
public native void performRequests();
/**
* This delegate will be invoked upon needing a presentation context to display authorization UI.
*/
@Generated
@Selector("presentationContextProvider")
@MappedReturn(ObjCObjectMapper.class)
public native ASAuthorizationControllerPresentationContextProviding presentationContextProvider();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
/**
* This delegate will be invoked upon completion of the authorization indicating success or failure.
* Delegate is required to receive the results of authorization.
*/
@Generated
@Selector("setDelegate:")
public native void setDelegate_unsafe(@Mapped(ObjCObjectMapper.class) ASAuthorizationControllerDelegate value);
/**
* This delegate will be invoked upon completion of the authorization indicating success or failure.
* Delegate is required to receive the results of authorization.
*/
@Generated
public void setDelegate(@Mapped(ObjCObjectMapper.class) ASAuthorizationControllerDelegate value) {
Object __old = delegate();
if (value != null) {
org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
}
setDelegate_unsafe(value);
if (__old != null) {
org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
}
}
/**
* This delegate will be invoked upon needing a presentation context to display authorization UI.
*/
@Generated
@Selector("setPresentationContextProvider:")
public native void setPresentationContextProvider_unsafe(
@Mapped(ObjCObjectMapper.class) ASAuthorizationControllerPresentationContextProviding value);
/**
* This delegate will be invoked upon needing a presentation context to display authorization UI.
*/
@Generated
public void setPresentationContextProvider(
@Mapped(ObjCObjectMapper.class) ASAuthorizationControllerPresentationContextProviding value) {
Object __old = presentationContextProvider();
if (value != null) {
org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
}
setPresentationContextProvider_unsafe(value);
if (__old != null) {
org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
}
}
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("version")
@NInt
public static native long version_static();
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.service.persistent;
import io.netty.buffer.ByteBuf;
import io.prometheus.client.Gauge;
import java.io.IOException;
import java.time.Clock;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.bookkeeper.mledger.Position;
import org.apache.bookkeeper.mledger.impl.PositionImpl;
import org.apache.pulsar.broker.service.Topic;
import org.apache.pulsar.common.api.proto.PulsarApi.CommandAck.AckType;
import org.apache.pulsar.common.api.proto.PulsarApi.CommandSubscribe.InitialPosition;
import org.apache.pulsar.common.api.proto.PulsarMarkers.ClusterMessageId;
import org.apache.pulsar.common.api.proto.PulsarMarkers.MarkerType;
import org.apache.pulsar.common.api.proto.PulsarMarkers.MessageIdData;
import org.apache.pulsar.common.api.proto.PulsarMarkers.ReplicatedSubscriptionsSnapshot;
import org.apache.pulsar.common.api.proto.PulsarMarkers.ReplicatedSubscriptionsSnapshotRequest;
import org.apache.pulsar.common.api.proto.PulsarMarkers.ReplicatedSubscriptionsSnapshotResponse;
import org.apache.pulsar.common.api.proto.PulsarMarkers.ReplicatedSubscriptionsUpdate;
import org.apache.pulsar.common.protocol.Markers;
/**
* Encapsulate all the logic of replicated subscriptions tracking for a given topic.
*/
@Slf4j
public class ReplicatedSubscriptionsController implements AutoCloseable, Topic.PublishContext {
private final PersistentTopic topic;
private final String localCluster;
private final ScheduledFuture<?> timer;
private final ConcurrentMap<String, ReplicatedSubscriptionsSnapshotBuilder> pendingSnapshots = new ConcurrentHashMap<>();
private final static Gauge pendingSnapshotsMetric = Gauge
.build("pulsar_replicated_subscriptions_pending_snapshots",
"Counter of currently pending snapshots")
.register();
public ReplicatedSubscriptionsController(PersistentTopic topic, String localCluster) {
this.topic = topic;
this.localCluster = localCluster;
timer = topic.getBrokerService().pulsar().getExecutor()
.scheduleAtFixedRate(this::startNewSnapshot, 0,
topic.getBrokerService().pulsar().getConfiguration()
.getReplicatedSubscriptionsSnapshotFrequencyMillis(),
TimeUnit.MILLISECONDS);
}
public void receivedReplicatedSubscriptionMarker(Position position, int markerType, ByteBuf payload) {
try {
switch (markerType) {
case MarkerType.REPLICATED_SUBSCRIPTION_SNAPSHOT_REQUEST_VALUE:
receivedSnapshotRequest(Markers.parseReplicatedSubscriptionsSnapshotRequest(payload));
break;
case MarkerType.REPLICATED_SUBSCRIPTION_SNAPSHOT_RESPONSE_VALUE:
receivedSnapshotResponse(position, Markers.parseReplicatedSubscriptionsSnapshotResponse(payload));
break;
case MarkerType.REPLICATED_SUBSCRIPTION_UPDATE_VALUE:
receiveSubscriptionUpdated(Markers.parseReplicatedSubscriptionsUpdate(payload));
break;
default:
// Ignore
}
} catch (IOException e) {
log.warn("[{}] Failed to parse marker: {}", topic.getName(), e);
}
}
public void localSubscriptionUpdated(String subscriptionName, ReplicatedSubscriptionsSnapshot snapshot) {
if (log.isDebugEnabled()) {
log.debug("[{}][{}] Updating subscription to snapshot {}", topic, subscriptionName,
snapshot.getClustersList().stream()
.map(cmid -> String.format("%s -> %d:%d", cmid.getCluster(),
cmid.getMessageId().getLedgerId(), cmid.getMessageId().getEntryId()))
.collect(Collectors.toList()));
}
Map<String, MessageIdData> clusterIds = new TreeMap<>();
for (int i = 0, size = snapshot.getClustersCount(); i < size; i++) {
ClusterMessageId cmid = snapshot.getClusters(i);
clusterIds.put(cmid.getCluster(), cmid.getMessageId());
}
ByteBuf subscriptionUpdate = Markers.newReplicatedSubscriptionsUpdate(subscriptionName, clusterIds);
topic.publishMessage(subscriptionUpdate, this);
}
private void receivedSnapshotRequest(ReplicatedSubscriptionsSnapshotRequest request) {
// Send response containing the current last written message id. The response
// marker we're publishing locally and then replicating will have a higher
// message id.
PositionImpl lastMsgId = (PositionImpl) topic.getLastMessageId();
if (log.isDebugEnabled()) {
log.debug("[{}] Received snapshot request. Last msg id: {}", topic.getName(), lastMsgId);
}
ByteBuf marker = Markers.newReplicatedSubscriptionsSnapshotResponse(
request.getSnapshotId(),
request.getSourceCluster(),
localCluster,
lastMsgId.getLedgerId(), lastMsgId.getEntryId());
topic.publishMessage(marker, this);
}
private void receivedSnapshotResponse(Position position, ReplicatedSubscriptionsSnapshotResponse response) {
String snapshotId = response.getSnapshotId();
ReplicatedSubscriptionsSnapshotBuilder builder = pendingSnapshots.get(snapshotId);
if (builder == null) {
if (log.isDebugEnabled()) {
log.debug("[{}] Received late reply for timed-out snapshot {} from {}", topic.getName(), snapshotId,
response.getCluster().getCluster());
}
return;
}
builder.receivedSnapshotResponse(position, response);
}
private void receiveSubscriptionUpdated(ReplicatedSubscriptionsUpdate update) {
MessageIdData updatedMessageId = null;
for (int i = 0, size = update.getClustersCount(); i < size; i++) {
ClusterMessageId cmid = update.getClusters(i);
if (localCluster.equals(cmid.getCluster())) {
updatedMessageId = cmid.getMessageId();
}
}
if (updatedMessageId == null) {
// No updates for this cluster, ignore
return;
}
Position pos = new PositionImpl(updatedMessageId.getLedgerId(), updatedMessageId.getEntryId());
if (log.isDebugEnabled()) {
log.debug("[{}][{}] Received update for subscription to {}", topic, update.getSubscriptionName(), pos);
}
PersistentSubscription sub = topic.getSubscription(update.getSubscriptionName());
if (sub != null) {
sub.acknowledgeMessage(Collections.singletonList(pos), AckType.Cumulative, Collections.emptyMap());
} else {
            // Subscription doesn't exist. We need to force the creation of the subscription in this cluster,
            // because the update tells us it is already part of a replicated subscription in another cluster.
            log.info("[{}][{}] Creating subscription at {}:{} after receiving update from replicated subscription",
                    topic, update.getSubscriptionName(), updatedMessageId.getLedgerId(), updatedMessageId.getEntryId());
topic.createSubscription(update.getSubscriptionName(),
InitialPosition.Latest, true /* replicateSubscriptionState */);
}
}
private void startNewSnapshot() {
cleanupTimedOutSnapshots();
AtomicBoolean anyReplicatorDisconnected = new AtomicBoolean();
topic.getReplicators().forEach((cluster, replicator) -> {
if (!replicator.isConnected()) {
anyReplicatorDisconnected.set(true);
}
});
if (anyReplicatorDisconnected.get()) {
// Do not attempt to create snapshot when some of the clusters are not reachable
return;
}
pendingSnapshotsMetric.inc();
ReplicatedSubscriptionsSnapshotBuilder builder = new ReplicatedSubscriptionsSnapshotBuilder(this,
topic.getReplicators().keys(), topic.getBrokerService().pulsar().getConfiguration(), Clock.systemUTC());
pendingSnapshots.put(builder.getSnapshotId(), builder);
builder.start();
}
private void cleanupTimedOutSnapshots() {
Iterator<Map.Entry<String, ReplicatedSubscriptionsSnapshotBuilder>> it = pendingSnapshots.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, ReplicatedSubscriptionsSnapshotBuilder> entry = it.next();
if (entry.getValue().isTimedOut()) {
if (log.isDebugEnabled()) {
log.debug("[{}] Snapshot creation timed out for {}", topic.getName(), entry.getKey());
}
pendingSnapshotsMetric.dec();
it.remove();
}
}
}
void snapshotCompleted(String snapshotId) {
pendingSnapshots.remove(snapshotId);
pendingSnapshotsMetric.dec();
}
void writeMarker(ByteBuf marker) {
topic.publishMessage(marker, this);
}
/**
* From Topic.PublishContext
*/
@Override
public void completed(Exception e, long ledgerId, long entryId) {
        // Nothing to do in case of publish errors: the retry logic is applied upstream, since a snapshot
        // that is not completed will simply time out and a new one will be started.
if (log.isDebugEnabled()) {
log.debug("[{}] Published marker at {}:{}. Exception: {}", topic.getName(), ledgerId, entryId, e);
}
}
PersistentTopic topic() {
return topic;
}
String localCluster() {
return localCluster;
}
@Override
public void close() {
timer.cancel(true);
}
}
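/*
 * Self-contained sketch (not part of the Pulsar sources above): the pending snapshot bookkeeping
 * boils down to "insert on start, drop on completion or timeout". The Snapshot type and the
 * timeout value used by callers are hypothetical; only the map lifecycle mirrors the controller.
 */
class PendingSnapshotSketch {
    static class Snapshot {
        final long startedAtMillis = System.currentTimeMillis();

        boolean isTimedOut(long timeoutMillis) {
            return System.currentTimeMillis() - startedAtMillis > timeoutMillis;
        }
    }

    private final java.util.concurrent.ConcurrentMap<String, Snapshot> pending =
            new java.util.concurrent.ConcurrentHashMap<>();

    void start(String snapshotId) {
        pending.put(snapshotId, new Snapshot());
    }

    void completed(String snapshotId) {
        pending.remove(snapshotId);
    }

    // mirrors cleanupTimedOutSnapshots(): iterate and drop entries past their deadline
    void cleanup(long timeoutMillis) {
        pending.entrySet().removeIf(e -> e.getValue().isTimedOut(timeoutMillis));
    }
}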
|
|
package org.apache.cassandra.cql3.validation.operations;
import java.util.Arrays;
import org.junit.BeforeClass;
import org.junit.Test;
import static junit.framework.Assert.assertNull;
import static org.junit.Assert.assertEquals;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.dht.ByteOrderedPartitioner;
/**
* SELECT statement tests that require a ByteOrderedPartitioner
*/
public class SelectOrderedPartitionerTest extends CQLTester
{
@BeforeClass
public static void setUp()
{
DatabaseDescriptor.setPartitioner(ByteOrderedPartitioner.instance);
}
@Test
public void testTokenFunctionWithSingleColumnPartitionKey() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int PRIMARY KEY, b text)");
execute("INSERT INTO %s (a, b) VALUES (0, 'a')");
assertRows(execute("SELECT * FROM %s WHERE token(a) >= token(?)", 0), row(0, "a"));
assertRows(execute("SELECT * FROM %s WHERE token(a) >= token(?) and token(a) < token(?)", 0, 1), row(0, "a"));
assertInvalid("SELECT * FROM %s WHERE token(a) > token(?)", "a");
assertInvalidMessage("The token() function must contains only partition key components",
"SELECT * FROM %s WHERE token(a, b) >= token(?, ?)", "b", 0);
assertInvalidMessage("More than one restriction was found for the start bound on a",
"SELECT * FROM %s WHERE token(a) >= token(?) and token(a) >= token(?)", 0, 1);
assertInvalidMessage("Columns \"a\" cannot be restricted by both an equality and an inequality relation",
"SELECT * FROM %s WHERE token(a) >= token(?) and token(a) = token(?)", 0, 1);
assertInvalidSyntax("SELECT * FROM %s WHERE token(a) = token(?) and token(a) IN (token(?))", 0, 1);
assertInvalidMessage("More than one restriction was found for the start bound on a",
"SELECT * FROM %s WHERE token(a) > token(?) AND token(a) > token(?)", 1, 2);
assertInvalidMessage("More than one restriction was found for the end bound on a",
"SELECT * FROM %s WHERE token(a) <= token(?) AND token(a) < token(?)", 1, 2);
assertInvalidMessage("Columns \"a\" cannot be restricted by both an equality and an inequality relation",
"SELECT * FROM %s WHERE token(a) > token(?) AND token(a) = token(?)", 1, 2);
assertInvalidMessage("a cannot be restricted by more than one relation if it includes an Equal",
"SELECT * FROM %s WHERE token(a) = token(?) AND token(a) > token(?)", 1, 2);
}
@Test
public void testTokenFunctionWithPartitionKeyAndClusteringKeyArguments() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int, b text, PRIMARY KEY (a, b))");
assertInvalidMessage("The token() function must contains only partition key components",
"SELECT * FROM %s WHERE token(a, b) > token(0, 'c')");
}
@Test
public void testTokenFunctionWithMultiColumnPartitionKey() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int, b text, PRIMARY KEY ((a, b)))");
execute("INSERT INTO %s (a, b) VALUES (0, 'a')");
execute("INSERT INTO %s (a, b) VALUES (0, 'b')");
execute("INSERT INTO %s (a, b) VALUES (0, 'c')");
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?)", 0, "a"),
row(0, "b"),
row(0, "c"));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?) and token(a, b) < token(?, ?)",
0, "a",
0, "d"),
row(0, "b"),
row(0, "c"));
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a) > token(?) and token(b) > token(?)", 0, "a");
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a) > token(?, ?) and token(a) < token(?, ?) and token(b) > token(?, ?) ",
0, "a", 0, "d", 0, "a");
assertInvalidMessage("The token function arguments must be in the partition key order: a, b",
"SELECT * FROM %s WHERE token(b, a) > token(0, 'c')");
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a, b) > token(?, ?) and token(b) < token(?, ?)", 0, "a", 0, "a");
assertInvalidMessage("The token() function must be applied to all partition key components or none of them",
"SELECT * FROM %s WHERE token(a) > token(?, ?) and token(b) > token(?, ?)", 0, "a", 0, "a");
}
@Test
public void testSingleColumnPartitionKeyWithTokenNonTokenRestrictionsMix() throws Throwable
{
createTable("CREATE TABLE %s (a int primary key, b int)");
execute("INSERT INTO %s (a, b) VALUES (0, 0);");
execute("INSERT INTO %s (a, b) VALUES (1, 1);");
execute("INSERT INTO %s (a, b) VALUES (2, 2);");
execute("INSERT INTO %s (a, b) VALUES (3, 3);");
execute("INSERT INTO %s (a, b) VALUES (4, 4);");
assertRows(execute("SELECT * FROM %s WHERE a IN (?, ?);", 1, 3),
row(1, 1),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a)> token(?) and token(a) <= token(?);", 1, 3),
row(2, 2),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a)= token(2);"),
row(2, 2));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a IN (?, ?);",
1, 3, 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) < token(?) AND token(a) >= token(?) AND a IN (?, ?);",
1, 3, 1, 3),
row(3, 3));
assertInvalidMessage("Only EQ and IN relation are supported on the partition key (unless you use the token() function)",
"SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a > ?;", 1, 3, 1);
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a IN ?;",
1, 3, Arrays.asList(1, 3)),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a = ?;", 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE a = ? AND token(a) > token(?);", 3, 1),
row(3, 3));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a = ?;", 3, 1));
assertEmpty(execute("SELECT * FROM %s WHERE a = ? AND token(a) > token(?);", 1, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a IN (?, ?);", 2, 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) < token(?) AND a IN (?, ?) ;", 2, 5, 1, 3),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE a IN (?, ?) AND token(a) > token(?) AND token(a) < token(?);", 1, 3, 2, 5),
row(3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) > token(?) AND a IN (?, ?) AND token(a) < token(?);", 2, 1, 3, 5),
row(3, 3));
assertEmpty(execute("SELECT * FROM %s WHERE a IN (?, ?) AND token(a) > token(?);", 1, 3, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) <= token(?) AND a = ?;", 2, 2),
row(2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) <= token(?) AND a = ?;", 2, 3));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) = token(?) AND a = ?;", 2, 3));
assertRows(execute("SELECT * FROM %s WHERE token(a) >= token(?) AND token(a) <= token(?) AND a = ?;", 2, 2, 2),
row(2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) >= token(?) AND token(a) < token(?) AND a = ?;", 2, 2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) <= token(?) AND a = ?;", 2, 2, 2));
assertEmpty(execute("SELECT * FROM %s WHERE token(a) > token(?) AND token(a) < token(?) AND a = ?;", 2, 2, 2));
}
@Test
public void testMultiColumnPartitionKeyWithTokenNonTokenRestrictionsMix() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, primary key((a, b)))");
execute("INSERT INTO %s (a, b, c) VALUES (0, 0, 0);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 1, 1);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 2, 2);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 0, 3);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 1, 4);");
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?);", 0, 0),
row(0, 1, 1),
row(0, 2, 2),
row(1, 0, 3),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?) AND a = ? AND b IN (?, ?);",
0, 0, 1, 0, 1),
row(1, 0, 3),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE a = ? AND token(a, b) > token(?, ?) AND b IN (?, ?);",
1, 0, 0, 0, 1),
row(1, 0, 3),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) > token(?, ?) AND a = ?;",
0, 1, 0, 0, 1),
row(1, 0, 3),
row(1, 1, 4));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) > token(?, ?) AND token(a, b) < token(?, ?) AND a = ?;",
0, 1, 0, 0, 0, 0, 1));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) > token(?, ?) AND token(a, b) <= token(?, ?) AND a = ?;",
0, 1, 0, 0, 0, 0, 1));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) >= token(?, ?) AND token(a, b) < token(?, ?) AND a = ?;",
0, 1, 0, 0, 0, 0, 1));
assertEmpty(execute("SELECT * FROM %s WHERE b IN (?, ?) AND token(a, b) = token(?, ?) AND a = ?;",
0, 1, 0, 0, 1));
assertInvalidMessage("Partition key parts: b must be restricted as other parts are",
"SELECT * FROM %s WHERE token(a, b) > token(?, ?) AND a = ?;", 0, 0, 1);
}
@Test
public void testMultiColumnPartitionKeyWithIndexAndTokenNonTokenRestrictionsMix() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, primary key((a, b)))");
createIndex("CREATE INDEX ON %s(b)");
createIndex("CREATE INDEX ON %s(c)");
execute("INSERT INTO %s (a, b, c) VALUES (0, 0, 0);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 1, 1);");
execute("INSERT INTO %s (a, b, c) VALUES (0, 2, 2);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 0, 3);");
execute("INSERT INTO %s (a, b, c) VALUES (1, 1, 4);");
assertRows(execute("SELECT * FROM %s WHERE b = ?;", 1),
row(0, 1, 1),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE token(a, b) > token(?, ?) AND b = ?;", 0, 0, 1),
row(0, 1, 1),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE b = ? AND token(a, b) > token(?, ?);", 1, 0, 0),
row(0, 1, 1),
row(1, 1, 4));
assertRows(execute("SELECT * FROM %s WHERE b = ? AND token(a, b) > token(?, ?) and c = ? ALLOW FILTERING;", 1, 0, 0, 4),
row(1, 1, 4));
}
@Test
public void testTokenFunctionWithCompoundPartitionAndClusteringCols() throws Throwable
{
createTable("CREATE TABLE IF NOT EXISTS %s (a int, b int, c int, d int, PRIMARY KEY ((a, b), c, d))");
// just test that the queries don't error
execute("SELECT * FROM %s WHERE token(a, b) > token(0, 0) AND c > 10 ALLOW FILTERING;");
execute("SELECT * FROM %s WHERE c > 10 AND token(a, b) > token(0, 0) ALLOW FILTERING;");
execute("SELECT * FROM %s WHERE token(a, b) > token(0, 0) AND (c, d) > (0, 0) ALLOW FILTERING;");
execute("SELECT * FROM %s WHERE (c, d) > (0, 0) AND token(a, b) > token(0, 0) ALLOW FILTERING;");
}
/**
* Test undefined columns
* migrated from cql_tests.py:TestCQL.undefined_column_handling_test()
*/
@Test
public void testUndefinedColumns() throws Throwable
{
createTable("CREATE TABLE %s (k int PRIMARY KEY, v1 int, v2 int,)");
execute("INSERT INTO %s (k, v1, v2) VALUES (0, 0, 0)");
execute("INSERT INTO %s (k, v1) VALUES (1, 1)");
execute("INSERT INTO %s (k, v1, v2) VALUES (2, 2, 2)");
Object[][] rows = getRows(execute("SELECT v2 FROM %s"));
assertEquals(0, rows[0][0]);
assertEquals(null, rows[1][0]);
assertEquals(2, rows[2][0]);
rows = getRows(execute("SELECT v2 FROM %s WHERE k = 1"));
assertEquals(1, rows.length);
assertNull(rows[0][0]);
}
/**
* Check table with only a PK (#4361),
* migrated from cql_tests.py:TestCQL.only_pk_test()
*/
@Test
public void testPrimaryKeyOnly() throws Throwable
{
createTable("CREATE TABLE %s (k int, c int, PRIMARY KEY (k, c))");
for (int k = 0; k < 2; k++)
for (int c = 0; c < 2; c++)
execute("INSERT INTO %s (k, c) VALUES (?, ?)", k, c);
assertRows(execute("SELECT * FROM %s"),
row(0, 0),
row(0, 1),
row(1, 0),
row(1, 1));
// Check for dense tables too
createTable(" CREATE TABLE %s (k int, c int, PRIMARY KEY (k, c)) WITH COMPACT STORAGE");
for (int k = 0; k < 2; k++)
for (int c = 0; c < 2; c++)
execute("INSERT INTO %s (k, c) VALUES (?, ?)", k, c);
assertRows(execute("SELECT * FROM %s"),
row(0, 0),
row(0, 1),
row(1, 0),
row(1, 1));
}
/**
* Migrated from cql_tests.py:TestCQL.composite_index_with_pk_test()
*/
@Test
public void testCompositeIndexWithPK() throws Throwable
{
createTable("CREATE TABLE %s (blog_id int, time1 int, time2 int, author text, content text, PRIMARY KEY (blog_id, time1, time2))");
createIndex("CREATE INDEX ON %s(author)");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 1, 0, 0, "foo", "bar1");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 1, 0, 1, "foo", "bar2");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 2, 1, 0, "foo", "baz");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, ?)", 3, 0, 1, "gux", "qux");
assertRows(execute("SELECT blog_id, content FROM %s WHERE author='foo'"),
row(1, "bar1"),
row(1, "bar2"),
row(2, "baz"));
assertRows(execute("SELECT blog_id, content FROM %s WHERE time1 > 0 AND author='foo' ALLOW FILTERING"),
row(2, "baz"));
assertRows(execute("SELECT blog_id, content FROM %s WHERE time1 = 1 AND author='foo' ALLOW FILTERING"),
row(2, "baz"));
assertRows(execute("SELECT blog_id, content FROM %s WHERE time1 = 1 AND time2 = 0 AND author='foo' ALLOW FILTERING"),
row(2, "baz"));
assertEmpty(execute("SELECT content FROM %s WHERE time1 = 1 AND time2 = 1 AND author='foo' ALLOW FILTERING"));
assertEmpty(execute("SELECT content FROM %s WHERE time1 = 1 AND time2 > 0 AND author='foo' ALLOW FILTERING"));
assertInvalid("SELECT content FROM %s WHERE time2 >= 0 AND author='foo'");
assertInvalid("SELECT blog_id, content FROM %s WHERE time1 > 0 AND author='foo'");
assertInvalid("SELECT blog_id, content FROM %s WHERE time1 = 1 AND author='foo'");
assertInvalid("SELECT blog_id, content FROM %s WHERE time1 = 1 AND time2 = 0 AND author='foo'");
assertInvalid("SELECT content FROM %s WHERE time1 = 1 AND time2 = 1 AND author='foo'");
assertInvalid("SELECT content FROM %s WHERE time1 = 1 AND time2 > 0 AND author='foo'");
}
/**
* Test for LIMIT bugs from 4579,
* migrated from cql_tests.py:TestCQL.limit_bugs_test()
*/
@Test
public void testLimitBug() throws Throwable
{
createTable("CREATE TABLE %s (a int, b int, c int, d int, e int, PRIMARY KEY (a, b))");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (1, 1, 1, 1, 1);");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (2, 2, 2, 2, 2);");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (3, 3, 3, 3, 3);");
execute("INSERT INTO %s (a, b, c, d, e) VALUES (4, 4, 4, 4, 4);");
assertRows(execute("SELECT * FROM %s"),
row(1, 1, 1, 1, 1),
row(2, 2, 2, 2, 2),
row(3, 3, 3, 3, 3),
row(4, 4, 4, 4, 4));
assertRows(execute("SELECT * FROM %s LIMIT 1"),
row(1, 1, 1, 1, 1));
assertRows(execute("SELECT * FROM %s LIMIT 2"),
row(1, 1, 1, 1, 1),
row(2, 2, 2, 2, 2));
createTable("CREATE TABLE %s (a int primary key, b int, c int,)");
execute("INSERT INTO %s (a, b, c) VALUES (1, 1, 1)");
execute("INSERT INTO %s (a, b, c) VALUES (2, 2, 2)");
execute("INSERT INTO %s (a, b, c) VALUES (3, 3, 3)");
execute("INSERT INTO %s (a, b, c) VALUES (4, 4, 4)");
assertRows(execute("SELECT * FROM %s"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3),
row(4, 4, 4));
assertRows(execute("SELECT * FROM %s LIMIT 1"),
row(1, 1, 1));
assertRows(execute("SELECT * FROM %s LIMIT 2"),
row(1, 1, 1),
row(2, 2, 2));
assertRows(execute("SELECT * FROM %s LIMIT 3"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3));
assertRows(execute("SELECT * FROM %s LIMIT 4"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3),
row(4, 4, 4));
assertRows(execute("SELECT * FROM %s LIMIT 5"),
row(1, 1, 1),
row(2, 2, 2),
row(3, 3, 3),
row(4, 4, 4));
}
/**
* Test for #4612 bug and more generally order by when multiple C* rows are queried
* migrated from cql_tests.py:TestCQL.order_by_multikey_test()
*/
@Test
public void testOrderByMultikey() throws Throwable
{
createTable("CREATE TABLE %s (my_id varchar, col1 int, col2 int, value varchar, PRIMARY KEY (my_id, col1, col2))");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key1', 1, 1, 'a');");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key2', 3, 3, 'a');");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key3', 2, 2, 'b');");
execute("INSERT INTO %s (my_id, col1, col2, value) VALUES ( 'key4', 2, 1, 'b');");
assertRows(execute("SELECT col1 FROM %s WHERE my_id in('key1', 'key2', 'key3') ORDER BY col1"),
row(1), row(2), row(3));
assertRows(execute("SELECT col1, value, my_id, col2 FROM %s WHERE my_id in('key3', 'key4') ORDER BY col1, col2"),
row(2, "b", "key4", 1), row(2, "b", "key3", 2));
assertInvalid("SELECT col1 FROM %s ORDER BY col1");
assertInvalid("SELECT col1 FROM %s WHERE my_id > 'key1' ORDER BY col1");
}
/**
* Migrated from cql_tests.py:TestCQL.composite_index_collections_test()
*/
@Test
public void testIndexOnCompositeWithCollections() throws Throwable
{
createTable("CREATE TABLE %s (blog_id int, time1 int, time2 int, author text, content set<text>, PRIMARY KEY (blog_id, time1, time2))");
createIndex("CREATE INDEX ON %s (author)");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'bar1', 'bar2' })", 1, 0, 0, "foo");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'bar2', 'bar3' })", 1, 0, 1, "foo");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'baz' })", 2, 1, 0, "foo");
execute("INSERT INTO %s (blog_id, time1, time2, author, content) VALUES (?, ?, ?, ?, { 'qux' })", 3, 0, 1, "gux");
assertRows(execute("SELECT blog_id, content FROM %s WHERE author='foo'"),
row(1, set("bar1", "bar2")),
row(1, set("bar2", "bar3")),
row(2, set("baz")));
}
/**
* Migrated from cql_tests.py:TestCQL.truncate_clean_cache_test()
*/
@Test
public void testTruncateWithCaching() throws Throwable
{
createTable("CREATE TABLE %s (k int PRIMARY KEY, v1 int, v2 int,) WITH CACHING = ALL;");
for (int i = 0; i < 3; i++)
execute("INSERT INTO %s (k, v1, v2) VALUES (?, ?, ?)", i, i, i * 2);
assertRows(execute("SELECT v1, v2 FROM %s WHERE k IN (0, 1, 2)"),
row(0, 0),
row(1, 2),
row(2, 4));
execute("TRUNCATE %s");
assertEmpty(execute("SELECT v1, v2 FROM %s WHERE k IN (0, 1, 2)"));
}
/**
* Migrated from cql_tests.py:TestCQL.range_key_ordered_test()
*/
@Test
public void testRangeKey() throws Throwable
{
createTable("CREATE TABLE %s (k int PRIMARY KEY)");
execute("INSERT INTO %s (k) VALUES (-1)");
execute("INSERT INTO %s (k) VALUES ( 0)");
execute("INSERT INTO %s (k) VALUES ( 1)");
assertRows(execute("SELECT * FROM %s"),
row(0),
row(1),
row(-1));
assertInvalid("SELECT * FROM %s WHERE k >= -1 AND k < 1");
}
}
|
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.Fragment;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.common.options.TriState;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/** A java compiler configuration containing the flags required for compilation. */
@Immutable
@SkylarkModule(
name = "java",
doc = "A java compiler configuration.",
category = SkylarkModuleCategory.CONFIGURATION_FRAGMENT
)
public final class JavaConfiguration extends Fragment {
/** Values for the --java_classpath option */
public static enum JavaClasspathMode {
/** Use full transitive classpaths, the default behavior. */
OFF,
/** JavaBuilder computes the reduced classpath before invoking javac. */
JAVABUILDER,
/** Blaze computes the reduced classpath before invoking JavaBuilder. */
BLAZE
}
/** Values for the --experimental_one_version_enforcement option */
public enum OneVersionEnforcementLevel {
/** Don't attempt to check for one version violations (the default) */
OFF,
/**
* Check for one version violations, emit warnings to stderr if any are found, but don't break
* the binary.
*/
WARNING,
/**
* Check for one version violations, emit warnings to stderr if any are found, and break the
* rule if it's found.
*/
ERROR
}
/**
* Values for the --java_optimization_mode option, which controls how Proguard is run over binary
* and test targets. Note that for the moment this has no effect when building library targets.
*/
public static enum JavaOptimizationMode {
/** Proguard is used iff top-level target has {@code proguard_specs} attribute. */
LEGACY,
/**
* No link-time optimizations are applied, regardless of the top-level target's attributes. In
* practice this mode skips Proguard completely, rather than invoking Proguard as a no-op.
*/
NOOP("-dontshrink", "-dontoptimize", "-dontobfuscate"),
/**
* Symbols have different names except where configured not to rename. This mode is primarily
* intended to aid in identifying missing configuration directives that prevent symbols accessed
* reflectively etc. from being renamed or removed.
*/
RENAME("-dontshrink", "-dontoptimize"),
/**
* "Quickly" produce small binary typically without changing code structure. In practice this
* mode removes unreachable code and uses short symbol names except where configured not to
* rename or remove. This mode should build faster than {@link #OPTIMIZE_MINIFY} and may hence
* be preferable during development.
*/
FAST_MINIFY("-dontoptimize"),
/**
* Produce fully optimized binary with short symbol names and unreachable code removed. Unlike
* {@link #FAST_MINIFY}, this mode may apply code transformations, in addition to removing and
* renaming code as the configuration allows, to produce a more compact binary. This mode
* should be preferable for producing and testing release binaries.
*/
OPTIMIZE_MINIFY;
private final String proguardDirectives;
private JavaOptimizationMode(String... donts) {
StringBuilder proguardDirectives = new StringBuilder();
for (String dont : donts) {
checkArgument(dont.startsWith("-dont"), "invalid Proguard directive: %s", dont);
proguardDirectives.append(dont).append('\n');
}
this.proguardDirectives = proguardDirectives.toString();
}
/**
* Returns additional Proguard directives necessary for this mode (can be empty).
*/
public String getImplicitProguardDirectives() {
return proguardDirectives;
}
/**
* Returns true if all affected targets should produce mappings from original to renamed symbol
* names, regardless of the proguard_generate_mapping attribute. This should be the case for
* all modes that force symbols to be renamed. By contrast, the {@link #NOOP} mode will never
* produce a mapping file since no symbols are ever renamed.
*/
public boolean alwaysGenerateOutputMapping() {
switch (this) {
case LEGACY:
case NOOP:
return false;
case RENAME:
case FAST_MINIFY:
case OPTIMIZE_MINIFY:
return true;
default:
throw new AssertionError("Unexpected mode: " + this);
}
}
}
private final ImmutableList<String> commandLineJavacFlags;
private final Label javaLauncherLabel;
private final boolean useIjars;
private final boolean useHeaderCompilation;
private final boolean headerCompilationDirectClasspath;
private final boolean headerCompilationDisableJavacFallback;
private final boolean generateJavaDeps;
private final boolean strictDepsJavaProtos;
private final OneVersionEnforcementLevel enforceOneVersion;
private final JavaClasspathMode javaClasspath;
private final ImmutableList<String> defaultJvmFlags;
private final ImmutableList<String> checkedConstraints;
private final StrictDepsMode strictJavaDeps;
private final Label proguardBinary;
private final ImmutableList<Label> extraProguardSpecs;
private final TriState bundleTranslations;
private final ImmutableList<Label> translationTargets;
private final JavaOptimizationMode javaOptimizationMode;
private final ImmutableMap<String, Optional<Label>> bytecodeOptimizers;
private final Label javaToolchain;
private final boolean explicitJavaTestDeps;
private final boolean experimentalTestRunner;
// TODO(dmarting): remove once we have a proper solution for #2539
private final boolean legacyBazelJavaTest;
JavaConfiguration(
boolean generateJavaDeps,
List<String> defaultJvmFlags,
JavaOptions javaOptions,
Label javaToolchain)
throws InvalidConfigurationException {
this.commandLineJavacFlags =
ImmutableList.copyOf(JavaHelper.tokenizeJavaOptions(javaOptions.javacOpts));
this.javaLauncherLabel = javaOptions.javaLauncher;
this.useIjars = javaOptions.useIjars;
this.useHeaderCompilation = javaOptions.headerCompilation;
this.headerCompilationDirectClasspath = javaOptions.headerCompilationDirectClasspath;
this.headerCompilationDisableJavacFallback = javaOptions.headerCompilationDisableJavacFallback;
this.generateJavaDeps = generateJavaDeps;
this.javaClasspath = javaOptions.javaClasspath;
this.defaultJvmFlags = ImmutableList.copyOf(defaultJvmFlags);
this.checkedConstraints = ImmutableList.copyOf(javaOptions.checkedConstraints);
this.strictJavaDeps = javaOptions.strictJavaDeps;
this.proguardBinary = javaOptions.proguard;
this.extraProguardSpecs = ImmutableList.copyOf(javaOptions.extraProguardSpecs);
this.bundleTranslations = javaOptions.bundleTranslations;
this.javaToolchain = javaToolchain;
this.javaOptimizationMode = javaOptions.javaOptimizationMode;
this.legacyBazelJavaTest = javaOptions.legacyBazelJavaTest;
this.strictDepsJavaProtos = javaOptions.strictDepsJavaProtos;
this.enforceOneVersion = javaOptions.enforceOneVersion;
this.explicitJavaTestDeps = javaOptions.explicitJavaTestDeps;
this.experimentalTestRunner = javaOptions.experimentalTestRunner;
ImmutableList.Builder<Label> translationsBuilder = ImmutableList.builder();
for (String s : javaOptions.translationTargets) {
try {
Label label = Label.parseAbsolute(s);
translationsBuilder.add(label);
} catch (LabelSyntaxException e) {
throw new InvalidConfigurationException("Invalid translations target '" + s + "', make " +
"sure it uses correct absolute path syntax.", e);
}
}
this.translationTargets = translationsBuilder.build();
ImmutableMap.Builder<String, Optional<Label>> optimizersBuilder = ImmutableMap.builder();
for (Map.Entry<String, Label> optimizer : javaOptions.bytecodeOptimizers.entrySet()) {
String mnemonic = optimizer.getKey();
if (optimizer.getValue() == null && !"Proguard".equals(mnemonic)) {
throw new InvalidConfigurationException("Must supply label for optimizer " + mnemonic);
}
optimizersBuilder.put(mnemonic, Optional.fromNullable(optimizer.getValue()));
}
this.bytecodeOptimizers = optimizersBuilder.build();
}
@SkylarkCallable(name = "default_javac_flags", structField = true,
doc = "The default flags for the Java compiler.")
  // TODO(bazel-team): these are the options passed on the command line; we should probably remove
  // this from Skylark.
public ImmutableList<String> getDefaultJavacFlags() {
return commandLineJavacFlags;
}
@Override
public void reportInvalidOptions(EventHandler reporter, BuildOptions buildOptions) {
if ((bundleTranslations == TriState.YES) && translationTargets.isEmpty()) {
reporter.handle(Event.error("Translations enabled, but no message translations specified. " +
"Use '--message_translations' to select the message translations to use"));
}
}
@Override
public void addGlobalMakeVariables(Builder<String, String> globalMakeEnvBuilder) {
globalMakeEnvBuilder.put("JAVA_TRANSLATIONS", buildTranslations() ? "1" : "0");
}
@Override
public boolean compatibleWithStrategy(String strategyName) {
if (strategyName.equals("experimental_worker")) {
return explicitJavaTestDeps() && useExperimentalTestRunner();
}
return true;
}
/**
* Returns true iff Java compilation should use ijars.
*/
public boolean getUseIjars() {
return useIjars;
}
/** Returns true iff Java header compilation is enabled. */
public boolean useHeaderCompilation() {
return useHeaderCompilation;
}
/** Returns true if header compilations should use direct dependencies only. */
public boolean headerCompilationDirectClasspath() {
return headerCompilationDirectClasspath;
}
/**
* If --java_header_compilation is set, report diagnostics from turbine instead of falling back to
* javac. Diagnostics will be produced more quickly, but may be less helpful.
*/
public boolean headerCompilationDisableJavacFallback() {
return headerCompilationDisableJavacFallback;
}
/**
* Returns true iff dependency information is generated after compilation.
*/
public boolean getGenerateJavaDeps() {
return generateJavaDeps;
}
public JavaClasspathMode getReduceJavaClasspath() {
return javaClasspath;
}
public ImmutableList<String> getDefaultJvmFlags() {
return defaultJvmFlags;
}
public ImmutableList<String> getCheckedConstraints() {
return checkedConstraints;
}
public StrictDepsMode getStrictJavaDeps() {
return strictJavaDeps;
}
public StrictDepsMode getFilteredStrictJavaDeps() {
StrictDepsMode strict = getStrictJavaDeps();
switch (strict) {
case STRICT:
case DEFAULT:
return StrictDepsMode.ERROR;
default: // OFF, WARN, ERROR
return strict;
}
}
/**
* @return proper label only if --java_launcher= is specified, otherwise null.
*/
public Label getJavaLauncherLabel() {
return javaLauncherLabel;
}
/**
* Returns the label provided with --proguard_top, if any.
*/
@Nullable
public Label getProguardBinary() {
return proguardBinary;
}
/**
* Returns all labels provided with --extra_proguard_specs.
*/
public ImmutableList<Label> getExtraProguardSpecs() {
return extraProguardSpecs;
}
/**
* Returns the raw translation targets.
*/
public ImmutableList<Label> getTranslationTargets() {
return translationTargets;
}
/**
   * Returns true if we should build translations.
*/
public boolean buildTranslations() {
return (bundleTranslations != TriState.NO) && !translationTargets.isEmpty();
}
/**
* Returns whether translations were explicitly disabled.
*/
public boolean isTranslationsDisabled() {
return bundleTranslations == TriState.NO;
}
/**
* Returns the label of the default java_toolchain rule
*/
public Label getToolchainLabel() {
return javaToolchain;
}
/**
* Returns the --java_optimization_mode flag setting. Note that running with a different mode over
* the same binary or test target typically invalidates the cached output Jar for that target,
* but since Proguard doesn't run on libraries, the outputs for library targets remain valid.
*/
public JavaOptimizationMode getJavaOptimizationMode() {
return javaOptimizationMode;
}
/**
* Returns ordered list of optimizers to run.
*/
public ImmutableMap<String, Optional<Label>> getBytecodeOptimizers() {
return bytecodeOptimizers;
}
/**
* Returns true if java_test in Bazel should behave in legacy mode that existed before we
* open-sourced our test runner.
*/
public boolean useLegacyBazelJavaTest() {
return legacyBazelJavaTest;
}
/**
   * Returns true if we should use the ExperimentalTestRunner instead of the BazelTestRunner for
   * Bazel's java_test runs.
*/
public boolean useExperimentalTestRunner() {
return experimentalTestRunner;
}
/**
* Make it mandatory for java_test targets to explicitly declare any JUnit or Hamcrest
* dependencies instead of accidentally obtaining them from the TestRunner's dependencies.
*/
public boolean explicitJavaTestDeps() {
return explicitJavaTestDeps;
}
/**
* Returns an enum representing whether or not Bazel should attempt to enforce one-version
* correctness on java_binary rules using the 'oneversion' tool in the java_toolchain.
*
   * One-version correctness checks for multiple non-identical versions of Java classes in the
   * transitive dependencies of a java_binary.
*/
public OneVersionEnforcementLevel oneVersionEnforcementLevel() {
return enforceOneVersion;
}
public boolean strictDepsJavaProtos() {
return strictDepsJavaProtos;
}
}
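/*
 * Minimal sketch (not part of the Bazel sources above): how JavaOptimizationMode turns its
 * varargs "-dont*" flags into the implicit Proguard directives string. The enum below is a
 * stripped-down, hypothetical stand-in, not the real configuration fragment.
 */
enum OptimizationModeSketch {
    NOOP("-dontshrink", "-dontoptimize", "-dontobfuscate"),
    FAST_MINIFY("-dontoptimize"),
    OPTIMIZE_MINIFY;

    private final String directives;

    OptimizationModeSketch(String... donts) {
        StringBuilder sb = new StringBuilder();
        for (String dont : donts) {
            // same validation idea as checkArgument(dont.startsWith("-dont"), ...)
            if (!dont.startsWith("-dont")) {
                throw new IllegalArgumentException("invalid Proguard directive: " + dont);
            }
            sb.append(dont).append('\n');
        }
        this.directives = sb.toString();
    }

    String implicitDirectives() {
        return directives; // empty for OPTIMIZE_MINIFY, one flag per line otherwise
    }
}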
|
|
package som.vmobjects;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.CompilationFinal;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.profiles.BranchProfile;
import som.interpreter.nodes.ExceptionSignalingNode;
import som.interpreter.nodes.dispatch.BlockDispatchNode;
import som.vm.NotYetImplementedException;
import som.vm.Symbols;
import som.vm.constants.Classes;
import som.vmobjects.SArray.SMutableArray;
public class SFileDescriptor extends SObjectWithClass {
@CompilationFinal public static SClass fileDescriptorClass;
private static final SSymbol FILE_NOT_FOUND = Symbols.symbolFor("FileNotFound");
private static final SSymbol FILE_IS_CLOSED = Symbols.symbolFor("FileIsClosed");
private static final SSymbol READ_ONLY_MODE = Symbols.symbolFor("ReadOnlyMode");
private static final SSymbol WRITE_ONLY_MODE = Symbols.symbolFor("WriteOnlyMode");
public static final int BUFFER_SIZE = 32 * 1024;
private SArray buffer;
private int bufferSize = BUFFER_SIZE;
private AccessModes accessMode;
private RandomAccessFile raf;
private final File f;
public static void setSOMClass(final SClass cls) {
fileDescriptorClass = cls;
}
@TruffleBoundary
public SFileDescriptor(final String uri) {
super(fileDescriptorClass, fileDescriptorClass.getInstanceFactory());
f = new File(uri);
}
public Object openFile(final SBlock fail, final BlockDispatchNode dispatchHandler) {
long[] storage = new long[bufferSize];
buffer = new SMutableArray(storage, Classes.arrayClass);
try {
raf = open();
} catch (FileNotFoundException e) {
return dispatchHandler.executeDispatch(new Object[] {fail, FILE_NOT_FOUND});
}
return this;
}
@TruffleBoundary
private RandomAccessFile open() throws FileNotFoundException {
return new RandomAccessFile(f, accessMode.mode);
}
public void closeFile(final ExceptionSignalingNode ioException) {
if (raf == null) {
return;
}
try {
closeFile();
} catch (IOException e) {
ioException.signal(e.getMessage());
}
}
@TruffleBoundary
private void closeFile() throws IOException {
raf.close();
raf = null;
}
public int read(final long position, final SBlock fail,
final BlockDispatchNode dispatchHandler, final BranchProfile errorCases) {
if (raf == null) {
errorCases.enter();
fail.getMethod().invoke(new Object[] {fail, FILE_IS_CLOSED});
return 0;
}
if (accessMode == AccessModes.write) {
errorCases.enter();
fail.getMethod().invoke(new Object[] {fail, WRITE_ONLY_MODE});
return 0;
}
long[] storage = buffer.getLongStorage();
byte[] buff = new byte[bufferSize];
int bytes = 0;
try {
assert raf != null;
// set position in file
bytes = read(position, buff);
} catch (IOException e) {
errorCases.enter();
dispatchHandler.executeDispatch(new Object[] {fail, toString(e)});
}
// move read data to the storage
for (int i = 0; i < bufferSize; i++) {
storage[i] = buff[i];
}
return bytes;
}
@TruffleBoundary
private int read(final long position, final byte[] buff) throws IOException {
int bytes;
raf.seek(position);
bytes = raf.read(buff);
return bytes;
}
@TruffleBoundary
private String toString(final IOException e) {
return e.toString();
}
public void write(final int nBytes, final long position, final SBlock fail,
final BlockDispatchNode dispatchHandler, final ExceptionSignalingNode ioException,
final BranchProfile errorCases) {
if (raf == null) {
errorCases.enter();
dispatchHandler.executeDispatch(new Object[] {fail, FILE_IS_CLOSED});
return;
}
if (accessMode == AccessModes.read) {
errorCases.enter();
fail.getMethod().invoke(new Object[] {fail, READ_ONLY_MODE});
return;
}
long[] storage = buffer.getLongStorage();
byte[] buff = new byte[bufferSize];
for (int i = 0; i < bufferSize; i++) {
long val = storage[i];
      if (val < Byte.MIN_VALUE || Byte.MAX_VALUE < val) {
errorCases.enter();
ioException.signal(errorMsg(val));
}
buff[i] = (byte) val;
}
try {
write(nBytes, position, buff);
} catch (IOException e) {
errorCases.enter();
dispatchHandler.executeDispatch(new Object[] {fail, toString(e)});
}
}
@TruffleBoundary
private static String errorMsg(final long val) {
return "Buffer only supports values in the range -128 to 127 (" + val + ")";
}
@TruffleBoundary
private void write(final int nBytes, final long position, final byte[] buff)
throws IOException {
raf.seek(position);
raf.write(buff, 0, nBytes);
}
public long getFileSize(final ExceptionSignalingNode ioException) {
try {
return length();
} catch (IOException e) {
ioException.signal(e.getMessage());
}
return 0;
}
@TruffleBoundary
private long length() throws IOException {
return raf.length();
}
public boolean isClosed() {
return raf == null;
}
@Override
public boolean isValue() {
return false;
}
public SArray getBuffer() {
return buffer;
}
public int getBufferSize() {
return bufferSize;
}
public static String getValidAccessModes() {
return AccessModes.VALID_MODES;
}
public void setBufferSize(final int bufferSize) {
// buffer size only changeable for closed files.
if (raf == null) {
this.bufferSize = bufferSize;
} else {
CompilerDirectives.transferToInterpreter();
throw new NotYetImplementedException();
}
}
@TruffleBoundary
public void setMode(final SSymbol mode) {
this.accessMode = AccessModes.valueOf(mode.getString());
}
private enum AccessModes {
read("r"), write("rw"), readWrite("rw");
final String mode;
AccessModes(final String mode) {
this.mode = mode;
}
static final String VALID_MODES = renderValid();
private static String renderValid() {
String result = "Valid access modes are ";
boolean first = true;
for (AccessModes m : values()) {
if (first) {
first = false;
} else {
result += ", ";
}
result += "#" + m.name();
}
return result;
}
}
}
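/*
 * Illustrative sketch (not part of the SOM sources above): the write() path has to reject long
 * values that do not fit into a byte before narrowing them with the (byte) cast. The helper below
 * shows the intended bounds check in isolation; the class name is hypothetical.
 */
class ByteRangeCheckSketch {
    static boolean fitsInByte(long val) {
        // a value is writable only if it lies within [Byte.MIN_VALUE, Byte.MAX_VALUE]
        return val >= Byte.MIN_VALUE && val <= Byte.MAX_VALUE;
    }

    public static void main(String[] args) {
        System.out.println(fitsInByte(127));   // true
        System.out.println(fitsInByte(-128));  // true
        System.out.println(fitsInByte(300));   // false: would be silently truncated by a (byte) cast
    }
}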
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.ex;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInspection.CleanupLocalInspectionTool;
import com.intellij.codeInspection.GlobalInspectionContext;
import com.intellij.codeInspection.InspectionEP;
import com.intellij.codeInspection.InspectionProfileEntry;
import com.intellij.lang.Language;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.util.ResourceUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.net.URL;
/**
* @author Dmitry Avdeev
* Date: 9/28/11
*/
public abstract class InspectionToolWrapper<T extends InspectionProfileEntry, E extends InspectionEP> {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.ex.InspectionToolWrapper");
protected T myTool;
protected final E myEP;
protected InspectionToolWrapper(@NotNull E ep) {
this(null, ep);
}
protected InspectionToolWrapper(@NotNull T tool) {
this(tool, null);
}
protected InspectionToolWrapper(@Nullable T tool, @Nullable E ep) {
    assert tool != null || ep != null : "tool and ep must not both be null";
myEP = ep;
myTool = tool;
}
/** Copy ctor */
protected InspectionToolWrapper(@NotNull InspectionToolWrapper<T, E> other) {
myEP = other.myEP;
// we need to create a copy for buffering
if (other.myTool != null) {
//noinspection unchecked
myTool = myEP != null ? (T)myEP.instantiateTool() : (T)InspectionToolsRegistrarCore.instantiateTool(other.myTool.getClass());
}
else {
myTool = null;
}
}
public void initialize(@NotNull GlobalInspectionContext context) {
projectOpened(context.getProject());
}
@NotNull
public abstract InspectionToolWrapper<T, E> createCopy();
@NotNull
public T getTool() {
T tool = myTool;
if (tool == null) {
//noinspection unchecked
myTool = tool = (T)myEP.instantiateTool();
if (!tool.getShortName().equals(myEP.getShortName())) {
LOG.error("Short name not matched for " + tool.getClass() + ": getShortName() = " + tool.getShortName() + "; ep.shortName = " + myEP.getShortName());
}
}
return tool;
}
public boolean isInitialized() {
return myTool != null;
}
/**
* @see #applyToDialects()
* @see #isApplicable(Language)
*/
@Nullable
public String getLanguage() {
return myEP == null ? null : myEP.language;
}
public boolean applyToDialects() {
return myEP != null && myEP.applyToDialects;
}
public boolean isApplicable(@NotNull Language language) {
String langId = getLanguage();
return langId == null || language.getID().equals(langId) || applyToDialects() && language.isKindOf(langId);
}
public boolean isCleanupTool() {
return myEP != null ? myEP.cleanupTool : getTool() instanceof CleanupLocalInspectionTool;
}
@NotNull
public String getShortName() {
return myEP != null ? myEP.getShortName() : getTool().getShortName();
}
public String getID() {
return getShortName();
}
@NotNull
public String getDisplayName() {
if (myEP == null) {
return getTool().getDisplayName();
}
else {
String name = myEP.getDisplayName();
return name == null ? getTool().getDisplayName() : name;
}
}
@NotNull
public String getGroupDisplayName() {
if (myEP == null) {
return getTool().getGroupDisplayName();
}
else {
String groupDisplayName = myEP.getGroupDisplayName();
return groupDisplayName == null ? getTool().getGroupDisplayName() : groupDisplayName;
}
}
public boolean isEnabledByDefault() {
return myEP == null ? getTool().isEnabledByDefault() : myEP.enabledByDefault;
}
@NotNull
public HighlightDisplayLevel getDefaultLevel() {
return myEP == null ? getTool().getDefaultLevel() : myEP.getDefaultLevel();
}
@NotNull
public String[] getGroupPath() {
if (myEP == null) {
return getTool().getGroupPath();
}
else {
String[] path = myEP.getGroupPath();
return path == null ? getTool().getGroupPath() : path;
}
}
public void projectOpened(@NotNull Project project) {
if (myEP == null) {
getTool().projectOpened(project);
}
}
public void projectClosed(@NotNull Project project) {
if (myEP == null) {
getTool().projectClosed(project);
}
}
public String getStaticDescription() {
return myEP == null || myEP.hasStaticDescription ? getTool().getStaticDescription() : null;
}
public String loadDescription() {
final String description = getStaticDescription();
if (description != null) return description;
try {
URL descriptionUrl = getDescriptionUrl();
if (descriptionUrl == null) return null;
return ResourceUtil.loadText(descriptionUrl);
}
catch (IOException ignored) { }
return getTool().loadDescription();
}
protected URL getDescriptionUrl() {
Application app = ApplicationManager.getApplication();
if (myEP == null || app.isUnitTestMode() || app.isHeadlessEnvironment()) {
return superGetDescriptionUrl();
}
String fileName = getDescriptionFileName();
return myEP.getLoaderForClass().getResource("/inspectionDescriptions/" + fileName);
}
@Nullable
protected URL superGetDescriptionUrl() {
final String fileName = getDescriptionFileName();
return ResourceUtil.getResource(getDescriptionContextClass(), "/inspectionDescriptions", fileName);
}
@NotNull
public String getDescriptionFileName() {
return getShortName() + ".html";
}
@NotNull
public final String getFolderName() {
return getShortName();
}
@NotNull
public Class<? extends InspectionProfileEntry> getDescriptionContextClass() {
return getTool().getClass();
}
public String getMainToolId() {
return getTool().getMainToolId();
}
public E getExtension() {
return myEP;
}
@Override
public String toString() {
return getShortName();
}
public void cleanup(@NotNull Project project) {
T tool = myTool;
if (tool != null) {
tool.cleanup(project);
}
}
@NotNull
public abstract JobDescriptor[] getJobDescriptors(@NotNull GlobalInspectionContext context);
}
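/*
 * Minimal sketch (not part of the IntelliJ sources above): getTool() lazily instantiates the
 * inspection from its extension point and caches it, so a wrapper can be created cheaply from
 * plugin.xml metadata alone. Supplier is used here as a hypothetical stand-in for
 * InspectionEP.instantiateTool(); the class name is illustrative only.
 */
class LazyToolSketch<T> {
    private final java.util.function.Supplier<T> factory;
    private T tool;

    LazyToolSketch(java.util.function.Supplier<T> factory) {
        this.factory = factory;
    }

    T getTool() {
        T t = tool;
        if (t == null) {
            tool = t = factory.get(); // instantiate on first use, then reuse the cached instance
        }
        return t;
    }

    boolean isInitialized() {
        return tool != null;
    }
}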
|
|
package be.jvb.ipv6;
import org.junit.Test;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Random;
import static be.jvb.ipv6.IPv6Address.fromInetAddress;
import static be.jvb.ipv6.IPv6Address.fromString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author Jan Van Besien
*/
public class IPv6AddressTest
{
@Test
public void parseFromAllZeroes()
{
assertEquals("::", fromString("0000:0000:0000:0000:0000:0000:0000:0000").toString());
}
@Test
public void parseFromAllZeroesShortNotation()
{
assertEquals("::", fromString("::").toString());
}
@Test
public void parseSomeRealAddresses()
{
assertEquals("::1", fromString("0000:0000:0000:0000:0000:0000:0000:0001").toString());
assertEquals("::1:0", fromString("0000:0000:0000:0000:0000:0000:0001:0000").toString());
assertEquals("1::1:0:0:0", fromString("0001:0000:0000:0000:0001:0000:0000:0000").toString());
assertEquals("::ffff", fromString("0000:0000:0000:0000:0000:0000:0000:ffff").toString());
assertEquals("ffff::", fromString("ffff:0000:0000:0000:0000:0000:0000:0000").toString());
assertEquals("2001:db8:85a3::8a2e:370:7334", fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").toString());
}
@Test
public void parseSomeRealAddressesShortNotation()
{
assertEquals("::1", fromString("::1").toString());
assertEquals("::1:0", fromString("::1:0").toString());
assertEquals("1::1:0:0:0", fromString("1::1:0:0:0").toString());
assertEquals("::ffff", fromString("::ffff").toString());
assertEquals("ffff::", fromString("ffff::").toString());
assertEquals("2001:db8:85a3::8a2e:370:7334", fromString("2001:db8:85a3::8a2e:370:7334").toString());
}
@Test(expected = IllegalArgumentException.class)
public void parseInvalid_1()
{
fromString(":");
}
@Test(expected = IllegalArgumentException.class)
public void parseInvalid_2()
{
fromString(":a");
}
@Test(expected = IllegalArgumentException.class)
public void parseInvalidTooShort_1()
{
fromString("a:");
}
@Test(expected = IllegalArgumentException.class)
public void parseInvalidTooShort_2()
{
fromString("a:a:");
}
@Test(expected = IllegalArgumentException.class)
public void parseInvalidTooLong()
{
fromString("a:a:a:a:a:a:a:a:a:a:a:a");
}
@Test
public void constructFromInet6Address() throws UnknownHostException
{
final InetAddress inetAddress = Inet6Address.getByName("2001:0db8:85a3:0000:0000:8a2e:0370:7334");
assertEquals("2001:db8:85a3::8a2e:370:7334", fromInetAddress(inetAddress).toString());
}
@Test
public void convertToInet6Address() throws UnknownHostException
{
final InetAddress inetAddress = Inet6Address.getByName("2001:0db8:85a3:0000:0000:8a2e:0370:7334");
assertEquals(inetAddress, fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").toInetAddress());
}
@Test
public void toStringCanBeUsedInFromStringAndViceVersa()
{
final int nTests = 10000;
final Random rg = new Random();
for (int i = 0; i < nTests; i++)
{
final IPv6Address address = new IPv6Address(rg.nextLong(), rg.nextLong());
assertEquals(address, fromString(address.toString()));
}
}
@Test
public void addition()
{
assertEquals(fromString("::2"), fromString("::1").add(1));
assertEquals(fromString("::1:0:0:0"), fromString("::ffff:ffff:ffff").add(1));
assertEquals(fromString("::1:0:0:0:0"), fromString("::ffff:ffff:ffff:ffff").add(1));
assertEquals(fromString("::1:0:0:0:1"), fromString("::ffff:ffff:ffff:ffff").add(2));
assertEquals(fromString("::").add(Integer.MAX_VALUE).add(Long.MAX_VALUE), fromString("::").add(Long.MAX_VALUE).add(
Integer.MAX_VALUE));
}
@Test
public void additionOverflow()
{
assertEquals(fromString("::"), fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff").add(1));
}
@Test
public void subtraction()
{
assertEquals(fromString("::1"), fromString("::2").subtract(1));
assertEquals(fromString("::ffff:ffff:ffff:ffff"), fromString("::0001:0:0:0:0").subtract(1));
assertEquals(fromString("::ffff:ffff:ffff:fffe"), fromString("::0001:0:0:0:0").subtract(2));
assertEquals(fromString("::").subtract(Integer.MAX_VALUE).subtract(Long.MAX_VALUE), fromString("::").subtract(
Long.MAX_VALUE).subtract(
Integer.MAX_VALUE));
}
@Test
public void subtractionVersusAdditionWithRandomAddresses()
{
final Random random = new Random();
final long randomLong = random.nextLong();
final IPv6Address randomAddress = new IPv6Address(random.nextLong(), random.nextLong());
assertEquals(randomAddress, randomAddress.add(randomLong).subtract(randomLong));
}
@Test
public void subtractionVersusAdditionCornerCases()
{
final Random random = new Random();
final IPv6Address randomAddress = new IPv6Address(random.nextLong(), random.nextLong());
assertEquals(randomAddress, randomAddress.add(Integer.MAX_VALUE).subtract(Integer.MAX_VALUE));
assertEquals(randomAddress, randomAddress.add(Integer.MIN_VALUE).subtract(Integer.MIN_VALUE));
assertEquals(randomAddress, randomAddress.add(Long.MAX_VALUE).subtract(Long.MAX_VALUE));
assertEquals(randomAddress, randomAddress.add(Long.MIN_VALUE).subtract(Long.MIN_VALUE));
}
@Test
public void subtractionUnderflow()
{
assertEquals(fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"), fromString("::").subtract(1));
}
@Test
public void compare()
{
assertTrue(0 == fromString("::").compareTo(fromString("::")));
assertTrue(0 > fromString("::").compareTo(fromString("::1")));
assertTrue(0 < fromString("::1").compareTo(fromString("::")));
assertTrue(0 > fromString("::").compareTo(fromString("::ffff:ffff:ffff:ffff")));
assertTrue(0 > fromString("::efff:ffff:ffff:ffff").compareTo(fromString("::ffff:ffff:ffff:ffff")));
assertTrue(0 > fromString("efff:ffff:ffff:ffff:0:1:2:3").compareTo(fromString("ffff:ffff:ffff:ffff:4:5:6:7")));
}
@Test
public void maskWithPrefixLength()
{
assertEquals(fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334"),
fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").maskWithPrefixLength(128));
assertEquals(fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00"),
fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff").maskWithPrefixLength(120));
assertEquals(fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7300"),
fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").maskWithPrefixLength(120));
assertEquals(fromString("2001:0db8:85a3::"), fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").maskWithPrefixLength(64));
assertEquals(fromString("2000::"), fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").maskWithPrefixLength(15));
assertEquals(fromString("8000::"),
fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff").maskWithPrefixLength(1));
}
@Test
public void maximumAddressWithPrefixLength()
{
assertEquals(fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334"),
fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").maximumAddressWithPrefixLength(128));
assertEquals(fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"),
fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00").maximumAddressWithPrefixLength(120));
assertEquals(fromString("2001:0db8:85a3:0000:0000:8a2e:0370:73ff"),
fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7300").maximumAddressWithPrefixLength(120));
assertEquals(fromString("2001:0db8:85a3:0000:ffff:ffff:ffff:ffff"),
fromString("2001:0db8:85a3:0000:0000:8a2e:0370:7334").maximumAddressWithPrefixLength(64));
assertEquals(fromString("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"),
fromString("8000::").maximumAddressWithPrefixLength(1));
assertEquals(fromString("7fff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"),
fromString("7fff::").maximumAddressWithPrefixLength(1));
}
@Test
public void numberOfTrailingOnes()
{
assertEquals(0, fromString("::").numberOfTrailingOnes());
assertEquals(1, fromString("::1").numberOfTrailingOnes());
assertEquals(4, fromString("::f").numberOfTrailingOnes());
final IPv6Address addressWithLowBitsEqualToLongMaxValue = fromString("::7fff:ffff:ffff:ffff");
assertEquals(Long.MAX_VALUE, addressWithLowBitsEqualToLongMaxValue.getLowBits());
assertEquals(63, addressWithLowBitsEqualToLongMaxValue.numberOfTrailingOnes());
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search;
import com.carrotsearch.hppc.ObjectHashSet;
import org.apache.lucene.index.*;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
import java.util.*;
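/**
 * A phrase query whose terms at the last position are treated as prefixes: at rewrite time each prefix is
 * expanded against the index (up to {@code maxExpansions} terms) and the whole phrase is executed as a
 * regular {@link MultiPhraseQuery}.
 */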
public class MultiPhrasePrefixQuery extends Query {
private String field;
private ArrayList<Term[]> termArrays = new ArrayList<>();
private ArrayList<Integer> positions = new ArrayList<>();
private int maxExpansions = Integer.MAX_VALUE;
private int slop = 0;
/**
* Sets the phrase slop for this query.
*
* @see org.apache.lucene.search.PhraseQuery#setSlop(int)
*/
public void setSlop(int s) {
slop = s;
}
public void setMaxExpansions(int maxExpansions) {
this.maxExpansions = maxExpansions;
}
/**
     * Returns the phrase slop for this query.
*
* @see org.apache.lucene.search.PhraseQuery#getSlop()
*/
public int getSlop() {
return slop;
}
/**
* Add a single term at the next position in the phrase.
*
* @see org.apache.lucene.search.PhraseQuery#add(Term)
*/
public void add(Term term) {
add(new Term[]{term});
}
/**
* Add multiple terms at the next position in the phrase. Any of the terms
* may match.
*
* @see org.apache.lucene.search.PhraseQuery#add(Term)
*/
public void add(Term[] terms) {
int position = 0;
if (positions.size() > 0)
position = positions.get(positions.size() - 1) + 1;
add(terms, position);
}
/**
     * Allows specifying the relative position of terms within the phrase.
*
* @param terms the terms
* @param position the position of the terms provided as argument
* @see org.apache.lucene.search.PhraseQuery#add(Term, int)
*/
public void add(Term[] terms, int position) {
if (termArrays.size() == 0)
field = terms[0].field();
for (int i = 0; i < terms.length; i++) {
            if (!terms[i].field().equals(field)) { // compare field names by value, not by reference
throw new IllegalArgumentException(
"All phrase terms must be in the same field (" + field + "): "
+ terms[i]);
}
}
termArrays.add(terms);
positions.add(position);
}
/**
* Returns the relative positions of terms in this phrase.
*/
public int[] getPositions() {
int[] result = new int[positions.size()];
for (int i = 0; i < positions.size(); i++)
result[i] = positions.get(i);
return result;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
if (termArrays.isEmpty()) {
return new MatchNoDocsQuery();
}
MultiPhraseQuery query = new MultiPhraseQuery();
query.setSlop(slop);
int sizeMinus1 = termArrays.size() - 1;
for (int i = 0; i < sizeMinus1; i++) {
query.add(termArrays.get(i), positions.get(i));
}
Term[] suffixTerms = termArrays.get(sizeMinus1);
int position = positions.get(sizeMinus1);
ObjectHashSet<Term> terms = new ObjectHashSet<>();
for (Term term : suffixTerms) {
getPrefixTerms(terms, term, reader);
if (terms.size() > maxExpansions) {
break;
}
}
if (terms.isEmpty()) {
return Queries.newMatchNoDocsQuery();
}
query.add(terms.toArray(Term.class), position);
query.setBoost(getBoost());
return query.rewrite(reader);
}
private void getPrefixTerms(ObjectHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
// SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
// instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
List<LeafReaderContext> leaves = reader.leaves();
for (LeafReaderContext leaf : leaves) {
Terms _terms = leaf.reader().terms(field);
if (_terms == null) {
continue;
}
TermsEnum termsEnum = _terms.iterator();
TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(prefix.bytes());
if (TermsEnum.SeekStatus.END == seekStatus) {
continue;
}
for (BytesRef term = termsEnum.term(); term != null; term = termsEnum.next()) {
if (!StringHelper.startsWith(term, prefix.bytes())) {
break;
}
terms.add(new Term(field, BytesRef.deepCopyOf(term)));
if (terms.size() >= maxExpansions) {
return;
}
}
}
}
@Override
public final String toString(String f) {
StringBuilder buffer = new StringBuilder();
if (field == null || !field.equals(f)) {
buffer.append(field);
buffer.append(":");
}
buffer.append("\"");
Iterator<Term[]> i = termArrays.iterator();
while (i.hasNext()) {
Term[] terms = i.next();
if (terms.length > 1) {
buffer.append("(");
for (int j = 0; j < terms.length; j++) {
buffer.append(terms[j].text());
if (j < terms.length - 1) {
if (i.hasNext()) {
buffer.append(" ");
} else {
buffer.append("* ");
}
}
}
if (i.hasNext()) {
buffer.append(") ");
} else {
buffer.append("*)");
}
} else {
buffer.append(terms[0].text());
if (i.hasNext()) {
buffer.append(" ");
} else {
buffer.append("*");
}
}
}
buffer.append("\"");
if (slop != 0) {
buffer.append("~");
buffer.append(slop);
}
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
/**
* Returns true if <code>o</code> is equal to this.
*/
@Override
public boolean equals(Object o) {
if (!(o instanceof MultiPhrasePrefixQuery)) return false;
MultiPhrasePrefixQuery other = (MultiPhrasePrefixQuery) o;
return this.getBoost() == other.getBoost()
&& this.slop == other.slop
&& termArraysEquals(this.termArrays, other.termArrays)
&& this.positions.equals(other.positions);
}
/**
* Returns a hash code value for this object.
*/
@Override
public int hashCode() {
return Float.floatToIntBits(getBoost())
^ slop
^ termArraysHashCode()
^ positions.hashCode()
^ 0x4AC65113;
}
// Breakout calculation of the termArrays hashcode
private int termArraysHashCode() {
int hashCode = 1;
for (final Term[] termArray : termArrays) {
hashCode = 31 * hashCode
+ (termArray == null ? 0 : Arrays.hashCode(termArray));
}
return hashCode;
}
// Breakout calculation of the termArrays equals
private boolean termArraysEquals(List<Term[]> termArrays1, List<Term[]> termArrays2) {
if (termArrays1.size() != termArrays2.size()) {
return false;
}
ListIterator<Term[]> iterator1 = termArrays1.listIterator();
ListIterator<Term[]> iterator2 = termArrays2.listIterator();
while (iterator1.hasNext()) {
Term[] termArray1 = iterator1.next();
Term[] termArray2 = iterator2.next();
if (!(termArray1 == null ? termArray2 == null : Arrays.equals(termArray1,
termArray2))) {
return false;
}
}
return true;
}
public String getField() {
return field;
}
}
|
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.ex.EditorSettingsExternalizable;
import com.intellij.psi.PsiFile;
import com.intellij.testFramework.PlatformTestUtil;
import com.jetbrains.python.codeInsight.PyCodeInsightSettings;
import com.jetbrains.python.documentation.docstrings.DocStringFormat;
import com.jetbrains.python.fixtures.PyTestCase;
import com.jetbrains.python.psi.LanguageLevel;
import org.jetbrains.annotations.NotNull;
/**
* @author yole
*/
public class PyEditingTest extends PyTestCase {
public void testNoPairedParenthesesBeforeIdentifier() { // PY-290
assertEquals("(abc", doTestTyping("abc", 0, '('));
}
public void testPairedParenthesesAtEOF() {
assertEquals("abc()", doTestTyping("abc", 3, '('));
}
public void testPairedQuotesInRawString() { // PY-263
assertEquals("r''", doTestTyping("r", 1, '\''));
}
public void testQuotesInString() { // PY-5041
assertEquals("'st''ring'", doTestTyping("'st'ring'", 3, '\''));
}
public void testNonClosingQuoteAtIdent() { // PY-380
assertEquals("'abc", doTestTyping("abc", 0, '\''));
}
public void testNonClosingQuoteAtNumber() { // PY-380
assertEquals("'123", doTestTyping("123", 0, '\''));
}
public void testAutoClosingQuoteAtRBracket() {
assertEquals("'']", doTestTyping("]", 0, '\''));
}
public void testAutoClosingQuoteAtRParen() {
assertEquals("'')", doTestTyping(")", 0, '\''));
}
public void testAutoClosingQuoteAtComma() {
assertEquals("'',", doTestTyping(",", 0, '\''));
}
public void testAutoClosingQuoteAtSpace() {
assertEquals("'' ", doTestTyping(" ", 0, '\''));
}
// PY-1779
public void testAutoCloseTriple() {
assertEquals("''''''", doTestTyping("''", 2, '\''));
}
// PY-1779
public void testAutoRemoveTriple() {
doTestBackspace("closedTripleQuoteBackspace", new LogicalPosition(1, 3));
}
// PY-19084
  public void testNoAutoclosingAtTheEnd() {
assertEquals("'''docstring'''", doTestTyping("'''docstring''", 14, '\''));
}
public void testAutoCloseAfterIllegalPrefix() {
assertEquals("rrr''", doTestTyping("rrr", 3, '\''));
}
// PY-18972
public void testFString() {
assertEquals("f''", doTestTyping("f", 1, '\''));
assertEquals("rf''", doTestTyping("rf", 2, '\''));
assertEquals("fr''", doTestTyping("fr", 2, '\''));
assertEquals("fr''''''", doTestTyping("fr''", 4, '\''));
}
public void testOvertypeFromInside() {
assertEquals("''", doTestTyping("''", 1, '\''));
}
public void testGreedyBackspace() { // PY-254
final EditorSettingsExternalizable settings = EditorSettingsExternalizable.getInstance();
boolean oldVSpaceValue = settings.isVirtualSpace();
try {
settings.setVirtualSpace(true);
doTestBackspace("py254", new LogicalPosition(4, 8));
}
finally {
settings.setVirtualSpace(oldVSpaceValue);
}
}
public void testUnindentBackspace() { // PY-853
doTestBackspace("smartUnindent", new LogicalPosition(1, 4));
}
public void testUnindentTab() { // PY-1270
doTestBackspace("unindentTab", new LogicalPosition(4, 4));
}
private void doTestBackspace(final String fileName, final LogicalPosition pos) {
myFixture.configureByFile("/editing/" + fileName + ".before.py");
myFixture.getEditor().getCaretModel().moveToLogicalPosition(pos);
pressButton(IdeActions.ACTION_EDITOR_BACKSPACE);
myFixture.checkResultByFile("/editing/" + fileName + ".after.py", true);
}
public void testUncommentWithSpace() { // PY-980
myFixture.configureByFile("/editing/uncommentWithSpace.before.py");
myFixture.getEditor().getCaretModel().moveToLogicalPosition(new LogicalPosition(0, 1));
PlatformTestUtil.invokeNamedAction(IdeActions.ACTION_COMMENT_LINE);
myFixture.checkResultByFile("/editing/uncommentWithSpace.after.py", true);
}
public void testEnterInLineComment() { // PY-1739
doTestEnter("# foo <caret>bar", "# foo \n# <caret>bar");
}
public void testEnterInPrefixString() { // PY-5058
doTestEnter("s = r'some <caret>string'", "s = r'some ' \\\n" +
" r'string'");
}
public void testEnterInStringFormatting() { // PY-7039
doTestEnter("foo += \"fooba<caret>r\" % foo\n",
"foo += \"fooba\" \\\n" +
" \"r\" % foo\n");
}
public void testEnterInStatement() {
doTestEnter("if a <caret>and b: pass", "if a \\\n and b: pass");
}
public void testEnterBeforeStatement() {
doTestEnter("def foo(): <caret>pass", "def foo(): \n pass");
}
public void testEnterInParameterList() {
doTestEnter("def foo(a,<caret>b): pass", "def foo(a,\n b): pass");
}
public void testEnterInTuple() {
doTestEnter("for x in 'a', <caret>'b': pass", "for x in 'a', \\\n 'b': pass");
}
public void testEnterInCodeWithErrorElements() {
doTestEnter("z=1 <caret>2", "z=1 \n2");
}
public void testEnterAtStartOfComment() { // PY-1958
doTestEnter("# bar\n<caret># foo", "# bar\n\n# foo");
}
public void testEnterAtEndOfComment() { // PY-1958
doTestEnter("# bar<caret>\n# foo", "# bar\n\n# foo");
}
public void testEnterAfterBackslash() { // PY-1960
doTestEnter("s = \\<caret>\n'some string'", "s = \\\n\n'some string'");
}
public void testEnterBetweenCommentAndStatement() { // PY-1958
doTestEnter("def test(a):\n <caret># some comment\n if a: return", "def test(a):\n \n # some comment\n if a: return");
}
public void testEnterBetweenDecoratorAndFunction() { // PY-1985
doTestEnter("@foo\n<caret>def bar(x): pass", "@foo\n\ndef bar(x): pass");
}
public void testEnterInSliceExpression() { // PY-1992
doTestEnter("a = some_list[<caret>slice_start:slice_end]", "a = some_list[\n slice_start:slice_end]");
}
public void testEnterInSubscriptionExpression() { // PY-1992
doTestEnter("a = some_list[<caret>slice_start]", "a = some_list[\n slice_start]");
}
public void testEnterBeforeComment() { // PY-2138
doTestEnter("def x():\n if foo():<caret>\n #bar\n baz()", "def x():\n if foo():\n \n #bar\n baz()");
}
public void testEnterInEmptyFile() { // PY-2194
doTestEnter(" <caret>\n", " \n \n");
}
public void testEnterInDocstring() { // CR-PY-144
doTestEnter(" def foo():\n \"\"\" some comment<caret>\"\"\"\n pass", " def foo():\n \"\"\" some comment\n \"\"\"\n pass");
}
public void testEnterStubInDocstring() { // CR-PY-144
runWithDocStringFormat(DocStringFormat.PLAIN, () -> doTestEnter("def foo():\n \"\"\"<caret>", "def foo():\n" +
" \"\"\"\n" +
" \n" +
" \"\"\""));
}
// PY-18486
public void testTripleQuotesThenEnterInsertsDocstring() {
doDocStringTypingTest("\"\"\"\n", DocStringFormat.REST);
}
public void testEnterDocStringStubInClass() {
doDocStringTypingTest("\n", DocStringFormat.REST);
}
public void testEnterDocStringStubInFile() {
doDocStringTypingTest("\n", DocStringFormat.REST);
}
// PY-16656
public void testEnterDocStringStubInFunctionWithSelf() {
doDocStringTypingTest("\n", DocStringFormat.REST);
}
// PY-16656
public void testEnterDocStringStubInStaticMethodWithSelf() {
doDocStringTypingTest("\n", DocStringFormat.REST);
}
// PY-16828
public void testEnterDocStringStubWithStringPrefix() {
doDocStringTypingTest("\n", DocStringFormat.REST);
}
// PY-3421
public void testSpaceDocStringStubInFunction() {
doDocStringTypingTest(" ", DocStringFormat.REST);
}
// PY-3421
public void testSpaceDocStringStubInFile() {
doDocStringTypingTest(" ", DocStringFormat.REST);
}
// PY-3421
public void testSpaceDocStringStubInClass() {
doDocStringTypingTest(" ", DocStringFormat.REST);
}
// PY-16765
public void testSectionIndentInsideGoogleDocString() {
doDocStringTypingTest("\nparam", DocStringFormat.GOOGLE);
}
// PY-16765
public void testSectionIndentInsideGoogleDocStringCustomIndent() {
getIndentOptions().INDENT_SIZE = 2;
doDocStringTypingTest("\nparam", DocStringFormat.GOOGLE);
}
// PY-17183
public void testEnterDocstringStubWhenFunctionDocstringBelow() {
doDocStringTypingTest("\n", DocStringFormat.GOOGLE);
}
// PY-17183
public void testEnterDocstringStubWhenClassDocstringBelow() {
doDocStringTypingTest("\n", DocStringFormat.GOOGLE);
}
// PY-17183
public void testEnterNoDocstringStubWhenCodeExampleInDocstring() {
doDocStringTypingTest("\n", DocStringFormat.GOOGLE);
}
// PY-15332
public void testEnterDocstringStubNoReturnTagForInit() {
doDocStringTypingTest("\n", DocStringFormat.REST);
}
// PY-15532
public void testSpaceDocstringStubNoReturnSectionForInit() {
final PyCodeInsightSettings codeInsightSettings = PyCodeInsightSettings.getInstance();
final boolean oldInsertTypeDocStub = codeInsightSettings.INSERT_TYPE_DOCSTUB;
codeInsightSettings.INSERT_TYPE_DOCSTUB = true;
try {
doDocStringTypingTest(" ", DocStringFormat.GOOGLE);
}
finally {
codeInsightSettings.INSERT_TYPE_DOCSTUB = oldInsertTypeDocStub;
}
}
public void testEnterInString() { // PY-1738
doTestEnter("a = \"some <caret>string\"", "a = \"some \" \\\n" +
" \"string\"");
}
public void testEnterInImportWithParens() { // PY-2661
doTestEnter("from django.http import (HttpResponse,<caret>)",
"from django.http import (HttpResponse,\n" +
" )");
}
public void testEnterInKeyword() {
doTestEnter("imp<caret>ort django.http",
"imp\n" +
"ort django.http");
}
public void testEnterInIdentifier() {
doTestEnter("import dja<caret>ngo.http",
"import dja\n"+
"ngo.http");
}
public void testEnterAfterStringPrefix() {
doTestEnter("r<caret>\"string\"",
"r\n"+
"\"string\"");
}
public void testEnterInStringInParenth() {
doTestEnter("a = (\"str<caret>ing\")",
"a = (\"str\"\n" +
" \"ing\")");
}
public void testEnterEscapedQuote() {
doTestEnter("a = 'some \\<caret>' string'",
"a = 'some \\'' \\\n" +
" ' string'");
}
public void testEnterEscapedBackslash() {
doTestEnter("a = 'some \\\\<caret> string'",
"a = 'some \\\\' \\\n" +
" ' string'");
}
public void testEnterAfterSlash() {
doTestEnter("a = 'some \\<caret> string'",
"a = 'some \\\n" +
" string'");
}
public void testStringFormatting() {
doTestEnter("print (\"foo<caret> %s\" % 1)",
"print (\"foo\"\n" +
" \" %s\" % 1)");
}
public void testEndOfStringInParenth() {
doTestEnter("print (\"foo\"<caret>\n" +
" \"bar\")",
"print (\"foo\"\n" +
" \n" +
" \"bar\")");
}
public void testSlashAfterSlash() {
doTestEnter("a = a+\\<caret>b",
"a = a+\\\n" +
" b");
}
public void testComprehensionInReturn() {
doTestEnter("def dbl():\n" +
" return (<caret>(a, a) for a in [])",
"def dbl():\n" +
" return (\n" +
" (a, a) for a in [])");
}
public void testParenthesizedInIf() {
doTestEnter("if isinstance(bz_value, list) and <caret>(isinstance(bz_value[0], str)):\n" +
" pass",
"if isinstance(bz_value, list) and \\\n" +
" (isinstance(bz_value[0], str)):\n" +
" pass");
}
public void testEmptyStringInParenthesis() {
doTestEnter("a = ('<caret>')",
"a = (''\n" +
" '')");
}
public void testEmptyStringInParenthesis2() {
doTestEnter("a = (''\n" +
" <caret>'')",
"a = (''\n" +
" \n" +
" '')");
}
public void testBracesInString() {
doTestEnter("a = 'test(<caret>)'",
"a = 'test(' \\\n" +
" ')'");
}
public void testEnterAfterDefKeywordInFunction() {
doTestEnter("def <caret>func():\n" +
" pass",
"def \\\n" +
" func():\n" +
" pass");
}
public void testEnterBeforeColonInFunction() {
doTestEnter("def func()<caret>:\n" +
" pass",
"def func()\\\n" +
" :\n" +
" pass");
}
// PY-15469
public void testEnterBeforeArrowInFunction() {
runWithLanguageLevel(LanguageLevel.PYTHON34, () -> doTestEnter("def func() <caret>-> int:\n" +
" pass",
"def func() \\\n" +
" -> int:\n" +
" pass"));
}
// PY-15469
public void testEnterAfterArrowInFunction() {
runWithLanguageLevel(LanguageLevel.PYTHON34, () -> doTestEnter("def func() -><caret> int:\n" +
" pass",
"def func() ->\\\n" +
" int:\n" +
" pass"));
}
// PY-15469
public void testEnterDoesNotInsertSlashInsideArrow() {
runWithLanguageLevel(LanguageLevel.PYTHON34, () -> doTestEnter("def func() -<caret>> int:\n" +
" pass",
"def func() -\n" +
"> int:\n" +
" pass"));
}
private void doTestEnter(String before, final String after) {
int pos = before.indexOf("<caret>");
before = before.replace("<caret>", "");
doTestTyping(before, pos, '\n');
myFixture.checkResult(after);
}
// PY-21478
public void testContinuationIndentForFunctionArguments() {
getPythonCodeStyleSettings().USE_CONTINUATION_INDENT_FOR_ARGUMENTS = true;
doTestEnter("func(<caret>)",
"func(\n" +
" <caret>\n" +
")");
}
// PY-21840
public void testEditInjectedRegexpFragmentWithLongUnicodeEscape() {
myFixture.configureByText(PythonFileType.INSTANCE,
"import re\n" +
"re.compile(ur'\\U00010000<caret>')");
myFixture.type("t");
myFixture.checkResult("import re\n" +
"re.compile(ur'\\U00010000t')");
}
// PY-21697
public void testTripleQuotesInsideTripleQuotedStringLiteral() {
// TODO an extra quote is inserted due to PY-21993
doTypingTest("'");
}
private String doTestTyping(final String text, final int offset, final char character) {
final PsiFile file = myFixture.configureByText(PythonFileType.INSTANCE, text);
myFixture.getEditor().getCaretModel().moveToOffset(offset);
myFixture.type(character);
return myFixture.getDocument(file).getText();
}
private void doTypingTest(final char character) {
final String testName = "editing/" + getTestName(true);
myFixture.configureByFile(testName + ".py");
myFixture.type(character);
myFixture.checkResultByFile(testName + ".after.py");
}
private void doTypingTest(@NotNull String text) {
final String testName = "editing/" + getTestName(true);
myFixture.configureByFile(testName + ".py");
myFixture.type(text);
myFixture.checkResultByFile(testName + ".after.py");
}
private void doDocStringTypingTest(final String text, @NotNull DocStringFormat format) {
runWithDocStringFormat(format, () -> doTypingTest(text));
}
public void testFirstParamClassmethod() {
doTypingTest('(');
}
public void testFirstParamMetaClass() {
doTypingTest('(');
}
public void testFirstParamMetaNew() {
doTypingTest('(');
}
public void testFirstParamMetaSimple() {
doTypingTest('(');
}
public void testFirstParamSimpleInit() {
doTypingTest('(');
}
public void testFirstParamSimpleNew() {
doTypingTest('(');
}
public void testFirstParamSimple() {
doTypingTest('(');
}
public void testFirstParamStaticmethod() {
doTypingTest('(');
}
public void testFirstParamDuplicateColon() { // PY-2652
doTypingTest('(');
}
// PY-21269
public void testFirstParamMultipleMethods() {
doTypingTest('(');
}
// PY-15240
public void testFirstParamSpacesInsideParentheses() {
getCommonCodeStyleSettings().SPACE_WITHIN_METHOD_PARENTHESES = true;
doTypingTest('(');
}
// PY-15240
public void testFirstParamSpacesInsideEmptyParentheses() {
getCommonCodeStyleSettings().SPACE_WITHIN_EMPTY_METHOD_PARENTHESES = true;
doTypingTest('(');
}
// PY-21289
public void testPairedParenthesesMultipleCalls() {
doTypingTest('(');
}
public void testEnterBeforeString() { // PY-3673
doTestEnter("<caret>''", "\n''");
}
public void testEnterInUnicodeString() {
doTestEnter("a = u\"some <caret>text\"", "a = u\"some \" \\\n" +
" u\"<caret>text\"");
}
public void testBackslashInParenthesis() { // PY-5106
doTestEnter("(\"some <caret>string\", 1)", "(\"some \"\n" +
" \"string\", 1)");
}
// PY-15609
public void testEnterInStringInTupleWithoutParenthesis() {
doTestEnter("def hello_world():\n" +
" return bar, 'so<caret>me'",
"def hello_world():\n" +
" return bar, 'so' \\\n" +
" 'me'");
}
// PY-27178
public void testIncompleteFunctionTypeComment() {
doTypingTest('.');
}
// PY-10972
public void testEnterInIncompleteTupleLiteral() {
doTypingTest("\n'baz'");
}
// PY-10972
public void testEnterInIncompleteListLiteral() {
doTypingTest("\n'baz'");
}
// PY-10972
public void testEnterInIncompleteSetLiteral() {
doTypingTest("\n'baz'");
}
// PY-10972
public void testEnterInIncompleteDictLiteral() {
doTypingTest("\n'baz'");
}
// PY-10972
public void testEnterInIncompleteGluedStringLiteralInParentheses() {
doTypingTest("\n'bar'");
}
// PY-10972
public void testEnterInIncompleteListComprehension() {
doTypingTest("\nfoo");
}
// PY-10972
public void testEnterInIncompleteSetComprehension() {
doTypingTest("\nfoo");
}
// PY-10972
public void testEnterInIncompleteDictComprehension() {
doTypingTest("\nfoo");
}
// PY-10972
public void testEnterInIncompleteParenthesizedGenerator() {
doTypingTest("\nfoo");
}
// PY-10972
public void testEnterInIncompleteNestedListLiteral() {
doTypingTest("\n'baz'");
}
// PY-10972
public void testEnterInIncompleteNestedTupleLiteral() {
doTypingTest("\n'baz'");
}
// PY-10972
public void testEnterInIncompleteNestedGluedStringInParentheses() {
doTypingTest("\n'baz'");
}
public void testTabOutFromStringLiteral() {
boolean savedValue = CodeInsightSettings.getInstance().TAB_EXITS_BRACKETS_AND_QUOTES;
CodeInsightSettings.getInstance().TAB_EXITS_BRACKETS_AND_QUOTES = true;
try {
myFixture.configureByText(getTestName(true) + ".py",
"def some():\n" +
" print<caret>");
myFixture.type("(\"");
myFixture.performEditorAction(IdeActions.ACTION_BRACE_OR_QUOTE_OUT);
myFixture.checkResult("def some():\n" +
" print(\"\"<caret>)");
myFixture.performEditorAction(IdeActions.ACTION_BRACE_OR_QUOTE_OUT);
myFixture.checkResult("def some():\n" +
" print(\"\")<caret>");
}
finally {
CodeInsightSettings.getInstance().TAB_EXITS_BRACKETS_AND_QUOTES = savedValue;
}
}
}
|
|
package com.esri.geoevent.solutions.transport.tcpsquirt;
import static com.esri.geoevent.solutions.transport.tcpsquirt.ClientServerMode.CLIENT;
import static com.esri.geoevent.solutions.transport.tcpsquirt.ClientServerMode.SERVER;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.esri.ges.core.component.ComponentException;
import com.esri.ges.core.component.RunningException;
import com.esri.ges.core.component.RunningState;
import com.esri.ges.core.property.Property;
import com.esri.ges.transport.OutboundTransportBase;
import com.esri.ges.transport.TransportDefinition;
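/**
 * Outbound transport that forwards incoming event bytes over TCP. In CLIENT mode it connects to the
 * configured host/port and writes the shared buffer to that socket; in SERVER mode it listens on the
 * configured port and fans the buffered data out to every connected client through a non-blocking selector.
 */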
public class TcpSquirtOutboundTransport extends OutboundTransportBase implements Runnable{
private static final int CLIENT_BUFFER_SIZE = 500 * 1024;
private static final int MAIN_BUFFER_SIZE = 500 * 1024;
static final private Log log = LogFactory.getLog(TcpSquirtOutboundTransport.class);
private ClientServerMode mode;
private String host;
private int port;
private SocketChannel socketChannel = null;
private Thread thread;
private String errorMessage;
private final ByteBuffer buffer = ByteBuffer.allocate(MAIN_BUFFER_SIZE);
private Selector selector;
private ServerSocketChannel serverSocketChannel;
private final Map<SocketChannel, ByteBuffer> connectionBuffers = new HashMap<SocketChannel, ByteBuffer>();
private boolean haveDataInIndividualBuffers;
private boolean receiving = false;
private boolean socketOpened = false;
private int clientConnectionTimeout;
private long timeStartedConnectingAsClient;
private ArrayList<ByteBuffer> bufferCache = new ArrayList<ByteBuffer>();
public TcpSquirtOutboundTransport(TransportDefinition definition)
throws ComponentException {
super(definition);
}
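    // Bytes written / channels serviced during the last pass of the run() loop; -1 signals the inner loop to go idle.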
Integer activity = 0;
@Override
public boolean isClusterable()
{
return false;
}
//@Override
public void start() throws RunningException {
switch (getRunningState())
{
case STARTING:
case STARTED:
return;
}
setRunningState(RunningState.STARTING);
thread = new Thread(this);
thread.setPriority(thread.getPriority()+1);
thread.start();
}
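    // Placeholder for per-buffer processing; currently a no-op, incoming data is buffered directly in receive().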
private void processBuffer(ByteBuffer bb)
{
}
//@Override
public void receive(ByteBuffer bb, String channelId) {
if (getRunningState() == RunningState.STARTED) {
receiving = true;
synchronized (buffer) {
try {
if (buffer.remaining() < bb.remaining())
log.error("The TCP/IP outbound transport is unable to keep up with the incoming data rate, dropping "
+ bb.remaining() + " bytes.");
else
buffer.put(bb);
} catch (BufferOverflowException ex) {
log.error("The TCP/IP outbound transport is unable to keep up with the incoming data rate, dropping "
+ bb.remaining() + " bytes.");
}
}
            wakeupThread();
}
}
    private synchronized void openSockets() {
if (mode == CLIENT) {
try
{
attemptClientConnection();
receiving = true;
} catch (IOException e) {
log.error(e);
}
} else if (mode == SERVER) {
try {
selector = Selector.open();
serverSocketChannel = ServerSocketChannel.open();
serverSocketChannel.configureBlocking(false);
serverSocketChannel.socket().bind(new InetSocketAddress(port));
serverSocketChannel.register(selector, SelectionKey.OP_ACCEPT);
receiving = true;
} catch (IOException ex) {
selector = null;
}
}
}
//@Override
public void run() {
try {
errorMessage = null;
applyProperties();
setRunningState(RunningState.STARTED);
while (getRunningState() == RunningState.STARTED) {
try {
if (receiving) {
synchronized (this) {
activity = 0;
while (activity > -1) {
if(!socketOpened)
{
                                    openSockets();
socketOpened = true;
}
activity = manageAllSockets();
if (haveDataInIndividualBuffers
&& connectionBuffers.isEmpty())
haveDataInIndividualBuffers = false;
if (activity == 0
&& !haveDataInIndividualBuffers) {
receiving = false;
activity = -1;
closeConnection();
socketOpened = false;
}
}
}
}
} catch (Exception e) {
log.error("Error trying to write buffer", e);
setRunningState(RunningState.ERROR);
}
}
cleanup();
if (getRunningState() == RunningState.STOPPING)
setRunningState(RunningState.STOPPED);
}
catch (Exception ex)
{
errorMessage = ex.getMessage();
log.error("Exiting TCP Transport due to unforeseen error", ex);
setRunningState(RunningState.ERROR);
return;
}
}
    private synchronized void wakeupThread()
{
notifyAll();
}
private Integer manageAllSockets()
{
int a = 0;
if (mode == CLIENT)
{
if( ! socketChannel.isConnected() )
{
try
{
attemptClientConnection();
}catch(IOException ex)
{
if( (System.currentTimeMillis() - timeStartedConnectingAsClient) > (clientConnectionTimeout * 1000) )
{
errorMessage = "Error connecting to the host "+host+":"+port+" ("+ex.getMessage()+").";
log.error( errorMessage, ex );
setRunningState(RunningState.ERROR);
}
}
}
else
{
synchronized(buffer)
{
if( buffer.position() != 0 )
{
buffer.flip();
int bytesWritten = 0;
try
{
bytesWritten = socketChannel.write( buffer );
} catch (IOException e)
{
                        if(e.getMessage() != null && e.getMessage().contains("An existing connection was forcibly closed by the remote host"))
{
try
{
socketChannel.close();
} catch (IOException e1)
{
// Do nothing.
}
}
log.error( "Error writing to the client "+host+":"+port+".", e );
}
buffer.compact();
a = bytesWritten;
}
}
}
}
else if (mode == SERVER)
{
int dataMoved = moveDataIntoIndividualChannelBuffers();
// Service all the sockets
int activeChannels = manageSelector();
a = activeChannels + dataMoved;
//if( haveDataInIndividualBuffers && connectionBuffers.isEmpty() )
//haveDataInIndividualBuffers = false;
}
//if( activity == 0 && !haveDataInIndividualBuffers )
//{
//receiving=false;
//closeConnection();
//}
return a;
}
private void attemptClientConnection() throws IOException
{
if( timeStartedConnectingAsClient == 0 )
{
timeStartedConnectingAsClient = System.currentTimeMillis();
}
socketChannel = SocketChannel.open();
socketChannel.connect(new InetSocketAddress(host, port));
timeStartedConnectingAsClient = 0;
}
private synchronized void snooze(long timer)
{
try
{
wait(timer);
}catch(InterruptedException ex)
{
}
}
private synchronized void closeConnection()
{
if (mode == CLIENT)
{
if (socketChannel != null)
{
try
{
socketChannel.close();
socketChannel = null;
}
catch (IOException ioe)
{
log.debug("Ignoring Exception", ioe);
}
}
}
else if (mode == SERVER)
{
for (SocketChannel chan : connectionBuffers.keySet())
{
try
{
if( chan.isOpen() )
chan.close();
}catch(IOException ioe)
{
log.debug("Exception while closing all sockets as part of the transport shutdown process.", ioe);
}
}
try
{
if(serverSocketChannel != null && serverSocketChannel.isOpen())
{
serverSocketChannel.close();
serverSocketChannel=null;
}
if( selector != null && selector.isOpen() ){
selector.close();
selector = null;
}
} catch (IOException e)
{
log.debug("Exception while trying to stop listening for client connections. " + e);
}
}
}
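    // SERVER mode: drains the shared outbound buffer and copies its contents into each connected client's private buffer.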
private int moveDataIntoIndividualChannelBuffers()
{
byte[] dst = {};
synchronized( buffer )
{
if (buffer.position() == 0)
return 0;
buffer.flip();
dst = new byte[buffer.remaining()];
buffer.get(dst);
buffer.compact();
}
        for( Iterator<SocketChannel> chanIterator = connectionBuffers.keySet().iterator(); chanIterator.hasNext(); )
        {
            SocketChannel chan = chanIterator.next();
            if(!chan.isConnected())
            {
                // Remove via the iterator to avoid a ConcurrentModificationException while iterating the key set.
                chanIterator.remove();
                continue;
            }
ByteBuffer connectionBuffer = connectionBuffers.get(chan);
try
{
if( connectionBuffer.remaining() > dst.length )
{
connectionBuffer.put(dst);
haveDataInIndividualBuffers = true;
}
else
{
String remoteClientAddress = chan.toString();
log.error( "Overflow while trying to write to the output buffer associated with address "+remoteClientAddress+".");
}
}catch(BufferOverflowException ex)
{
log.error( "Overflow while trying to write to an output buffer.");
}
}
return dst.length;
}
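    // Performs one non-blocking select pass: accepts new client connections and writes each writable client's buffered data to its socket.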
private int manageSelector()
{
int activeSelectors = 0;
int activeChannels = 0;
try
{
activeSelectors = selector.selectNow();
if( activeSelectors > 0 )
{
for (Iterator<SelectionKey> iterator = selector.selectedKeys().iterator(); iterator.hasNext();)
{
SelectionKey selectionKey = iterator.next();
iterator.remove();
try
{
activeChannels += processSelectionKey(selectionKey);
}
catch (IOException ex)
{
log.error(ex);
selectionKey.cancel();
}
}
}
}
catch (IOException ex)
{
log.error(ex);
}
return activeChannels;
}
private int processSelectionKey(SelectionKey key) throws IOException
{
if (!key.isValid())
return 0;
int count = 0;
if (key.isAcceptable())
{
ServerSocketChannel serverSocketChannel = (ServerSocketChannel) key.channel();
SocketChannel socketChannel = serverSocketChannel.accept();
if (socketChannel != null)
{
socketChannel.configureBlocking(false);
socketChannel.register(selector, SelectionKey.OP_WRITE);
connectionBuffers.put(socketChannel, ByteBuffer.allocate(CLIENT_BUFFER_SIZE));
count++;
}
}
if (key.isWritable())
{
SocketChannel channel = (SocketChannel) key.channel();
ByteBuffer buf = connectionBuffers.get(channel);
try
{
buf.flip();
int numberOfBytesWritten = channel.write(buf);
buf.compact();
if (numberOfBytesWritten == -1)
{
// The channel is closed.
connectionBuffers.remove(channel);
channel.register(selector, 0);
channel.close();
}
else
count += numberOfBytesWritten;
// Now we are going to see if all the individual buffers are empty
boolean foundOne = false;
for(SocketChannel chan : connectionBuffers.keySet())
{
ByteBuffer individualBuffer = connectionBuffers.get(chan);
if( individualBuffer.position() > 0 )
{
foundOne = true;
break;
}
}
haveDataInIndividualBuffers = foundOne;
}
catch (Exception ex)
{
                if( ex.getMessage() == null || !ex.getMessage().contains("An existing connection was forcibly closed by the remote host"))
log.error("Exception while writing to a socket.", ex);
connectionBuffers.remove(channel);
channel.register(selector, 0);
channel.close();
}
}
return count;
}
private void readProperties()
{
Property prop = getProperty( "host" );
host = prop.getValueAsString();
prop = getProperty( "port" );
port = (Integer)prop.getValue();
prop = getProperty( "mode" );
String modeString = prop.getValueAsString();
clientConnectionTimeout = hasProperty("clientConnectionTimeout") ? (Integer) getProperty("clientConnectionTimeout").getValue() : 60;
if( modeString != null && modeString.toUpperCase().trim().equals("CLIENT") )
mode = CLIENT;
else if( modeString != null && modeString.toUpperCase().trim().equals("SERVER") )
mode = SERVER;
else
{
log.error("Setting the TCP Transport to mode \""+modeString+"\" is not allowed. Must be SERVER or CLIENT.");
setRunningState(RunningState.ERROR);
return;
}
}
/*private void applyProperties() throws IOException
{
if (mode == CLIENT)
{
attemptClientConnection();
}
else if (mode == SERVER)
{
try
{
selector = Selector.open();
serverSocketChannel = ServerSocketChannel.open();
serverSocketChannel.configureBlocking(false);
serverSocketChannel.socket().bind(new InetSocketAddress(port));
serverSocketChannel.register(selector, SelectionKey.OP_ACCEPT);
}
catch (IOException ex)
{
selector = null;
throw ex;
}
}
}*/
private void applyProperties() throws IOException
{
if(mode==SERVER)
{
selector = Selector.open();
}
}
@Override
public void afterPropertiesSet()
{
try
{
readProperties();
if( getRunningState() == RunningState.STARTED )
{
cleanup();
applyProperties();
}
}catch( IOException ex )
{
errorMessage = ex.getMessage();
log.error(errorMessage);
setRunningState(RunningState.ERROR);
}
}
public synchronized void cleanup()
{
if(receiving)
receiving = false;
if (mode == CLIENT)
{
if (socketChannel != null)
{
try
{
socketChannel.close();
}
catch (IOException ioe)
{
log.debug("Ignoring Exception", ioe);
}
}
}
else if (mode == SERVER)
{
for (SocketChannel chan : connectionBuffers.keySet())
{
try
{
if( chan.isOpen() )
chan.close();
}catch(IOException ioe)
{
log.debug("Exception while closing all sockets as part of the transport shutdown process.", ioe);
}
}
try
{
if(serverSocketChannel != null && serverSocketChannel.isOpen())
serverSocketChannel.close();
if( selector != null && selector.isOpen() )
selector.close();
} catch (IOException e)
{
log.debug("Exception while trying to stop listening for client connections. " + e);
}
}
}
@Override
public void stop()
{
super.stop();
errorMessage = null;
if( getRunningState() == RunningState.ERROR )
setRunningState( RunningState.STOPPED );
}
@Override
public String getStatusDetails()
{
return errorMessage;
}
}
|
|
/*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing;
import java.io.Serializable;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* A wrapper around {@code TreeMap} that aggressively checks to see if keys are
* mutually comparable. This implementation passes the navigable map test
* suites.
*
* @author Louis Wasserman
*/
public final class SafeTreeMap<K, V>
implements Serializable, NavigableMap<K, V> {
@SuppressWarnings("unchecked")
private static final Comparator NATURAL_ORDER = new Comparator<Comparable>() {
@Override public int compare(Comparable o1, Comparable o2) {
return o1.compareTo(o2);
}
};
private final NavigableMap<K, V> delegate;
public SafeTreeMap() {
this(new TreeMap<K, V>());
}
public SafeTreeMap(Comparator<? super K> comparator) {
this(new TreeMap<K, V>(comparator));
}
public SafeTreeMap(Map<? extends K, ? extends V> map) {
this(new TreeMap<K, V>(map));
}
public SafeTreeMap(SortedMap<K, ? extends V> map) {
this(new TreeMap<K, V>(map));
}
private SafeTreeMap(NavigableMap<K, V> delegate) {
this.delegate = delegate;
if (delegate == null) {
throw new NullPointerException();
}
for (K k : keySet()) {
checkValid(k);
}
}
@Override public Entry<K, V> ceilingEntry(K key) {
return delegate.ceilingEntry(checkValid(key));
}
@Override public K ceilingKey(K key) {
return delegate.ceilingKey(checkValid(key));
}
@Override public void clear() {
delegate.clear();
}
@SuppressWarnings("unchecked")
@Override public Comparator<? super K> comparator() {
Comparator<? super K> comparator = delegate.comparator();
if (comparator == null) {
comparator = NATURAL_ORDER;
}
return comparator;
}
@Override public boolean containsKey(Object key) {
try {
return delegate.containsKey(checkValid(key));
} catch (NullPointerException e) {
return false;
} catch (ClassCastException e) {
return false;
}
}
@Override public boolean containsValue(Object value) {
return delegate.containsValue(value);
}
@Override public NavigableSet<K> descendingKeySet() {
return delegate.descendingKeySet();
}
@Override public NavigableMap<K, V> descendingMap() {
return new SafeTreeMap<K, V>(delegate.descendingMap());
}
@Override public Set<Entry<K, V>> entrySet() {
return new AbstractSet<Entry<K, V>>() {
private Set<Entry<K, V>> delegate() {
return delegate.entrySet();
}
@Override
public boolean contains(Object object) {
try {
return delegate().contains(object);
} catch (NullPointerException e) {
return false;
} catch (ClassCastException e) {
return false;
}
}
@Override
public Iterator<Entry<K, V>> iterator() {
return delegate().iterator();
}
@Override
public int size() {
return delegate().size();
}
@Override
public boolean remove(Object o) {
return delegate().remove(o);
}
@Override
public void clear() {
delegate().clear();
}
};
}
@Override public Entry<K, V> firstEntry() {
return delegate.firstEntry();
}
@Override public K firstKey() {
return delegate.firstKey();
}
@Override public Entry<K, V> floorEntry(K key) {
return delegate.floorEntry(checkValid(key));
}
@Override public K floorKey(K key) {
return delegate.floorKey(checkValid(key));
}
@Override public V get(Object key) {
return delegate.get(checkValid(key));
}
@Override public SortedMap<K, V> headMap(K toKey) {
return headMap(toKey, false);
}
@Override public NavigableMap<K, V> headMap(K toKey, boolean inclusive) {
return new SafeTreeMap<K, V>(
delegate.headMap(checkValid(toKey), inclusive));
}
@Override public Entry<K, V> higherEntry(K key) {
return delegate.higherEntry(checkValid(key));
}
@Override public K higherKey(K key) {
return delegate.higherKey(checkValid(key));
}
@Override public boolean isEmpty() {
return delegate.isEmpty();
}
@Override public NavigableSet<K> keySet() {
return navigableKeySet();
}
@Override public Entry<K, V> lastEntry() {
return delegate.lastEntry();
}
@Override public K lastKey() {
return delegate.lastKey();
}
@Override public Entry<K, V> lowerEntry(K key) {
return delegate.lowerEntry(checkValid(key));
}
@Override public K lowerKey(K key) {
return delegate.lowerKey(checkValid(key));
}
@Override public NavigableSet<K> navigableKeySet() {
return delegate.navigableKeySet();
}
@Override public Entry<K, V> pollFirstEntry() {
return delegate.pollFirstEntry();
}
@Override public Entry<K, V> pollLastEntry() {
return delegate.pollLastEntry();
}
@Override public V put(K key, V value) {
return delegate.put(checkValid(key), value);
}
@Override public void putAll(Map<? extends K, ? extends V> map) {
for (K key : map.keySet()) {
checkValid(key);
}
delegate.putAll(map);
}
@Override public V remove(Object key) {
return delegate.remove(checkValid(key));
}
@Override public int size() {
return delegate.size();
}
@Override public NavigableMap<K, V> subMap(
K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) {
return new SafeTreeMap<K, V>(delegate.subMap(
checkValid(fromKey), fromInclusive, checkValid(toKey), toInclusive));
}
@Override public SortedMap<K, V> subMap(K fromKey, K toKey) {
return subMap(fromKey, true, toKey, false);
}
@Override public SortedMap<K, V> tailMap(K fromKey) {
return tailMap(fromKey, true);
}
@Override public NavigableMap<K, V> tailMap(K fromKey, boolean inclusive) {
return new SafeTreeMap<K, V>(
delegate.tailMap(checkValid(fromKey), inclusive));
}
@Override public Collection<V> values() {
return delegate.values();
}
private <T> T checkValid(T t) {
// a ClassCastException is what's supposed to happen!
@SuppressWarnings("unchecked")
K k = (K) t;
comparator().compare(k, k);
return t;
}
@Override public boolean equals(Object obj) {
return delegate.equals(obj);
}
@Override public int hashCode() {
return delegate.hashCode();
}
@Override public String toString() {
return delegate.toString();
}
private static final long serialVersionUID = 0L;
}
|
|
package commom;
import android.app.Activity;
import android.app.AlertDialog;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaDataSource;
import android.media.MediaPlayer;
import android.os.Build;
import android.os.Environment;
import android.util.Log;
import android.view.Gravity;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.Toast;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
//import javax.swing.JFrame;
//import com.sun.media.jfxmedia.MediaPlayer;
import implementations.dm_kernel.IoTuser.JCL_Context;
import implementations.util.ImageFrame;
import implementations.util.ImagePanel;
import implementations.util.JCL_ApplicationContext;
import interfaces.kernel.JCL_Sensor;
import io.protostuff.Tag;
//import sun.audio.AudioData;
//import sun.audio.AudioDataStream;
//import sun.audio.AudioPlayer;
//import javax.sound.sampled.AudioFormat;
//import javax.sound.sampled.AudioInputStream;
//import javax.sound.sampled.AudioSystem;
//import javax.sound.sampled.Clip;
//import javax.sound.sampled.LineUnavailableException;
//import javax.sound.sampled.UnsupportedAudioFileException;
public class JCL_SensorImpl implements JCL_Sensor {
private static final long serialVersionUID = 4539013136634398910L;
@Tag(1)
private Object object;
@Tag(2)
private long time;
@Tag(3)
private String dataType;
@Override
public Object getObject() {
return object;
}
@Override
public void setObject(Object object) {
this.object = object;
}
@Override
public String getType() {
if (object instanceof float[])
return "float array";
if (object instanceof byte[])
return "image or audio";
return object.getClass().getName();
}
@Override
public String toString(){
if (object instanceof float[])
return Arrays.toString((float[]) object);
if (object instanceof byte[] && dataType!=null && dataType.equals("3gp"))
return "audio";
else if (object instanceof byte[] && dataType!=null && dataType.equals("jpeg"))
return "image";
return object.toString();
}
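    // Renders the sensor payload on the Android UI thread: JPEG bytes in an AlertDialog, "3gp" audio through MediaPlayer, anything else as a Toast of its toString() value.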
@Override
public void showData() {
try {
if (object instanceof byte[] && dataType != null && dataType.equals("jpeg")) {
// ImageFrame image = new ImageFrame(new ImagePanel((byte[]) object));
// image.setDefaultCloseOperation( JFrame.EXIT_ON_CLOSE );
// //determina a resolucao
// image.setSize( 1280, 960 );
// //no centro
// image.setLocationRelativeTo(null);
// image.setVisible( true );
try {
ByteArrayInputStream inputStream = new ByteArrayInputStream((byte[]) object);
Bitmap bitmap = BitmapFactory.decodeStream(inputStream);
ImageView picture = new ImageView(JCL_ApplicationContext.getContext());
picture.setImageBitmap(bitmap);
//LinearLayout linearLayout = new LinearLayout(JCL_ApplicationContext.getContext());
final ImageView imageView = new ImageView(JCL_ApplicationContext.getContext());
LinearLayout.LayoutParams vp =
new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
imageView.setLayoutParams(vp);
imageView.setImageBitmap(bitmap);
//linearLayout.addView(imageView);
// Toast toast = new Toast(JCL_ApplicationContext.getContext());
// toast.setGravity(Gravity.BOTTOM, 0, 200);
// toast.setDuration(Toast.LENGTH_LONG);
// toast.setView(imageView);
// toast.show();
Activity activity = JCL_ApplicationContext.getActivity();
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
try {
AlertDialog.Builder builder = new AlertDialog.Builder(JCL_ApplicationContext.getContext());
builder.setView(imageView).setNeutralButton("Close", null).show();
} catch (Exception e) {
e.printStackTrace();
}
}
});
} catch (Exception e) {
Log.e("ShowData", e.getMessage());
}
} else if (object instanceof byte[] && dataType != null && dataType.equals("3gp")) {
final String path = Environment.getExternalStorageDirectory().toString() + File.separatorChar + "jclAndroid"
+ File.separatorChar + System.currentTimeMillis() + ".wav";
MediaPlayer mPlayer = new MediaPlayer();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
mPlayer.setDataSource(new ByteArrayMediaDataSource((byte[]) object));
} else {
try {
FileOutputStream fout = new FileOutputStream(path);
fout.write((byte[]) object);
fout.flush();
fout.close();
mPlayer.setDataSource(path);
} catch (IOException e) {
e.printStackTrace();
}
}
mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
File file = new File(path);
if (file.exists())
file.delete();
mp.release();
}
});
try {
mPlayer.prepare();
} catch (IOException e) {
e.printStackTrace();
}
mPlayer.start();
// try {
// // audioIn.read((byte[]) object, 0, ((byte[]) object).length);
// AudioInputStream audioIn = AudioSystem.getAudioInputStream(new ByteArrayInputStream((byte[]) object));
// Clip clip = AudioSystem.getClip();
// clip.open(audioIn);
// clip.start();
// } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
} else {
Activity activity = JCL_ApplicationContext.getActivity();
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(JCL_ApplicationContext.getContext(),JCL_SensorImpl.this.toString(),
Toast.LENGTH_SHORT).show();
}
});
//System.out.println(this.toString());
}
}catch (Exception e){
Log.e("Show data", e.getMessage());
}
}
@Override
public long getTime() {
return time;
}
@Override
public void setTime(long time) {
this.time = time;
}
@Override
public void setDataType(String dataType) {
this.dataType = dataType;
}
}
|
|
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.internal.StringUtil;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
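/**
 * A {@link ByteBuf} implementation that delegates every operation to the wrapped buffer.
 */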
public class WrappedByteBuf extends ByteBuf {
protected final ByteBuf buf;
protected WrappedByteBuf(ByteBuf buf) {
if (buf == null) {
throw new NullPointerException("buf");
}
this.buf = buf;
}
@Override
public boolean hasMemoryAddress() {
return buf.hasMemoryAddress();
}
@Override
public long memoryAddress() {
return buf.memoryAddress();
}
@Override
public int capacity() {
return buf.capacity();
}
@Override
public ByteBuf capacity(int newCapacity) {
buf.capacity(newCapacity);
return this;
}
@Override
public int maxCapacity() {
return buf.maxCapacity();
}
@Override
public ByteBufAllocator alloc() {
return buf.alloc();
}
@Override
public ByteOrder order() {
return buf.order();
}
@Override
public ByteBuf order(ByteOrder endianness) {
return buf.order(endianness);
}
@Override
public ByteBuf unwrap() {
return buf;
}
@Override
public boolean isDirect() {
return buf.isDirect();
}
@Override
public int readerIndex() {
return buf.readerIndex();
}
@Override
public ByteBuf readerIndex(int readerIndex) {
buf.readerIndex(readerIndex);
return this;
}
@Override
public int writerIndex() {
return buf.writerIndex();
}
@Override
public ByteBuf writerIndex(int writerIndex) {
buf.writerIndex(writerIndex);
return this;
}
@Override
public ByteBuf setIndex(int readerIndex, int writerIndex) {
buf.setIndex(readerIndex, writerIndex);
return this;
}
@Override
public int readableBytes() {
return buf.readableBytes();
}
@Override
public int writableBytes() {
return buf.writableBytes();
}
@Override
public int maxWritableBytes() {
return buf.maxWritableBytes();
}
@Override
public boolean isReadable() {
return buf.isReadable();
}
@Override
public boolean isWritable() {
return buf.isWritable();
}
@Override
public ByteBuf clear() {
buf.clear();
return this;
}
@Override
public ByteBuf markReaderIndex() {
buf.markReaderIndex();
return this;
}
@Override
public ByteBuf resetReaderIndex() {
buf.resetReaderIndex();
return this;
}
@Override
public ByteBuf markWriterIndex() {
buf.markWriterIndex();
return this;
}
@Override
public ByteBuf resetWriterIndex() {
buf.resetWriterIndex();
return this;
}
@Override
public ByteBuf discardReadBytes() {
buf.discardReadBytes();
return this;
}
@Override
public ByteBuf discardSomeReadBytes() {
buf.discardSomeReadBytes();
return this;
}
@Override
public ByteBuf ensureWritable(int minWritableBytes) {
buf.ensureWritable(minWritableBytes);
return this;
}
@Override
public int ensureWritable(int minWritableBytes, boolean force) {
return buf.ensureWritable(minWritableBytes, force);
}
@Override
public boolean getBoolean(int index) {
return buf.getBoolean(index);
}
@Override
public byte getByte(int index) {
return buf.getByte(index);
}
@Override
public short getUnsignedByte(int index) {
return buf.getUnsignedByte(index);
}
@Override
public short getShort(int index) {
return buf.getShort(index);
}
@Override
public int getUnsignedShort(int index) {
return buf.getUnsignedShort(index);
}
@Override
public int getMedium(int index) {
return buf.getMedium(index);
}
@Override
public int getUnsignedMedium(int index) {
return buf.getUnsignedMedium(index);
}
@Override
public int getInt(int index) {
return buf.getInt(index);
}
@Override
public long getUnsignedInt(int index) {
return buf.getUnsignedInt(index);
}
@Override
public long getLong(int index) {
return buf.getLong(index);
}
@Override
public char getChar(int index) {
return buf.getChar(index);
}
@Override
public float getFloat(int index) {
return buf.getFloat(index);
}
@Override
public double getDouble(int index) {
return buf.getDouble(index);
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst) {
buf.getBytes(index, dst);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int length) {
buf.getBytes(index, dst, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
buf.getBytes(index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst) {
buf.getBytes(index, dst);
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
buf.getBytes(index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
buf.getBytes(index, dst);
return this;
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
buf.getBytes(index, out, length);
return this;
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
return buf.getBytes(index, out, length);
}
@Override
public ByteBuf setBoolean(int index, boolean value) {
buf.setBoolean(index, value);
return this;
}
@Override
public ByteBuf setByte(int index, int value) {
buf.setByte(index, value);
return this;
}
@Override
public ByteBuf setShort(int index, int value) {
buf.setShort(index, value);
return this;
}
@Override
public ByteBuf setMedium(int index, int value) {
buf.setMedium(index, value);
return this;
}
@Override
public ByteBuf setInt(int index, int value) {
buf.setInt(index, value);
return this;
}
@Override
public ByteBuf setLong(int index, long value) {
buf.setLong(index, value);
return this;
}
@Override
public ByteBuf setChar(int index, int value) {
buf.setChar(index, value);
return this;
}
@Override
public ByteBuf setFloat(int index, float value) {
buf.setFloat(index, value);
return this;
}
@Override
public ByteBuf setDouble(int index, double value) {
buf.setDouble(index, value);
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuf src) {
buf.setBytes(index, src);
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int length) {
buf.setBytes(index, src, length);
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
buf.setBytes(index, src, srcIndex, length);
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src) {
buf.setBytes(index, src);
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
buf.setBytes(index, src, srcIndex, length);
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
buf.setBytes(index, src);
return this;
}
@Override
public int setBytes(int index, InputStream in, int length) throws IOException {
return buf.setBytes(index, in, length);
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
return buf.setBytes(index, in, length);
}
@Override
public ByteBuf setZero(int index, int length) {
buf.setZero(index, length);
return this;
}
@Override
public boolean readBoolean() {
return buf.readBoolean();
}
@Override
public byte readByte() {
return buf.readByte();
}
@Override
public short readUnsignedByte() {
return buf.readUnsignedByte();
}
@Override
public short readShort() {
return buf.readShort();
}
@Override
public int readUnsignedShort() {
return buf.readUnsignedShort();
}
@Override
public int readMedium() {
return buf.readMedium();
}
@Override
public int readUnsignedMedium() {
return buf.readUnsignedMedium();
}
@Override
public int readInt() {
return buf.readInt();
}
@Override
public long readUnsignedInt() {
return buf.readUnsignedInt();
}
@Override
public long readLong() {
return buf.readLong();
}
@Override
public char readChar() {
return buf.readChar();
}
@Override
public float readFloat() {
return buf.readFloat();
}
@Override
public double readDouble() {
return buf.readDouble();
}
@Override
public ByteBuf readBytes(int length) {
return buf.readBytes(length);
}
@Override
public ByteBuf readSlice(int length) {
return buf.readSlice(length);
}
@Override
public ByteBuf readBytes(ByteBuf dst) {
buf.readBytes(dst);
return this;
}
@Override
public ByteBuf readBytes(ByteBuf dst, int length) {
buf.readBytes(dst, length);
return this;
}
@Override
public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) {
buf.readBytes(dst, dstIndex, length);
return this;
}
@Override
public ByteBuf readBytes(byte[] dst) {
buf.readBytes(dst);
return this;
}
@Override
public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
buf.readBytes(dst, dstIndex, length);
return this;
}
@Override
public ByteBuf readBytes(ByteBuffer dst) {
buf.readBytes(dst);
return this;
}
@Override
public ByteBuf readBytes(OutputStream out, int length) throws IOException {
buf.readBytes(out, length);
return this;
}
@Override
public int readBytes(GatheringByteChannel out, int length) throws IOException {
return buf.readBytes(out, length);
}
@Override
public ByteBuf skipBytes(int length) {
buf.skipBytes(length);
return this;
}
@Override
public ByteBuf writeBoolean(boolean value) {
buf.writeBoolean(value);
return this;
}
@Override
public ByteBuf writeByte(int value) {
buf.writeByte(value);
return this;
}
@Override
public ByteBuf writeShort(int value) {
buf.writeShort(value);
return this;
}
@Override
public ByteBuf writeMedium(int value) {
buf.writeMedium(value);
return this;
}
@Override
public ByteBuf writeInt(int value) {
buf.writeInt(value);
return this;
}
@Override
public ByteBuf writeLong(long value) {
buf.writeLong(value);
return this;
}
@Override
public ByteBuf writeChar(int value) {
buf.writeChar(value);
return this;
}
@Override
public ByteBuf writeFloat(float value) {
buf.writeFloat(value);
return this;
}
@Override
public ByteBuf writeDouble(double value) {
buf.writeDouble(value);
return this;
}
@Override
public ByteBuf writeBytes(ByteBuf src) {
buf.writeBytes(src);
return this;
}
@Override
public ByteBuf writeBytes(ByteBuf src, int length) {
buf.writeBytes(src, length);
return this;
}
@Override
public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) {
buf.writeBytes(src, srcIndex, length);
return this;
}
@Override
public ByteBuf writeBytes(byte[] src) {
buf.writeBytes(src);
return this;
}
@Override
public ByteBuf writeBytes(byte[] src, int srcIndex, int length) {
buf.writeBytes(src, srcIndex, length);
return this;
}
@Override
public ByteBuf writeBytes(ByteBuffer src) {
buf.writeBytes(src);
return this;
}
@Override
public int writeBytes(InputStream in, int length) throws IOException {
return buf.writeBytes(in, length);
}
@Override
public int writeBytes(ScatteringByteChannel in, int length) throws IOException {
return buf.writeBytes(in, length);
}
@Override
public ByteBuf writeZero(int length) {
buf.writeZero(length);
return this;
}
@Override
public int indexOf(int fromIndex, int toIndex, byte value) {
return buf.indexOf(fromIndex, toIndex, value);
}
@Override
public int bytesBefore(byte value) {
return buf.bytesBefore(value);
}
@Override
public int bytesBefore(int length, byte value) {
return buf.bytesBefore(length, value);
}
@Override
public int bytesBefore(int index, int length, byte value) {
return buf.bytesBefore(index, length, value);
}
@Override
public int forEachByte(ByteBufProcessor processor) {
return buf.forEachByte(processor);
}
@Override
public int forEachByte(int index, int length, ByteBufProcessor processor) {
return buf.forEachByte(index, length, processor);
}
@Override
public int forEachByteDesc(ByteBufProcessor processor) {
return buf.forEachByteDesc(processor);
}
@Override
public int forEachByteDesc(int index, int length, ByteBufProcessor processor) {
return buf.forEachByteDesc(index, length, processor);
}
@Override
public ByteBuf copy() {
return buf.copy();
}
@Override
public ByteBuf copy(int index, int length) {
return buf.copy(index, length);
}
@Override
public ByteBuf slice() {
return buf.slice();
}
@Override
public ByteBuf slice(int index, int length) {
return buf.slice(index, length);
}
@Override
public ByteBuf duplicate() {
return buf.duplicate();
}
@Override
public int nioBufferCount() {
return buf.nioBufferCount();
}
@Override
public ByteBuffer nioBuffer() {
return buf.nioBuffer();
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
return buf.nioBuffer(index, length);
}
@Override
public ByteBuffer[] nioBuffers() {
return buf.nioBuffers();
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
return buf.nioBuffers(index, length);
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
return buf.internalNioBuffer(index, length);
}
@Override
public boolean hasArray() {
return buf.hasArray();
}
@Override
public byte[] array() {
return buf.array();
}
@Override
public int arrayOffset() {
return buf.arrayOffset();
}
@Override
public String toString(Charset charset) {
return buf.toString(charset);
}
@Override
public String toString(int index, int length, Charset charset) {
return buf.toString(index, length, charset);
}
@Override
public int hashCode() {
return buf.hashCode();
}
@Override
public boolean equals(Object obj) {
return buf.equals(obj);
}
@Override
public int compareTo(ByteBuf buffer) {
return buf.compareTo(buffer);
}
@Override
public String toString() {
return StringUtil.simpleClassName(this) + '(' + buf.toString() + ')';
}
@Override
public ByteBuf retain(int increment) {
buf.retain(increment);
return this;
}
@Override
public ByteBuf retain() {
buf.retain();
return this;
}
@Override
public ByteBuf touch() {
buf.touch();
return this;
}
@Override
public ByteBuf touch(Object hint) {
buf.touch(hint);
return this;
}
@Override
public boolean isReadable(int size) {
return buf.isReadable(size);
}
@Override
public boolean isWritable(int size) {
return buf.isWritable(size);
}
@Override
public int refCnt() {
return buf.refCnt();
}
@Override
public boolean release() {
return buf.release();
}
@Override
public boolean release(int decrement) {
return buf.release(decrement);
}
}
|
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Writable;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
/**
* Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
* interval notation.
* Use this class at write-time ONLY. There is too much synchronization to use it at read time.
* Use {@link TimeRange} at read time instead; see toTimeRange() to build the TimeRange to use.
* MemStores use this class to track minimum and maximum timestamps. The TimeRangeTracker made by
* the MemStore is passed to the StoreFile for it to write out as part of a flush in the file
* metadata. If no memstore is involved -- i.e. a compaction -- then the StoreFile will calculate its
* own TimeRangeTracker as it appends. The StoreFile serialized TimeRangeTracker is used
* at read time via an instance of {@link TimeRange} to test if Cells fit the StoreFile TimeRange.
*/
@InterfaceAudience.Private
public abstract class TimeRangeTracker implements Writable {
public enum Type {
// thread-unsafe
NON_SYNC,
// thread-safe
SYNC
}
static final long INITIAL_MIN_TIMESTAMP = Long.MAX_VALUE;
static final long INITIAL_MAX_TIMESTAMP = -1L;
public static TimeRangeTracker create(Type type) {
switch (type) {
case NON_SYNC:
return new NonSyncTimeRangeTracker();
case SYNC:
return new SyncTimeRangeTracker();
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
}
public static TimeRangeTracker create(Type type, TimeRangeTracker trt) {
switch (type) {
case NON_SYNC:
return new NonSyncTimeRangeTracker(trt);
case SYNC:
return new SyncTimeRangeTracker(trt);
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
}
public static TimeRangeTracker create(Type type, long minimumTimestamp, long maximumTimestamp) {
switch (type) {
case NON_SYNC:
return new NonSyncTimeRangeTracker(minimumTimestamp, maximumTimestamp);
case SYNC:
return new SyncTimeRangeTracker(minimumTimestamp, maximumTimestamp);
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
}
protected abstract void setMax(long ts);
protected abstract void setMin(long ts);
protected abstract boolean compareAndSetMin(long expect, long update);
protected abstract boolean compareAndSetMax(long expect, long update);
/**
* Update the current TimestampRange to include the timestamp from <code>cell</code>.
* If the Key is of type DeleteColumn or DeleteFamily, it includes the
* entire time range from 0 to the timestamp of the key.
* @param cell the Cell to include
*/
public void includeTimestamp(final Cell cell) {
includeTimestamp(cell.getTimestamp());
if (CellUtil.isDeleteColumnOrFamily(cell)) {
includeTimestamp(0);
}
}
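/*
* Illustrative note added for this document (not part of the original HBase source): a
* DeleteColumn/DeleteFamily cell widens the tracked range down to 0. Assuming a hypothetical
* delete-family cell with timestamp 1000:
*
*   tracker.includeTimestamp(cell);   // first includes 1000, then includes 0
*   // the tracker now covers [0, 1000]
*/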
/**
* If required, update the current TimestampRange to include timestamp
* @param timestamp the timestamp value to include
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MT_CORRECTNESS",
justification="Intentional")
void includeTimestamp(final long timestamp) {
long initialMinTimestamp = getMin();
if (timestamp < initialMinTimestamp) {
long curMinTimestamp = initialMinTimestamp;
while (timestamp < curMinTimestamp) {
if (!compareAndSetMin(curMinTimestamp, timestamp)) {
curMinTimestamp = getMin();
} else {
// successfully set minimumTimestamp, break.
break;
}
}
// When it reaches here, there are two possibilities:
// 1). timestamp >= curMinTimestamp, someone already sets the minimumTimestamp. In this case,
// it still needs to check if initialMinTimestamp == INITIAL_MIN_TIMESTAMP to see
// if maximumTimestamp needs to be updated. Someone may already have set both
// minimumTimestamp/maximumTimestamp to the same value (curMinTimestamp), so the
// maximumTimestamp check below is still required.
// 2). timestamp < curMinTimestamp, it sets the minimumTimestamp successfully.
// In this case, it still needs to check if initialMinTimestamp == INITIAL_MIN_TIMESTAMP
// to see if it needs to set maximumTimestamp.
if (initialMinTimestamp != INITIAL_MIN_TIMESTAMP) {
// Someone already sets minimumTimestamp and timestamp is less than minimumTimestamp.
// In this case, no need to set maximumTimestamp as it will be set to at least
// initialMinTimestamp.
return;
}
}
long curMaxTimestamp = getMax();
if (timestamp > curMaxTimestamp) {
while (timestamp > curMaxTimestamp) {
if (!compareAndSetMax(curMaxTimestamp, timestamp)) {
curMaxTimestamp = getMax();
} else {
// successfully set maximumTimestamp, break
break;
}
}
}
}
/**
* Check if the range has ANY overlap with TimeRange
* @param tr TimeRange, it expects [minStamp, maxStamp)
* @return True if there is overlap, false otherwise
*/
public boolean includesTimeRange(final TimeRange tr) {
return (getMin() < tr.getMax() && getMax() >= tr.getMin());
}
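/*
* Worked example added for this document (not part of the original HBase source): the tracker
* range is closed [min, max] while TimeRange is half-open [minStamp, maxStamp).
*
*   tracker [5, 10] vs TimeRange [10, 20)  ->  5 < 20 && 10 >= 10           ->  overlap
*   tracker [5, 10] vs TimeRange [11, 20)  ->  5 < 20 but 10 >= 11 is false ->  no overlap
*/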
/**
* @return the minimumTimestamp
*/
public abstract long getMin();
/**
* @return the maximumTimestamp
*/
public abstract long getMax();
public void write(final DataOutput out) throws IOException {
out.writeLong(getMin());
out.writeLong(getMax());
}
public void readFields(final DataInput in) throws IOException {
setMin(in.readLong());
setMax(in.readLong());
}
@Override
public String toString() {
return "[" + getMin() + "," + getMax() + "]";
}
/**
* @return An instance of NonSyncTimeRangeTracker filled w/ the content of serialized
* NonSyncTimeRangeTracker in <code>timeRangeTrackerBytes</code>.
* @throws IOException
*/
public static TimeRangeTracker getTimeRangeTracker(final byte [] timeRangeTrackerBytes)
throws IOException {
if (timeRangeTrackerBytes == null) return null;
TimeRangeTracker trt = TimeRangeTracker.create(Type.NON_SYNC);
Writables.copyWritable(timeRangeTrackerBytes, trt);
return trt;
}
/**
* @return An instance of a TimeRange made from the serialized TimeRangeTracker passed in
* <code>timeRangeTrackerBytes</code>.
* @throws IOException
*/
static TimeRange getTimeRange(final byte [] timeRangeTrackerBytes) throws IOException {
TimeRangeTracker trt = getTimeRangeTracker(timeRangeTrackerBytes);
return trt == null? null: trt.toTimeRange();
}
/**
* @return Make a TimeRange from current state of <code>this</code>.
*/
TimeRange toTimeRange() {
long min = getMin();
long max = getMax();
// Initial TimeRangeTracker timestamps are the opposite of what you want for a TimeRange. Fix!
if (min == INITIAL_MIN_TIMESTAMP) {
min = TimeRange.INITIAL_MIN_TIMESTAMP;
}
if (max == INITIAL_MAX_TIMESTAMP) {
max = TimeRange.INITIAL_MAX_TIMESTAMP;
}
return new TimeRange(min, max);
}
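/**
* Illustrative sketch added for this document (not part of the original HBase source): typical
* write-time usage followed by conversion to a read-time {@link TimeRange}. The method name is
* hypothetical and exists only to keep the example compilable in place.
*/
static TimeRange exampleWriteTimeUsage() {
TimeRangeTracker trt = TimeRangeTracker.create(Type.SYNC);
trt.includeTimestamp(1000L); // min and max both become 1000
trt.includeTimestamp(2000L); // max becomes 2000
return trt.toTimeRange();    // [1000, 2000]
}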
private static class NonSyncTimeRangeTracker extends TimeRangeTracker {
private long minimumTimestamp = INITIAL_MIN_TIMESTAMP;
private long maximumTimestamp = INITIAL_MAX_TIMESTAMP;
NonSyncTimeRangeTracker() {
}
NonSyncTimeRangeTracker(final TimeRangeTracker trt) {
this.minimumTimestamp = trt.getMin();
this.maximumTimestamp = trt.getMax();
}
NonSyncTimeRangeTracker(long minimumTimestamp, long maximumTimestamp) {
this.minimumTimestamp = minimumTimestamp;
this.maximumTimestamp = maximumTimestamp;
}
@Override
protected void setMax(long ts) {
maximumTimestamp = ts;
}
@Override
protected void setMin(long ts) {
minimumTimestamp = ts;
}
@Override
protected boolean compareAndSetMin(long expect, long update) {
if (minimumTimestamp != expect) {
return false;
}
minimumTimestamp = update;
return true;
}
@Override
protected boolean compareAndSetMax(long expect, long update) {
if (maximumTimestamp != expect) {
return false;
}
maximumTimestamp = update;
return true;
}
@Override
public long getMin() {
return minimumTimestamp;
}
@Override
public long getMax() {
return maximumTimestamp;
}
}
@VisibleForTesting
// In order to estimate the heap size, this inner class needs to be accessible to TestHeapSize.
public static class SyncTimeRangeTracker extends TimeRangeTracker {
private final AtomicLong minimumTimestamp = new AtomicLong(INITIAL_MIN_TIMESTAMP);
private final AtomicLong maximumTimestamp = new AtomicLong(INITIAL_MAX_TIMESTAMP);
private SyncTimeRangeTracker() {
}
SyncTimeRangeTracker(final TimeRangeTracker trt) {
this.minimumTimestamp.set(trt.getMin());
this.maximumTimestamp.set(trt.getMax());
}
SyncTimeRangeTracker(long minimumTimestamp, long maximumTimestamp) {
this.minimumTimestamp.set(minimumTimestamp);
this.maximumTimestamp.set(maximumTimestamp);
}
@Override
protected void setMax(long ts) {
maximumTimestamp.set(ts);
}
@Override
protected void setMin(long ts) {
minimumTimestamp.set(ts);
}
@Override
protected boolean compareAndSetMin(long expect, long update) {
return minimumTimestamp.compareAndSet(expect, update);
}
@Override
protected boolean compareAndSetMax(long expect, long update) {
return maximumTimestamp.compareAndSet(expect, update);
}
@Override
public long getMin() {
return minimumTimestamp.get();
}
@Override
public long getMax() {
return maximumTimestamp.get();
}
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workdocs.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workdocs-2016-05-01/UpdateDocument" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateDocumentRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
* </p>
*/
private String authenticationToken;
/**
* <p>
* The ID of the document.
* </p>
*/
private String documentId;
/**
* <p>
* The name of the document.
* </p>
*/
private String name;
/**
* <p>
* The ID of the parent folder.
* </p>
*/
private String parentFolderId;
/**
* <p>
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* </p>
*/
private String resourceState;
/**
* <p>
* Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
* </p>
*
* @param authenticationToken
* Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the
* API.
*/
public void setAuthenticationToken(String authenticationToken) {
this.authenticationToken = authenticationToken;
}
/**
* <p>
* Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
* </p>
*
* @return Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the
* API.
*/
public String getAuthenticationToken() {
return this.authenticationToken;
}
/**
* <p>
* Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the API.
* </p>
*
* @param authenticationToken
* Amazon WorkDocs authentication token. Not required when using AWS administrator credentials to access the
* API.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateDocumentRequest withAuthenticationToken(String authenticationToken) {
setAuthenticationToken(authenticationToken);
return this;
}
/**
* <p>
* The ID of the document.
* </p>
*
* @param documentId
* The ID of the document.
*/
public void setDocumentId(String documentId) {
this.documentId = documentId;
}
/**
* <p>
* The ID of the document.
* </p>
*
* @return The ID of the document.
*/
public String getDocumentId() {
return this.documentId;
}
/**
* <p>
* The ID of the document.
* </p>
*
* @param documentId
* The ID of the document.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateDocumentRequest withDocumentId(String documentId) {
setDocumentId(documentId);
return this;
}
/**
* <p>
* The name of the document.
* </p>
*
* @param name
* The name of the document.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* The name of the document.
* </p>
*
* @return The name of the document.
*/
public String getName() {
return this.name;
}
/**
* <p>
* The name of the document.
* </p>
*
* @param name
* The name of the document.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateDocumentRequest withName(String name) {
setName(name);
return this;
}
/**
* <p>
* The ID of the parent folder.
* </p>
*
* @param parentFolderId
* The ID of the parent folder.
*/
public void setParentFolderId(String parentFolderId) {
this.parentFolderId = parentFolderId;
}
/**
* <p>
* The ID of the parent folder.
* </p>
*
* @return The ID of the parent folder.
*/
public String getParentFolderId() {
return this.parentFolderId;
}
/**
* <p>
* The ID of the parent folder.
* </p>
*
* @param parentFolderId
* The ID of the parent folder.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateDocumentRequest withParentFolderId(String parentFolderId) {
setParentFolderId(parentFolderId);
return this;
}
/**
* <p>
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* </p>
*
* @param resourceState
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* @see ResourceStateType
*/
public void setResourceState(String resourceState) {
this.resourceState = resourceState;
}
/**
* <p>
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* </p>
*
* @return The resource state of the document. Only ACTIVE and RECYCLED are supported.
* @see ResourceStateType
*/
public String getResourceState() {
return this.resourceState;
}
/**
* <p>
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* </p>
*
* @param resourceState
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ResourceStateType
*/
public UpdateDocumentRequest withResourceState(String resourceState) {
setResourceState(resourceState);
return this;
}
/**
* <p>
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* </p>
*
* @param resourceState
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* @see ResourceStateType
*/
public void setResourceState(ResourceStateType resourceState) {
withResourceState(resourceState);
}
/**
* <p>
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* </p>
*
* @param resourceState
* The resource state of the document. Only ACTIVE and RECYCLED are supported.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ResourceStateType
*/
public UpdateDocumentRequest withResourceState(ResourceStateType resourceState) {
this.resourceState = resourceState.toString();
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAuthenticationToken() != null)
sb.append("AuthenticationToken: ").append("***Sensitive Data Redacted***").append(",");
if (getDocumentId() != null)
sb.append("DocumentId: ").append(getDocumentId()).append(",");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getParentFolderId() != null)
sb.append("ParentFolderId: ").append(getParentFolderId()).append(",");
if (getResourceState() != null)
sb.append("ResourceState: ").append(getResourceState());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UpdateDocumentRequest == false)
return false;
UpdateDocumentRequest other = (UpdateDocumentRequest) obj;
if (other.getAuthenticationToken() == null ^ this.getAuthenticationToken() == null)
return false;
if (other.getAuthenticationToken() != null && other.getAuthenticationToken().equals(this.getAuthenticationToken()) == false)
return false;
if (other.getDocumentId() == null ^ this.getDocumentId() == null)
return false;
if (other.getDocumentId() != null && other.getDocumentId().equals(this.getDocumentId()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getParentFolderId() == null ^ this.getParentFolderId() == null)
return false;
if (other.getParentFolderId() != null && other.getParentFolderId().equals(this.getParentFolderId()) == false)
return false;
if (other.getResourceState() == null ^ this.getResourceState() == null)
return false;
if (other.getResourceState() != null && other.getResourceState().equals(this.getResourceState()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAuthenticationToken() == null) ? 0 : getAuthenticationToken().hashCode());
hashCode = prime * hashCode + ((getDocumentId() == null) ? 0 : getDocumentId().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getParentFolderId() == null) ? 0 : getParentFolderId().hashCode());
hashCode = prime * hashCode + ((getResourceState() == null) ? 0 : getResourceState().hashCode());
return hashCode;
}
@Override
public UpdateDocumentRequest clone() {
return (UpdateDocumentRequest) super.clone();
}
}
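/*
* Illustrative sketch added for this document (not part of the generated AWS SDK source): the
* request is normally populated with the fluent with* setters before being sent to the WorkDocs
* client; ResourceStateType.ACTIVE is assumed to be one of the supported enum constants.
*
*   UpdateDocumentRequest request = new UpdateDocumentRequest()
*           .withDocumentId("document-id")
*           .withName("renamed-document.docx")
*           .withParentFolderId("parent-folder-id")
*           .withResourceState(ResourceStateType.ACTIVE);
*/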
|
|
/*
* Copyright 2011-2012 Alfresco Software Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of an unsupported extension to Alfresco.
*/
package org.alfresco.util.encryption.impl;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.security.spec.InvalidKeySpecException;
import org.junit.Test;
import static org.junit.Assert.*;
import org.alfresco.util.encryption.CannotDecryptException;
import org.alfresco.util.encryption.Encrypter;
/**
* This unit test exercises the <code>AES256PasswordBasedEncrypter</code>.
*
* @author Peter Monks ([email protected])
* @version $Id: TestAES256PasswordBasedEncrypter.java 41626 2012-09-14 23:59:00Z wabson $
* @see org.alfresco.util.encryption.impl.AES256PasswordBasedEncrypter
*
*/
public class TestAES256PasswordBasedEncrypter
{
@Test
public void testSingleEncrypterInstance()
throws Exception
{
char[] password = "opensesame".toCharArray();
String original = "The quick brown fox jumps over the lazy dog.";
Encrypter enc = new AES256PasswordBasedEncrypter(password);
String encrypted = enc.encrypt(original);
String decrypted = enc.decrypt(encrypted);
assertEquals(original, decrypted);
}
@Test
public void testSeparateEncrypterInstances()
throws Exception
{
char[] password = "opensesame".toCharArray();
String original = "Some other message.";
Encrypter enc1 = new AES256PasswordBasedEncrypter(password);
Encrypter enc2 = new AES256PasswordBasedEncrypter(password);
String encrypted = enc1.encrypt(original);
String decrypted = enc2.decrypt(encrypted);
assertEquals(original, decrypted);
}
@Test
public void testHardcodedEncryptedText()
throws Exception
{
char[] password = "opensesame".toCharArray();
Encrypter enc = new AES256PasswordBasedEncrypter(password);
String decrypted = enc.decrypt("NWhmZDA3OE53USsrWWtpdUtzQm5oUT09LGhPcTEyZ3RwZTR6TDFZRVRTaVFFV2svSmo2QkZEZmkzaFdmazNlMUhldlRPNzVjU1M3dmVzbzNUUEx6NHpBUko=");
assertEquals("The quick brown fox jumps over the lazy dog.", decrypted);
}
@Test
public void testUTF8Message()
throws Exception
{
char[] password = "opensesame".toCharArray();
String original = "\u79C1\u306F\u30AC\u30E9\u30B9\u3092\u98DF\u3079\u3089\u308C\u307E\u3059\u3002\u305D\u308C\u306F\u79C1\u3092\u50B7\u3064\u3051\u307E\u305B\u3093\u3002";
Encrypter enc = new AES256PasswordBasedEncrypter(password);
String encrypted = enc.encrypt(original);
String decrypted = enc.decrypt(encrypted);
assertEquals(original, decrypted);
}
@Test
public void testEmptyMessage()
throws Exception
{
char[] password = "opensesame".toCharArray();
String original = "";
Encrypter enc = new AES256PasswordBasedEncrypter(password);
String encrypted = enc.encrypt(original);
String decrypted = enc.decrypt(encrypted);
assertEquals(original, decrypted);
}
@Test
public void testEmptyPassword()
throws Exception
{
char[] password = "".toCharArray();
String original = "The quick brown fox jumps over the lazy dog.";
try
{
Encrypter enc = new AES256PasswordBasedEncrypter(password);
String encrypted = enc.encrypt(original);
String decrypted = enc.decrypt(encrypted);
assertEquals(original, decrypted);
// If we got this far, something went very very wrong
fail("Decryption apparently succeeded, even though the password was blank.");
}
catch (final InvalidKeySpecException ikse) // Thrown if assertions are not enabled
{
// Success!
}
catch(final AssertionError ae) // Thrown if assertions are enabled (eg. during Maven test)
{
// Success!
}
}
@Test
public void testMismatchedPasswords()
throws Exception
{
char[] password1 = "opensesame".toCharArray();
char[] password2 = "notopensesame".toCharArray();
String original = "The quick brown fox jumps over the lazy dog.";
try
{
Encrypter enc1 = new AES256PasswordBasedEncrypter(password1);
Encrypter enc2 = new AES256PasswordBasedEncrypter(password2);
String encrypted = enc1.encrypt(original);
String decrypted = enc2.decrypt(encrypted);
assertEquals(original, decrypted);
// If we got this far, something went very very wrong
fail("Decryption apparently succeeded, even though the passwords were different.");
}
catch (final CannotDecryptException cde)
{
// Success!
}
}
@Test
public void testMismatchedPasswordsOfSameLength()
throws Exception
{
char[] password1 = "opensesame".toCharArray();
char[] password2 = "emasesnepo".toCharArray();
String original = "The quick brown fox jumps over the lazy dog.";
try
{
Encrypter enc1 = new AES256PasswordBasedEncrypter(password1);
Encrypter enc2 = new AES256PasswordBasedEncrypter(password2);
String encrypted = enc1.encrypt(original);
String decrypted = enc2.decrypt(encrypted);
assertEquals(original, decrypted);
// If we got this far, something went very very wrong
fail("Decryption apparently succeeded, even though the passwords were different.");
}
catch (final CannotDecryptException cde)
{
// Success!
}
}
@Test
public void testEmptyEncryptedText()
throws Exception
{
char[] password = "opensesame".toCharArray();
Encrypter enc = new AES256PasswordBasedEncrypter(password);
try
{
String decrypted = enc.decrypt("");
fail("Decryption should have failed.");
}
catch (final CannotDecryptException cde)
{
// Success!
}
}
@Test
public void testSyntacticallyIncorrectEncryptedTest()
throws Exception
{
char[] password = "opensesame".toCharArray();
Encrypter enc = new AES256PasswordBasedEncrypter(password);
try
{
String decrypted = enc.decrypt("abcd1234");
fail("Decryption should have failed.");
}
catch (final CannotDecryptException cde)
{
// Success!
}
}
@Test
public void testWronglySeparatedEncryptedText()
throws Exception
{
char[] password = "opensesame".toCharArray();
Encrypter enc = new AES256PasswordBasedEncrypter(password);
try
{
String decrypted = enc.decrypt("y1hlDQgx2P75ksdxwcTaYQ==-jJqhtPCK7NhbpDehXPKqDVOQB3FLVYmLb2C5b8KJIzkuYIaXnroLXCq77I9W4Dd8"); // Note: wrong separator character (- instead of ,)
fail("Decryption should have failed.");
}
catch (final CannotDecryptException cde)
{
// Success!
}
}
/**
* This method is not a unit test - rather it provides a convenient way to generate encrypted text for use elsewhere.
*
* To enable it, uncomment the @Test annotation.
*/
// @Test
public void notATest_outputEncryptedText()
throws Exception
{
char[] password = "CHANGEME!".toCharArray();
String original = "admin";
Encrypter enc = new AES256PasswordBasedEncrypter(password);
String encrypted = enc.encrypt(original);
// Remember: System.out is not natively Unicode capable - hence why we wrap it in a PrintWriter
PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"));
out.println(encrypted);
out.flush();
out.close();
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jctools.queues.atomic;
import java.util.AbstractQueue;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReferenceArray;
import org.jctools.queues.QueueProgressIndicators;
import org.jctools.util.Pow2;
public class SpscUnboundedAtomicArrayQueue<E> extends AbstractQueue<E> implements QueueProgressIndicators{
static final int MAX_LOOK_AHEAD_STEP = Integer.getInteger("jctools.spsc.max.lookahead.step", 4096);
protected final AtomicLong producerIndex;
protected int producerLookAheadStep;
protected long producerLookAhead;
protected int producerMask;
protected AtomicReferenceArray<Object> producerBuffer;
protected int consumerMask;
protected AtomicReferenceArray<Object> consumerBuffer;
protected final AtomicLong consumerIndex;
private static final Object HAS_NEXT = new Object();
public SpscUnboundedAtomicArrayQueue(final int chunkSize) {
int p2ChunkSize = Math.max(Pow2.roundToPowerOfTwo(chunkSize), 16);
int mask = p2ChunkSize - 1;
AtomicReferenceArray<Object> buffer = new AtomicReferenceArray<Object>(p2ChunkSize + 1);
producerBuffer = buffer;
producerMask = mask;
adjustLookAheadStep(p2ChunkSize);
consumerBuffer = buffer;
consumerMask = mask;
producerLookAhead = mask - 1; // we know it's all empty to start with
producerIndex = new AtomicLong();
consumerIndex = new AtomicLong();
soProducerIndex(0L);
}
@Override
public final Iterator<E> iterator() {
throw new UnsupportedOperationException();
}
@Override
public String toString() {
return this.getClass().getName();
}
/**
* {@inheritDoc}
* <p>
* This implementation is correct for single producer thread use only.
*/
@Override
public final boolean offer(final E e) {
if (null == e) {
throw new NullPointerException();
}
// local load of field to avoid repeated loads after volatile reads
final AtomicReferenceArray<Object> buffer = producerBuffer;
final long index = lpProducerIndex();
final int mask = producerMask;
final int offset = calcWrappedOffset(index, mask);
if (index < producerLookAhead) {
return writeToQueue(buffer, e, index, offset);
} else {
final int lookAheadStep = producerLookAheadStep;
// go around the buffer or resize if full (unless we hit max capacity)
int lookAheadElementOffset = calcWrappedOffset(index + lookAheadStep, mask);
if (null == lvElement(buffer, lookAheadElementOffset)) {// LoadLoad
producerLookAhead = index + lookAheadStep - 1; // joy, there's plenty of room
return writeToQueue(buffer, e, index, offset);
} else if (null == lvElement(buffer, calcWrappedOffset(index + 1, mask))) { // buffer is not full
return writeToQueue(buffer, e, index, offset);
} else {
resize(buffer, index, offset, e, mask); // add a buffer and link old to new
return true;
}
}
}
private boolean writeToQueue(final AtomicReferenceArray<Object> buffer, final E e, final long index, final int offset) {
soElement(buffer, offset, e);// StoreStore
soProducerIndex(index + 1);// this ensures atomic write of long on 32bit platforms
return true;
}
private void resize(final AtomicReferenceArray<Object> oldBuffer, final long currIndex, final int offset, final E e,
final long mask) {
final int capacity = oldBuffer.length();
final AtomicReferenceArray<Object> newBuffer = new AtomicReferenceArray<Object>(capacity);
producerBuffer = newBuffer;
producerLookAhead = currIndex + mask - 1;
soElement(newBuffer, offset, e);// StoreStore
soNext(oldBuffer, newBuffer);
soElement(oldBuffer, offset, HAS_NEXT); // new buffer is visible after element is inserted
soProducerIndex(currIndex + 1);// this ensures correctness on 32bit platforms
}
private void soNext(AtomicReferenceArray<Object> curr, AtomicReferenceArray<Object> next) {
soElement(curr, calcDirectOffset(curr.length() - 1), next);
}
@SuppressWarnings("unchecked")
private AtomicReferenceArray<Object> lvNext(AtomicReferenceArray<Object> curr) {
return (AtomicReferenceArray<Object>)lvElement(curr, calcDirectOffset(curr.length() - 1));
}
/**
* {@inheritDoc}
* <p>
* This implementation is correct for single consumer thread use only.
*/
@SuppressWarnings("unchecked")
@Override
public final E poll() {
// local load of field to avoid repeated loads after volatile reads
final AtomicReferenceArray<Object> buffer = consumerBuffer;
final long index = lpConsumerIndex();
final int mask = consumerMask;
final int offset = calcWrappedOffset(index, mask);
final Object e = lvElement(buffer, offset);// LoadLoad
boolean isNextBuffer = e == HAS_NEXT;
if (null != e && !isNextBuffer) {
soConsumerIndex(index + 1);// this ensures correctness on 32bit platforms
soElement(buffer, offset, null);// StoreStore
return (E) e;
} else if (isNextBuffer) {
return newBufferPoll(buffer, index, mask);
}
return null;
}
@SuppressWarnings("unchecked")
private E newBufferPoll(AtomicReferenceArray<Object> buffer, final long index, final int mask) {
AtomicReferenceArray<Object> nextBuffer = lvNext(buffer);
consumerBuffer = nextBuffer;
final int offsetInNew = calcWrappedOffset(index, mask);
final E n = (E) lvElement(nextBuffer, offsetInNew);// LoadLoad
soConsumerIndex(index + 1);// this ensures correctness on 32bit platforms
soElement(nextBuffer, offsetInNew, null);// StoreStore
// prevent extended retention if the buffer is in old gen and the nextBuffer is in young gen
soNext(buffer, null);
return n;
}
/**
* {@inheritDoc}
* <p>
* This implementation is correct for single consumer thread use only.
*/
@SuppressWarnings("unchecked")
@Override
public final E peek() {
final AtomicReferenceArray<Object> buffer = consumerBuffer;
final long index = lpConsumerIndex();
final int mask = consumerMask;
final int offset = calcWrappedOffset(index, mask);
final Object e = lvElement(buffer, offset);// LoadLoad
if (e == HAS_NEXT) {
return newBufferPeek(lvNext(buffer), index, mask);
}
return (E) e;
}
@SuppressWarnings("unchecked")
private E newBufferPeek(AtomicReferenceArray<Object> nextBuffer, final long index, final int mask) {
consumerBuffer = nextBuffer;
final int offsetInNew = calcWrappedOffset(index, mask);
return (E) lvElement(nextBuffer, offsetInNew);// LoadLoad
}
@Override
public final int size() {
/*
* It is possible for a thread to be interrupted or reschedule between the read of the producer and
* consumer indices, therefore protection is required to ensure size is within valid range. In the
* event of concurrent polls/offers to this method the size is OVER estimated as we read consumer
* index BEFORE the producer index.
*/
long after = lvConsumerIndex();
while (true) {
final long before = after;
final long currentProducerIndex = lvProducerIndex();
after = lvConsumerIndex();
if (before == after) {
return (int) (currentProducerIndex - after);
}
}
}
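/*
* Illustrative note added for this document (not part of the original JCTools source): the loop
* re-reads the consumer index until two consecutive reads agree, so the returned size was valid
* at some instant. For example:
*
*   before = 5, producer = 9, after = 5  ->  size = 9 - 5 = 4 (consistent snapshot)
*   before = 5, producer = 9, after = 7  ->  consumer moved, retry with before = 7
*/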
private void adjustLookAheadStep(int capacity) {
producerLookAheadStep = Math.min(capacity / 4, MAX_LOOK_AHEAD_STEP);
}
private long lvProducerIndex() {
return producerIndex.get();
}
private long lvConsumerIndex() {
return consumerIndex.get();
}
private long lpProducerIndex() {
return producerIndex.get();
}
private long lpConsumerIndex() {
return consumerIndex.get();
}
private void soProducerIndex(long v) {
producerIndex.lazySet(v);
}
private void soConsumerIndex(long v) {
consumerIndex.lazySet(v);
}
private static int calcWrappedOffset(long index, int mask) {
return calcDirectOffset((int)index & mask);
}
private static int calcDirectOffset(int index) {
return index;
}
private static void soElement(AtomicReferenceArray<Object> buffer, int offset, Object e) {
buffer.lazySet(offset, e);
}
private static <E> Object lvElement(AtomicReferenceArray<Object> buffer, int offset) {
return buffer.get(offset);
}
@Override
public long currentProducerIndex() {
return lvProducerIndex();
}
@Override
public long currentConsumerIndex() {
return lvConsumerIndex();
}
}
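/*
* Illustrative sketch added for this document (not part of the original JCTools source): the
* queue is only correct when exactly one thread offers and exactly one thread polls.
*
*   SpscUnboundedAtomicArrayQueue<String> q = new SpscUnboundedAtomicArrayQueue<String>(64);
*   // producer thread:   q.offer("event");
*   // consumer thread:   String e = q.poll();   // returns null when the queue is empty
*/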
|
|
/*
* Copyright 2016 Albert Tregnaghi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*
*/
package de.jcup.egradle.integration;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import org.junit.rules.ExternalResource;
import de.jcup.egradle.codeassist.CodeCompletionRegistry;
import de.jcup.egradle.codeassist.GradleDSLProposalFactory;
import de.jcup.egradle.codeassist.ProposalFactoryContentProvider;
import de.jcup.egradle.codeassist.ProposalFactoryContentProviderException;
import de.jcup.egradle.codeassist.RelevantCodeCutter;
import de.jcup.egradle.codeassist.StaticOffsetProposalFactoryContentProvider;
import de.jcup.egradle.codeassist.dsl.ApiMappingImporter;
import de.jcup.egradle.codeassist.dsl.FilesystemFileLoader;
import de.jcup.egradle.codeassist.dsl.XMLPluginsImporter;
import de.jcup.egradle.codeassist.dsl.XMLTypeImporter;
import de.jcup.egradle.codeassist.dsl.gradle.GradleDSLCodeTemplateBuilder;
import de.jcup.egradle.codeassist.dsl.gradle.GradleDSLPluginLoader;
import de.jcup.egradle.codeassist.dsl.gradle.GradleDSLTypeProvider;
import de.jcup.egradle.codeassist.dsl.gradle.GradleFileType;
import de.jcup.egradle.codeassist.dsl.gradle.estimation.GradleLanguageElementEstimater;
import de.jcup.egradle.codeassist.hover.HoverSupport;
import de.jcup.egradle.core.ModelProvider;
import de.jcup.egradle.core.TestUtil;
import de.jcup.egradle.core.TextProvider;
import de.jcup.egradle.core.model.Model;
import de.jcup.egradle.core.model.ModelBuilder.ModelBuilderException;
import de.jcup.egradle.core.model.groovyantlr.AbstractGroovyModelBuilder;
import de.jcup.egradle.core.model.groovyantlr.GradleModelBuilder;
import de.jcup.egradle.core.util.ErrorHandler;
/**
* IntegrationTestComponents is the central point for integration tests
*
* @author Albert Tregnaghi
*
*/
public class IntegrationTestComponents extends ExternalResource {
private static IntegrationTestComponents INSTANCE = new IntegrationTestComponents();
private CodeCompletionRegistry codeCompletionRegistry;
private ErrorHandler errorHandler;
private GradleDSLTypeProvider gradleDslProvider;
private HoverSupport hoverSupport;
private RelevantCodeCutter relevantCodeCutter;
private GradleLanguageElementEstimater estimator;
private GradleDSLCodeTemplateBuilder gradleDslCodeBuilder;
private GradleDSLProposalFactory gradleDSLProposalFactory;
private XMLPluginsImporter pluginsImporter;
private FilesystemFileLoader fileLoader;
private static boolean showFullStacktraces;
static {
String property = System.getProperty("egradle.integration.test.stacktrace");
showFullStacktraces = Boolean.parseBoolean(property);
}
private IntegrationTestComponents() {
startSDKParts();
}
public static IntegrationTestComponents initialize() {
return INSTANCE;
}
public RelevantCodeCutter getRelevantCodeCutter() {
return relevantCodeCutter;
}
public ErrorHandler getErrorHandler() {
return errorHandler;
}
public HoverSupport getHoverSupport() {
return hoverSupport;
}
public GradleDSLTypeProvider getGradleDslProvider() {
return gradleDslProvider;
}
private void startSDKParts() {
relevantCodeCutter = new RelevantCodeCutter();
hoverSupport = new HoverSupport();
errorHandler = new ErrorHandler() {
@Override
public void handleError(String message) {
System.err.println(message);
}
@Override
public void handleError(String message, Throwable t) {
if (showFullStacktraces) {
handleError(message);
t.printStackTrace();
} else {
System.err.println(message + " - " + t.getMessage());
}
}
};
codeCompletionRegistry = new CodeCompletionRegistry();
XMLTypeImporter typeImporter = new XMLTypeImporter();
pluginsImporter = new XMLPluginsImporter();
ApiMappingImporter apiMappingImporter = new ApiMappingImporter();
fileLoader = new FilesystemFileLoader(typeImporter, pluginsImporter, apiMappingImporter);
fileLoader.setDSLFolder(new File(TestUtil.SDK__SRC_MAIN_RES_FOLDER, "sdk"));
gradleDslProvider = new GradleDSLTypeProvider(fileLoader);
gradleDslProvider.setErrorHandler(errorHandler);
GradleDSLPluginLoader pluginLoader = new GradleDSLPluginLoader(fileLoader);
/*
* install the dsl type provider as a service, so it is guaranteed to be shared...
*/
codeCompletionRegistry.registerService(GradleDSLTypeProvider.class, gradleDslProvider);
codeCompletionRegistry.registerService(GradleDSLPluginLoader.class, pluginLoader);
estimator = new GradleLanguageElementEstimater(gradleDslProvider);
gradleDslCodeBuilder = new GradleDSLCodeTemplateBuilder();
gradleDSLProposalFactory = new GradleDSLProposalFactory(gradleDslCodeBuilder, estimator);
codeCompletionRegistry.init();
}
public XMLPluginsImporter getPluginsImporter() {
return pluginsImporter;
}
public FilesystemFileLoader getFileLoader() {
return fileLoader;
}
public GradleDSLProposalFactory getGradleDSLProposalFactory() {
return gradleDSLProposalFactory;
}
public GradleDSLCodeTemplateBuilder getGradleDslCodeBuilder() {
return gradleDslCodeBuilder;
}
public Model buildModel(String text) {
InputStream is = new ByteArrayInputStream(text.getBytes());
AbstractGroovyModelBuilder builder = new GradleModelBuilder(is);
try {
return builder.build(null);
} catch (ModelBuilderException e) {
throw new IllegalStateException("Cannot build test model:\nReason:" + e.getMessage() + "\nText=" + text, e);
}
}
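/*
* Illustrative sketch added for this document (not part of the original EGradle source):
* integration tests typically obtain the shared components and build a model from a gradle
* snippet; the snippet text below is an assumption used only for illustration.
*
*   IntegrationTestComponents components = IntegrationTestComponents.initialize();
*   Model model = components.buildModel("task hello { doLast { println 'hello' } }");
*/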
public GradleLanguageElementEstimater getEstimator() {
return estimator;
}
public String loadTestFile(String path) {
File file = new File(TestUtil.SRC_TEST_RES_FOLDER, path);
if (!file.exists()) {
throw new IllegalStateException("Testfile does not exist:" + file.getAbsolutePath());
}
try (BufferedReader br = new BufferedReader(new FileReader(file))) {
String line = "";
boolean firstLine = true;
StringBuilder sb = new StringBuilder();
while ((line = br.readLine()) != null) {
if (!firstLine) {
sb.append("\n");
}
firstLine = false;
sb.append(line);
}
return sb.toString();
} catch (IOException e) {
throw new IllegalStateException("Testfile reading failed:" + file.getAbsolutePath(), e);
}
}
public ProposalFactoryContentProvider buildContentProvider(String text, int offset) {
Model model = buildModel(text);
ModelProvider modelProvider = new ModelProvider() {
@Override
public Model getModel() {
return model;
}
};
TextProvider textProvider = new IntegrationTestTextProvider(text);
try {
StaticOffsetProposalFactoryContentProvider provider = new StaticOffsetProposalFactoryContentProvider(GradleFileType.GRADLE_BUILD_SCRIPT, modelProvider, textProvider, relevantCodeCutter,
offset);
return provider;
} catch (ProposalFactoryContentProviderException e) {
throw new IllegalStateException("Should not happen", e);
}
}
}
|
|
/**
* Holico : Proposition d'implementation du HomeBus Holico
*
* Module name: com.francetelecom.rd.sds.sds-impl
* Version: 0.4-SNAPSHOT
*
* Copyright (C) 2013 Orange
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Orange nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* http://opensource.org/licenses/BSD-3-Clause
*/
import java.util.ArrayList;
import junit.framework.TestCase;
import com.francetelecom.rd.sds.Data;
import com.francetelecom.rd.sds.DataAccessException;
import com.francetelecom.rd.sds.Directory;
import com.francetelecom.rd.sds.Parameter;
import com.francetelecom.rd.sds.DataChangeListener;
import com.francetelecom.rd.sds.DataEvent;
import com.francetelecom.rd.sds.impl.HomeSharedDataImpl;
public class DirectoryTest extends TestCase {
// ==============================================================================
String paramPath = "my_path";
String dirPath = "my_dir";
Directory hsRoot;
// ==============================================================================
protected void setUp() throws Exception {
System.out
.println(" Global setup " + DirectoryTest.class.getName());
HomeSharedDataImpl hsd = HomeSharedDataImpl.getInstance();
hsRoot = hsd.getRootDirectory(true, null, null);
assertNotNull(hsRoot);
}
protected void tearDown() throws Exception {
System.out.println(" Global tearDown "
+ DirectoryTest.class.getName());
resetRoot();
}
private void resetRoot() {
if(hsRoot == null) return;
recursiveDelete(hsRoot);
hsRoot = null;
}
private void recursiveDelete(Directory dir) {
if(dir == null) return;
Data[] children = dir.getChildren();
for (int i = 0; i < children.length; ++i) {
if (children[i].getType() == Data.TYPE_GEN_DIR
|| children[i].getType() == Data.TYPE_SPE_DIR) {
recursiveDelete((Directory) children[i]);
try {
dir.deleteData(children[i].getName());
} catch (DataAccessException e) {
e.printStackTrace();
}
} else {
try {
dir.deleteData(children[i].getName());
} catch (DataAccessException e) {
e.printStackTrace();
}
}
}
}
// ==============================================================================
public void test_newData() throws Exception {
// create a new boolean data
Data myData = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData);
// is the data path recoverable ?
String path = myData.getPathname();
assertEquals(paramPath, path);
// is the data type recoverable ?
int type = myData.getType();
assertEquals(Data.TYPE_BOOL, type);
}
public void test_newDataOverwrite() throws Exception {
// create a new boolean data
Data myData1 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// overwrite the data with same path and type
Data myData2 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData2);
// is it still the same data ?
assertSame(myData1, myData2);
// overwrite the data with same path and different type
try
{
Data myData3 = hsRoot.newData(paramPath, Data.TYPE_INT, true);
assertNotNull(myData3);
}
catch(Exception e)
{
// exception NOT expected.
assertTrue(false);
}
}
public void test_newDataNoOverwrite() throws Exception {
// create a new boolean data
Data myData1 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// try to create the same data again with overwrite disabled
try
{
Data myData2 = hsRoot.newData(paramPath, Data.TYPE_BOOL, false);
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
}
public void test_deleteData() throws Exception {
// create a new boolean data
Data myData = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData);
// is the data in the root ?
boolean isInRoot = hsRoot.contains(paramPath);
assertTrue(isInRoot);
// delete the data with wrong path
try
{
Data deletedData = hsRoot.deleteData(paramPath + "_wrong");
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
// delete the data
Data deletedData = hsRoot.deleteData(paramPath);
// is the deleted data the same as desired ?
assertSame(myData, deletedData);
// is the data in the root ?
isInRoot = hsRoot.contains(paramPath);
assertFalse(isInRoot);
}
// ==============================================================================
public void test_getDirectory() throws Exception
{
// create subDir
Data subDir = hsRoot.newData(dirPath, Data.TYPE_GEN_DIR, true);
assertNotNull(subDir);
// create a parameter
Data myData = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData);
// retrieve the sub dir
Directory dir = hsRoot.getDirectory(dirPath);
assertNotNull(dir);
assertSame(subDir, dir);
// retrieve a bad Directory
try
{
Directory dir2 = hsRoot.getDirectory(paramPath);
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
}
public void test_getParameter() throws Exception {
// create subDir
Data subDir = hsRoot.newData(dirPath, Data.TYPE_GEN_DIR, true);
assertNotNull(subDir);
// create a new boolean data
Data myData1 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// retrieve parameter as Object
Parameter param = hsRoot.getParameter(paramPath);
assertNotNull(param);
assertSame(myData1, param);
// retrieve parameter with wrong path
try
{
Parameter param2 = hsRoot.getParameter(paramPath + "_wrong");
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
// retrieve parameter as boolean
boolean paramBool = hsRoot.getParameterBooleanValue(paramPath);
assertEquals(false, paramBool);
// retrieve parameter as int
try
{
int paramInt = hsRoot.getParameterIntValue(paramPath);
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
// retrieve a bad Parameter
try
{
Parameter param3 = hsRoot.getParameter(dirPath);
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
}
public void test_setParameter() throws Exception {
// create a new boolean data
Data myData1 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// set a new value
hsRoot.setParameterValue(paramPath, Boolean.TRUE);
// retrieve parameter as boolean
boolean paramBool = hsRoot.getParameterBooleanValue(paramPath);
assertEquals(true, paramBool);
// set a new value with wrong path
try
{
hsRoot.setParameterValue(paramPath + "_wrong", Boolean.TRUE);
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
// set a new value with wrong type
try
{
hsRoot.setParameterValue(paramPath, new Integer(0));
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
}
// ==============================================================================
public void test_contains() throws Exception
{
// create a new boolean data
Data myData = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData);
// is the data in the root ?
boolean isInRoot = hsRoot.contains(paramPath);
assertTrue(isInRoot);
// is the data with wrong path in the root ?
isInRoot = hsRoot.contains(paramPath + "_wrong");
assertFalse(isInRoot);
}
// ==============================================================================
public void test_getChild() throws Exception
{
// create a new boolean data
Data myData = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData);
// retrieve the child
Data child = hsRoot.getChild(paramPath);
assertSame(myData, child);
// retrieve the child with wrong path
child = hsRoot.getChild(paramPath + "_wrong");
assertNull(child);
}
public void test_getChildNames() throws Exception
{
// create subDir
Data subDir = hsRoot.newData(dirPath, Data.TYPE_GEN_DIR, true);
assertNotNull(subDir);
// create children from root
Data child1 = hsRoot.newData(dirPath + ".1", Data.TYPE_BOOL, true);
assertNotNull(child1);
// create children from subDir
Data child2 = ((Directory)subDir).newData("2", Data.TYPE_BOOL, true);
assertNotNull(child2);
// retrieve child names
String[] childNames = hsRoot.getChildNames(dirPath);
assertEquals(2, childNames.length);
assertTrue((childNames[0].equals("1") && childNames[1].equals("2"))
|| (childNames[0].equals("2") && childNames[1].equals("1")));
}
public void test_getChildren() throws Exception
{
// create subDir
Data subDir = hsRoot.newData(dirPath, Data.TYPE_GEN_DIR, true);
assertNotNull(subDir);
// create children from root
Data child1 = hsRoot.newData(dirPath + ".1", Data.TYPE_BOOL, true);
assertNotNull(child1);
// create children from subDir
Data child2 = ((Directory)subDir).newData("2", Data.TYPE_BOOL, true);
assertNotNull(child2);
// retrieve root children
Data[] rootChildren = hsRoot.getChildren();
assertEquals(1, rootChildren.length);
assertEquals(dirPath, rootChildren[0].getPathname());
// retrieve sub dir children
Data[] subdirChildren = ((Directory)subDir).getChildren();
assertEquals(2, subdirChildren.length);
assertTrue((subdirChildren[0].getName().equals("1") && subdirChildren[1].getName().equals("2"))
|| (subdirChildren[0].getName().equals("2") && subdirChildren[1].getName().equals("1")));
}
// ==============================================================================
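// The listener tests below use a short Thread.sleep() to wait for the asynchronous
// change notification before asserting on the number of callbacks received.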
public void test_addValueChangeListenerCurrentDir() throws Exception
{
// declare custom listener for test
class CustomListener implements DataChangeListener{
int nbOfNotif = 0;
public void dataChange(ArrayList<DataEvent> events) {
nbOfNotif++;
}
public int getNbOfNotif() {
return nbOfNotif;
}
};
// create the listener
CustomListener listener = new CustomListener();
// register to notif on root
hsRoot.addDataChangeListener(listener);
// create a new boolean data
Data myData1 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// waiting for notif
Thread.sleep(200);
// the notif must have been fired
assertEquals(1, listener.getNbOfNotif());
// create a new boolean data
Data myData2 = hsRoot.newData(paramPath + "2", Data.TYPE_BOOL, true);
assertNotNull(myData2);
// waiting for notif
Thread.sleep(200);
// the second notif must have been fired
assertEquals(2, listener.getNbOfNotif());
}
public void test_addValueChangeListenerOnSubDir() throws Exception
{
// declare custom listener for test
class CustomListener implements DataChangeListener{
int nbOfNotif = 0;
public void dataChange(ArrayList<DataEvent> events) {
nbOfNotif++;
}
public int getNbOfNotif() {
return nbOfNotif;
}
};
// create the listener
CustomListener listener = new CustomListener();
// create subDir
Data subDir = hsRoot.newData(dirPath, Data.TYPE_GEN_DIR, true);
assertNotNull(subDir);
// register to notif on bad sub dir
try
{
hsRoot.addValueChangeListener(dirPath + "_wrong", listener);
assertTrue(false);
}
catch(Exception e)
{
// exception expected.
assertTrue(true);
}
// register to notif on sub dir
hsRoot.addValueChangeListener(dirPath, listener);
// create a new boolean data
Data myData1 = ((Directory)subDir).newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// waiting for notif
Thread.sleep(200);
// the notif must have been fired
assertEquals(1, listener.getNbOfNotif());
}
public void test_removeValueChangeListenerCurrentDir() throws Exception
{
// declare custom listener for test
class CustomListener implements DataChangeListener{
int nbOfNotif = 0;
public void dataChange(ArrayList<DataEvent> evt) {
nbOfNotif++;
}
public int getNbOfNotif() {
return nbOfNotif;
}
};
// create the listener
CustomListener listener = new CustomListener();
// register to notif on root
hsRoot.addDataChangeListener(listener);
// create a new boolean data
Data myData1 = hsRoot.newData(paramPath, Data.TYPE_BOOL, true);
assertNotNull(myData1);
// waiting for notif
Thread.sleep(200);
// the notif must have been fired
assertEquals(1, listener.getNbOfNotif());
// unregister to notif on root
hsRoot.removeDataChangeListener(listener);
// create a new boolean data
Data myData2 = hsRoot.newData(paramPath + "2", Data.TYPE_BOOL, true);
assertNotNull(myData2);
// waiting for notif
Thread.sleep(200);
// the second notif was not fired
assertEquals(1, listener.getNbOfNotif());
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.analysis.salsa.visualization;
import prefuse.data.io.sql.*;
import prefuse.data.Table;
import prefuse.data.expression.parser.*;
import prefuse.data.expression.*;
import prefuse.data.column.*;
import prefuse.data.query.*;
import prefuse.data.*;
import prefuse.action.*;
import prefuse.action.layout.*;
import prefuse.action.assignment.*;
import prefuse.visual.expression.*;
import prefuse.visual.*;
import prefuse.render.*;
import prefuse.util.*;
import prefuse.*;
import org.apache.hadoop.chukwa.hicc.OfflineTimeHandler;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.database.Macro;
import org.apache.hadoop.chukwa.util.XssFilter;
import javax.servlet.http.*;
import javax.swing.BorderFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.sql.*;
import java.util.*;
import java.text.NumberFormat;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.awt.Font;
import java.awt.geom.Rectangle2D;
import java.awt.Color;
/**
 * Static image rendering for heatmap visualization of spatial HDFS
 * activity patterns, enabling scalable rendering on the front-end (web browser).
 * Handles database data retrieval, transforming the data into a form usable by
 * the visualization elements, and initializing and calling the visualization
 * elements.
 */
public class Heatmap {
/**
* Internal representation of all data needed to render heatmap;
* data-handling code populates this data structure
*/
protected static class HeatmapData {
public Table agg_tab;
public long [][] stats;
public long min;
public long max;
public int num_hosts;
public String [] hostnames;
public HeatmapData() {
}
}
private static Log log = LogFactory.getLog(Heatmap.class);
static final String START_FIELD_NAME = "start_time_num";
static final String END_FIELD_NAME = "finish_time_num";
int BOXWIDTH = 250;
int SIZE_X = 1600, SIZE_Y=1600;
final int [] BORDER = {200,150,150,150};
final int LEGEND_X_OFFSET = 10;
final int LEGEND_Y_OFFSET = 0;
final int LEGEND_TEXT_OFFSET = 10;
final int LEGEND_FONT_SIZE = 24;
final int AXIS_NAME_FONT_SIZE = 24;
protected boolean offline_use = true;
protected HttpServletRequest request;
// for offline use only
// keys that need to be filled:
// period (last1/2/3/6/12/24hr,last7d,last30d), time_type (range/last), start, end
protected HashMap<String, String> param_map;
protected String cluster;
protected String timezone;
protected String query_state;
protected String query_stat_type;
protected final String table = new String("filesystem_fsm");
protected boolean plot_legend = false; // controls whether to plot hostnames
protected boolean sort_nodes = true;
protected boolean plot_additional_info = true;
protected String add_info_extra = null;
protected Display dis;
protected Visualization viz;
protected Rectangle2D dataBound = new Rectangle2D.Double();
protected Rectangle2D xlabBound = new Rectangle2D.Double();
protected Rectangle2D ylabBound = new Rectangle2D.Double();
protected Rectangle2D labelBottomBound = new Rectangle2D.Double();
protected HashMap<String, String> prettyStateNames;
/* Different group names allow control of what Renderers to use */
final String maingroup = "Data";
final String othergroup = "Misc";
final String labelgroup = "Label";
final String legendgroup = "Legend";
final String legendshapegroup = "LegendShape";
final String addinfogroup = "AddInfo";
final String addinfoshapegroup = "AddInfoShape";
public Heatmap() {
this.cluster = new String("");
this.timezone = new String("");
this.query_state = new String("");
this.query_stat_type = new String("");
param_map = new HashMap<String, String>();
}
/**
 * @brief Constructor for Heatmap visualization object
 * @param timezone Timezone string from environment
 * @param cluster Cluster name from environment
 * @param event_type FSM state name to query (e.g. read or write)
 * @param query_stat_type Statistic to aggregate for each host pair
 * @param valmap HashMap of key/value pairs simulating parameters from a HttpRequest
 */
public Heatmap
(String timezone, String cluster, String event_type,
String query_stat_type,
HashMap<String, String> valmap)
{
this.cluster = new String(cluster);
if (timezone != null) {
this.timezone = new String(timezone);
} else {
this.timezone = null;
}
this.query_state = new String(event_type);
this.query_stat_type = new String(query_stat_type);
/* This should "simulate" an HttpServletRequest
* Need to have "start" and "end" in seconds since Epoch
*/
this.param_map = valmap;
}
public Heatmap
(String timezone, String cluster, String query_state,
String query_stat_type,
HashMap<String, String> valmap, String shuffles)
{
this.cluster = new String(cluster);
if (timezone != null) {
this.timezone = new String(timezone);
} else {
this.timezone = null;
}
this.query_state = new String(query_state);
this.query_stat_type = new String(query_stat_type);
/* This should "simulate" an HttpServletRequest
* Need to have "start" and "end" in seconds since Epoch
*/
this.param_map = valmap;
}
public Heatmap
(String timezone, String cluster, String query_state,
String query_stat_type,
HashMap<String, String> valmap,
int w, int h)
{
this.cluster = new String(cluster);
if (timezone != null) {
this.timezone = new String(timezone);
} else {
this.timezone = null;
}
this.query_state = new String(query_state);
this.query_stat_type = new String(query_stat_type);
/* This should "simulate" an HttpServletRequest
* Need to have "start" and "end" in seconds since Epoch
*/
this.param_map = valmap;
this.SIZE_X = w;
this.SIZE_Y = h;
}
public Heatmap(HttpServletRequest request) {
XssFilter xf = new XssFilter(request);
this.offline_use = false;
this.request = request;
HttpSession session = request.getSession();
this.cluster = session.getAttribute("cluster").toString();
String query_state = xf.getParameter("query_state");
if (query_state != null) {
this.query_state = new String(query_state);
} else {
this.query_state = new String("read");
}
String query_stat_type = xf.getParameter("query_stat_type");
if (query_stat_type != null) {
this.query_stat_type = new String(query_stat_type);
} else {
this.query_stat_type = new String("transaction_count");
}
this.timezone = session.getAttribute("time_zone").toString();
}
/**
* Set dimensions of image to be generated
* Call before calling @see #run
*/
public void setDimensions(int width, int height) {
this.SIZE_X=width;
this.SIZE_Y=height;
}
/**
* Specify whether to print labels of hosts along axes
* Call before calling @see #run
*/
public void setLegend(boolean legendopt) {
if (legendopt) {
this.plot_legend = true;
} else {
this.plot_legend = false;
}
}
/**
* Generates image in specified format, and writes image as binary
* output to supplied output stream
*/
public boolean getImage(java.io.OutputStream output, String img_fmt, double scale) {
dis = new Display(this.viz);
dis.setSize(SIZE_X,SIZE_Y);
dis.setHighQuality(true);
dis.setFont(new Font(Font.SANS_SERIF,Font.PLAIN,24));
return dis.saveImage(output, img_fmt, scale);
}
protected void setupRenderer() {
this.viz.setRendererFactory(new RendererFactory(){
AbstractShapeRenderer sr = new ShapeRenderer();
ShapeRenderer sr_big = new ShapeRenderer(BOXWIDTH);
Renderer arY = new AxisRenderer(Constants.LEFT, Constants.TOP);
Renderer arX = new AxisRenderer(Constants.CENTER, Constants.BOTTOM);
PolygonRenderer pr = new PolygonRenderer(Constants.POLY_TYPE_LINE);
LabelRenderer lr = new LabelRenderer("label");
LabelRenderer lr_legend = new LabelRenderer("label");
public Renderer getRenderer(VisualItem item) {
lr_legend.setHorizontalAlignment(Constants.LEFT);
lr_legend.setVerticalAlignment(Constants.CENTER);
lr.setHorizontalAlignment(Constants.CENTER);
lr.setVerticalAlignment(Constants.CENTER);
if (item.isInGroup(maingroup)) {
return sr_big;
} else if (item.isInGroup(legendgroup)) {
return lr_legend;
} else if (item.isInGroup(addinfogroup)) {
return lr;
}
return sr;
}
});
}
// fetch the aggregated data used to populate the heatmap table
protected HeatmapData setupDataTable() {
HeatmapData hd = this.getData();
return hd;
}
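// Maps each cell of the host-by-host statistics matrix to a fill color using a palette
// interpolated between dark red (minimum value) and white (maximum value); cells with a
// value of zero are drawn black. The cells are then laid out as a num_hosts x num_hosts
// grid inside dataBound via prefuse's GridLayout.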
protected void setupHeatmap(VisualTable vtab, HeatmapData hd)
{
long [][] stats = hd.stats;
int i, j, curr_idx;
long curr_val;
int num_hosts = hd.num_hosts;
ColorMap cm = new ColorMap(
ColorLib.getInterpolatedPalette(
ColorLib.color(ColorLib.getColor(32,0,0)),
ColorLib.color(Color.WHITE)
),
(double)hd.min,(double)hd.max
);
for (i = 0; i < num_hosts; i++) {
for (j = 0; j < num_hosts; j++) {
curr_idx = j+(i*num_hosts);
curr_val = stats[i][j];
if (curr_val >= hd.min) {
vtab.setFillColor(curr_idx, cm.getColor((double)curr_val));
} else if (curr_val == 0) {
vtab.setFillColor(curr_idx, ColorLib.color(Color.BLACK));
}
}
}
// GridLayout places tiles row-wise (row1, followed by row2, etc.)
GridLayout gl = new GridLayout(maingroup, num_hosts, num_hosts);
gl.setLayoutBounds(this.dataBound);
ActionList gl_list = new ActionList();
gl_list.add(gl);
this.viz.putAction("gridlayout",gl_list);
this.viz.run("gridlayout");
}
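// Places one hostname label per heatmap row along the left edge; the vertical spacing
// matches BOXWIDTH so label i lines up with row i of the grid.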
protected void addHostLabels(HeatmapData hd) {
Table legend_labels_table = new Table();
legend_labels_table.addColumn("label",String.class);
legend_labels_table.addRows(hd.hostnames.length);
for (int i = 0; i < hd.hostnames.length; i++) {
legend_labels_table.setString(i,"label",hd.hostnames[i]);
}
float start_x = LEGEND_X_OFFSET;
float start_y = LEGEND_Y_OFFSET + BORDER[1] + (BOXWIDTH/2);
float incr = this.BOXWIDTH;
VisualTable legend_labels_table_viz = this.viz.addTable(legendgroup, legend_labels_table);
for (int i = 0; i < hd.hostnames.length; i++) {
legend_labels_table_viz.setFloat(i, VisualItem.X, start_x + LEGEND_TEXT_OFFSET);
legend_labels_table_viz.setFloat(i, VisualItem.Y, start_y + (i * incr));
legend_labels_table_viz.setTextColor(i,ColorLib.color(java.awt.Color.BLACK));
legend_labels_table_viz.setFont(i,new Font(Font.SANS_SERIF,Font.PLAIN,LEGEND_FONT_SIZE));
}
}
protected void addAddlInfo(HeatmapData hd) {
Table legend_labels_table = new Table();
legend_labels_table.addColumn("label",String.class);
legend_labels_table.addRows(3);
String hostnumstring = "Number of hosts: " + hd.num_hosts;
if (sort_nodes) {
hostnumstring += " (nodes sorted)";
} else {
hostnumstring += " (nodes not sorted)";
}
if (add_info_extra != null) hostnumstring += add_info_extra;
legend_labels_table.setString(0,"label",hostnumstring);
legend_labels_table.setString(1,"label","Src. Hosts");
legend_labels_table.setString(2,"label","Dest. Hosts");
float start_x = LEGEND_X_OFFSET;
float start_y = LEGEND_Y_OFFSET + BORDER[1] + (BOXWIDTH/2);
float incr = this.BOXWIDTH;
VisualTable legend_labels_table_viz = this.viz.addTable(addinfogroup, legend_labels_table);
legend_labels_table_viz.setFloat(0, VisualItem.X, this.SIZE_X/2);
legend_labels_table_viz.setFloat(0, VisualItem.Y, BORDER[1]/2);
legend_labels_table_viz.setTextColor(0,ColorLib.color(java.awt.Color.BLACK));
legend_labels_table_viz.setFont(0,new Font(Font.SANS_SERIF,Font.PLAIN,LEGEND_FONT_SIZE));
legend_labels_table_viz.setFloat(1, VisualItem.X, this.SIZE_X/2);
legend_labels_table_viz.setFloat(1, VisualItem.Y, BORDER[1] + (BOXWIDTH*hd.num_hosts) + BORDER[3]/2);
legend_labels_table_viz.setTextColor(1,ColorLib.color(java.awt.Color.BLACK));
legend_labels_table_viz.setFont(1,new Font(Font.SANS_SERIF,Font.PLAIN,LEGEND_FONT_SIZE));
legend_labels_table_viz.setFloat(2, VisualItem.X, BORDER[0] + (BOXWIDTH*hd.num_hosts) + BORDER[2]/2);
legend_labels_table_viz.setFloat(2, VisualItem.Y, this.SIZE_Y/2);
legend_labels_table_viz.setTextColor(2,ColorLib.color(java.awt.Color.BLACK));
legend_labels_table_viz.setFont(2,new Font(Font.SANS_SERIF,Font.PLAIN,LEGEND_FONT_SIZE));
}
protected void initPrettyNames() {
this.prettyStateNames = new HashMap<String, String>();
prettyStateNames.put("read","Block Reads");
prettyStateNames.put("write","Block Writes");
prettyStateNames.put("read_local", "Local Block Reads");
prettyStateNames.put("write_local", "Local Block Writes");
prettyStateNames.put("read_remote", "Remote Block Reads");
prettyStateNames.put("write_remote", "Remote Block Writes");
prettyStateNames.put("write_replicated", "Replicated Block Writes");
}
/**
 * Fetches the data, sizes the plot, and builds the heatmap visualization;
 * call getImage afterwards to render and save the image
 */
public void run() {
initPrettyNames();
// setup visualization
this.viz = new Visualization();
// add table to visualization
HeatmapData hd = this.setupDataTable();
// setup bounds
int width, realwidth;
if (SIZE_X-BORDER[0]-BORDER[2] < SIZE_Y-BORDER[1]-BORDER[3]) {
BOXWIDTH = (SIZE_X-BORDER[0]-BORDER[2]) / hd.num_hosts;
} else {
BOXWIDTH = (SIZE_Y-BORDER[1]-BORDER[3]) / hd.num_hosts;
}
width = hd.num_hosts * BOXWIDTH;
this.dataBound.setRect(
BORDER[0]+BOXWIDTH/2,
BORDER[1]+BOXWIDTH/2,
width-BOXWIDTH,width-BOXWIDTH
);
this.SIZE_X = BORDER[0] + BORDER[2] + (hd.num_hosts * BOXWIDTH);
this.SIZE_Y = BORDER[1] + BORDER[3] + (hd.num_hosts * BOXWIDTH);
log.debug("width total: " + width + " width per state: " + BOXWIDTH + " xstart: "
+ (BORDER[0]+BOXWIDTH/2)
+ " ystart: " + (BORDER[1]+BOXWIDTH/2) + " (num hosts: "+hd.num_hosts+")");
log.debug("X size: " + this.SIZE_X + " Y size: " + this.SIZE_Y);
this.setupRenderer();
VisualTable data_tab_viz = viz.addTable(maingroup, hd.agg_tab);
setupHeatmap(data_tab_viz, hd);
ShapeAction legend_sa1 = null, legend_sa2 = null;
SpecifiedLayout legendlabels_sl1 = null, legendlabels_sl2 = null;
if (plot_legend) {
addHostLabels(hd);
legend_sa1 = new ShapeAction(legendshapegroup);
legendlabels_sl1 = new SpecifiedLayout(legendgroup, VisualItem.X, VisualItem.Y);
ActionList legenddraw = new ActionList();
legenddraw.add(legend_sa1);
this.viz.putAction(legendshapegroup, legenddraw);
ActionList legendlabelsdraw = new ActionList();
legendlabelsdraw.add(legendlabels_sl1);
this.viz.putAction(legendgroup,legendlabelsdraw);
}
if (plot_additional_info) {
addAddlInfo(hd);
legend_sa2 = new ShapeAction(addinfoshapegroup);
legendlabels_sl2 = new SpecifiedLayout(addinfogroup, VisualItem.X, VisualItem.Y);
ActionList legenddraw = new ActionList();
legenddraw.add(legend_sa2);
this.viz.putAction(addinfoshapegroup, legenddraw);
ActionList legendlabelsdraw = new ActionList();
legendlabelsdraw.add(legendlabels_sl2);
this.viz.putAction(addinfogroup,legendlabelsdraw);
}
}
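// Helper: returns true when every entry of clustId equals the first, i.e. all
// hosts have been assigned to the same cluster.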
protected boolean checkDone(int [] clustId) {
for (int i = 1; i < clustId.length; i++) {
if (clustId[i] != clustId[0]) return false;
}
return true;
}
/**
 * Sort hosts by total activity (row sums of the statistics matrix) using
 * insertion sort, for better visualization of patterns; returns the
 * resulting permutation of host indices
 */
protected int [] hClust (long [][] stat)
{
int statlen = stat.length;
long [] rowSums = new long[statlen];
int [] permute = new int[statlen];
int i,j,k;
// initialize permutation
for (i = 0; i < statlen; i++) {
permute[i] = i;
}
for (i = 0; i < statlen; i++) {
rowSums[i] = 0;
for (j = 0; j < statlen; j++) {
rowSums[i] += stat[i][j];
}
}
// insertion sort
for (i = 0; i < statlen-1; i++) {
long val = rowSums[i];
int thispos = permute[i];
j = i-1;
while (j >= 0 && rowSums[j] > val) {
rowSums[j+1] = rowSums[j];
permute[j+1] = permute[j];
j--;
}
rowSums[j+1] = val;
permute[j+1] = thispos;
}
return permute;
}
/**
* Reorder rows (and columns) according to a given ordering
* Maintains same ordering along rows and columns
*/
protected long [][] doPermute (long [][] stat, int [] permute) {
int statlen = stat.length;
int i, j, curr_pos;
long [][] stat2 = new long[statlen][statlen];
assert(stat.length == permute.length);
for (i = 0; i < statlen; i++) {
curr_pos = permute[i];
for (j = 0; j < statlen; j++) {
stat2[i][j] = stat[curr_pos][permute[j]];
}
}
return stat2;
}
/**
* Interfaces with database to get data and
* populate data structures for rendering
*/
public HeatmapData getData() {
// preliminary setup
OfflineTimeHandler time_offline;
TimeHandler time_online;
long start, end, min, max;
if (offline_use) {
time_offline = new OfflineTimeHandler(param_map, this.timezone);
start = time_offline.getStartTime();
end = time_offline.getEndTime();
} else {
time_online = new TimeHandler(this.request, this.timezone);
start = time_online.getStartTime();
end = time_online.getEndTime();
}
DatabaseWriter dbw = new DatabaseWriter(this.cluster);
// setup query
String query;
if (this.query_state != null && this.query_state.equals("read")) {
query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["+table+"] where finish_time between '[start]' and '[end]' and (state_name like 'read_local' or state_name like 'read_remote')";
} else if (this.query_state != null && this.query_state.equals("write")) {
query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["+table+"] where finish_time between '[start]' and '[end]' and (state_name like 'write_local' or state_name like 'write_remote' or state_name like 'write_replicated')";
} else {
query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["+table+"] where finish_time between '[start]' and '[end]' and state_name like '" + query_state + "'";
}
Macro mp = new Macro(start,end,query);
query = mp.toString() + " order by start_time";
ArrayList<HashMap<String, Object>> events = new ArrayList<HashMap<String, Object>>();
ResultSet rs = null;
log.debug("Query: " + query);
// run query, extract results
try {
rs = dbw.query(query);
ResultSetMetaData rmeta = rs.getMetaData();
int col = rmeta.getColumnCount();
while (rs.next()) {
HashMap<String, Object> event = new HashMap<String, Object>();
long event_time=0;
for(int i=1;i<=col;i++) {
if(rmeta.getColumnType(i)==java.sql.Types.TIMESTAMP) {
event.put(rmeta.getColumnName(i),rs.getTimestamp(i).getTime());
} else {
event.put(rmeta.getColumnName(i),rs.getString(i));
}
}
events.add(event);
}
} catch (SQLException ex) {
// handle any errors
log.error("SQLException: " + ex.getMessage());
log.error("SQLState: " + ex.getSQLState());
log.error("VendorError: " + ex.getErrorCode());
} finally {
dbw.close();
}
SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
log.info(events.size() + " results returned.");
HashSet<String> host_set = new HashSet<String>();
HashMap<String, Integer> host_indices = new HashMap<String, Integer>();
HashMap<Integer, String> host_rev_indices = new HashMap<Integer, String>();
// collect hosts, name unique hosts
for(int i = 0; i < events.size(); i++) {
HashMap<String, Object> event = events.get(i);
String curr_host = (String) event.get("hostname");
String other_host = (String) event.get("other_host");
host_set.add(curr_host);
host_set.add(other_host);
}
int num_hosts = host_set.size();
Iterator<String> host_iter = host_set.iterator();
for (int i = 0; i < num_hosts && host_iter.hasNext(); i++) {
String curr_host = host_iter.next();
host_indices.put(curr_host, new Integer(i));
host_rev_indices.put(new Integer(i),curr_host);
}
System.out.println("Number of hosts: " + num_hosts);
long stats[][] = new long[num_hosts][num_hosts];
long count[][] = new long[num_hosts][num_hosts]; // used for averaging
int start_millis = 0, end_millis = 0;
// deliberate design choice to duplicate code PER possible operation
// otherwise we have to do the mode check N times, for N states returned
//
// compute aggregate statistics
log.info("Query statistic type: "+this.query_stat_type);
if (this.query_stat_type.equals("transaction_count")) {
for(int i=0;i<events.size();i++) {
HashMap<String, Object> event = events.get(i);
start=(Long)event.get("start_time");
end=(Long)event.get("finish_time");
start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));
String cell = (String) event.get("state_name");
String this_host = (String) event.get("hostname");
String other_host = (String) event.get("other_host");
int this_host_idx = host_indices.get(this_host).intValue();
int other_host_idx = host_indices.get(other_host).intValue();
// to, from
stats[other_host_idx][this_host_idx] += 1;
}
} else if (this.query_stat_type.equals("avg_duration")) {
for(int i=0;i<events.size();i++) {
HashMap<String, Object> event = events.get(i);
start=(Long)event.get("start_time");
end=(Long)event.get("finish_time");
start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));
String cell = (String) event.get("state_name");
String this_host = (String) event.get("hostname");
String other_host = (String) event.get("other_host");
int this_host_idx = host_indices.get(this_host).intValue();
int other_host_idx = host_indices.get(other_host).intValue();
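// duration in milliseconds: whole-second difference (end - start) scaled to ms, plus the
// difference of the *_millis fields (assuming those columns hold sub-second remainders)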
long curr_val = end_millis - start_millis + ((end - start)*1000);
// to, from
stats[other_host_idx][this_host_idx] += curr_val;
count[other_host_idx][this_host_idx] += 1;
}
for (int i = 0; i < num_hosts; i++) {
for (int j = 0; j < num_hosts; j++) {
if (count[i][j] > 0) stats[i][j] = stats[i][j] / count[i][j];
}
}
} else if (this.query_stat_type.equals("avg_volume")) {
for(int i=0;i<events.size();i++) {
HashMap<String, Object> event = events.get(i);
start=(Long)event.get("start_time");
end=(Long)event.get("finish_time");
start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));
String cell = (String) event.get("state_name");
String this_host = (String) event.get("hostname");
String other_host = (String) event.get("other_host");
int this_host_idx = host_indices.get(this_host).intValue();
int other_host_idx = host_indices.get(other_host).intValue();
long curr_val = Long.parseLong((String)event.get("bytes"));
// to, from
stats[other_host_idx][this_host_idx] += curr_val;
count[other_host_idx][this_host_idx] += 1;
}
for (int i = 0; i < num_hosts; i++) {
for (int j = 0; j < num_hosts; j++) {
if (count[i][j] > 0) stats[i][j] = stats[i][j] / count[i][j];
}
}
} else if (this.query_stat_type.equals("total_duration")) {
for(int i=0;i<events.size();i++) {
HashMap<String, Object> event = events.get(i);
start=(Long)event.get("start_time");
end=(Long)event.get("finish_time");
start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));
String cell = (String) event.get("state_name");
String this_host = (String) event.get("hostname");
String other_host = (String) event.get("other_host");
int this_host_idx = host_indices.get(this_host).intValue();
int other_host_idx = host_indices.get(other_host).intValue();
double curr_val = end_millis - start_millis + ((end - start)*1000);
// to, from
stats[other_host_idx][this_host_idx] += curr_val;
}
} else if (this.query_stat_type.equals("total_volume")) {
for(int i=0;i<events.size();i++) {
HashMap<String, Object> event = events.get(i);
start=(Long)event.get("start_time");
end=(Long)event.get("finish_time");
start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));
String cell = (String) event.get("state_name");
String this_host = (String) event.get("hostname");
String other_host = (String) event.get("other_host");
int this_host_idx = host_indices.get(this_host).intValue();
int other_host_idx = host_indices.get(other_host).intValue();
long curr_val = Long.parseLong((String)event.get("bytes"));
// to, from
stats[other_host_idx][this_host_idx] += curr_val;
}
}
int [] permute = null;
if (sort_nodes) {
permute = hClust(stats);
stats = doPermute(stats,permute);
}
Table agg_tab = new Table();
agg_tab.addColumn("stat", long.class);
min = Long.MAX_VALUE;
max = Long.MIN_VALUE;
agg_tab.addRows(num_hosts*num_hosts);
// row-wise placement (row1, followed by row2, etc.)
for (int i = 0; i < num_hosts; i++) {
for (int j = 0; j < num_hosts; j++) {
agg_tab.setLong((i*num_hosts)+j,"stat",stats[i][j]);
if (stats[i][j] > max) max = stats[i][j];
if (stats[i][j] > 0 && stats[i][j] < min) min = stats[i][j];
}
}
if (min == Long.MAX_VALUE) min = 0;
log.info(agg_tab);
// collate data
HeatmapData hd = new HeatmapData();
hd.stats = new long[num_hosts][num_hosts];
hd.stats = stats;
hd.min = min;
hd.max = max;
hd.num_hosts = num_hosts;
hd.agg_tab = agg_tab;
this.add_info_extra = new String("\nState: "+this.prettyStateNames.get(this.query_state)+
" ("+events.size()+" "+this.query_state+"'s ["+this.query_stat_type+"])\n" +
"Plotted value range: ["+hd.min+","+hd.max+"] (Zeros in black)");
hd.hostnames = new String [num_hosts];
for (int i = 0; i < num_hosts; i++) {
    // when nodes are sorted, map position i back through the permutation;
    // otherwise use the original host index (permute is null in that case)
    int host_idx = sort_nodes ? permute[i] : i;
    hd.hostnames[i] = host_rev_indices.get(Integer.valueOf(host_idx));
}
return hd;
}
}
|
|
/* Copyright (c) 2001-2008, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.scriptio;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.hsqldb.HsqlNameManager.HsqlName;
import org.hsqldb.Database;
import org.hsqldb.DatabaseManager;
import org.hsqldb.DatabaseScript;
import org.hsqldb.HsqlException;
import org.hsqldb.NumberSequence;
import org.hsqldb.Result;
import org.hsqldb.Session;
import org.hsqldb.Table;
import org.hsqldb.Token;
import org.hsqldb.Trace;
import org.hsqldb.index.RowIterator;
import org.hsqldb.lib.FileAccess;
import org.hsqldb.lib.FileUtil;
import org.hsqldb.lib.HsqlTimer;
import org.hsqldb.lib.Iterator;
import org.hsqldb.lib.SimpleLog;
//import org.hsqldb.lib.StopWatch;
// todo - can lock the database engine as readonly in a wrapper for this when
// used at checkpoint
/**
* Handles all logging to file operations. A log consists of three blocks:<p>
*
* DDL BLOCK: definition of DB objects, users and rights at startup time<br>
* DATA BLOCK: all data for MEMORY tables at startup time<br>
* LOG BLOCK: SQL statements logged since startup or the last CHECKPOINT<br>
*
* The implementation of this class and its subclasses support the formats
* used for writing the data. In versions up to 1.7.2, this data is written
* to the *.script file for the database. Since 1.7.2 the data can also be
 * written as binary in order to speed up shutdown and startup.<p>
*
* In 1.7.2, two separate files are used, one for the DDL + DATA BLOCK and
* the other for the LOG BLOCK.<p>
*
* A related use for this class is for saving a current snapshot of the
* database data to a user-defined file. This happens in the SHUTDOWN COMPACT
* process or done as a result of the SCRIPT command. In this case, the
* DATA block contains the CACHED table data as well.<p>
*
* DatabaseScriptReader and its subclasses read back the data at startup time.
*
* @author fredt@users
* @version 1.8.0
* @since 1.7.2
*/
public abstract class ScriptWriterBase implements Runnable {
Database database;
String outFile;
OutputStream fileStreamOut;
FileAccess.FileSync outDescriptor;
int tableRowCount;
HsqlName schemaToLog;
/**
* this determines if the script is the normal script (false) used
* internally by the engine or a user-initiated snapshot of the DB (true)
*/
boolean isDump;
boolean includeCachedData;
long byteCount;
volatile boolean needsSync;
volatile boolean forceSync;
volatile boolean busyWriting;
private int syncCount;
static final int INSERT = 0;
static final int INSERT_WITH_SCHEMA = 1;
/** the last schema for last sessionId */
Session currentSession;
public static final String[] LIST_SCRIPT_FORMATS = new String[] {
Token.T_TEXT, Token.T_BINARY, null, Token.T_COMPRESSED
};
public static final int SCRIPT_TEXT_170 = 0;
public static final int SCRIPT_BINARY_172 = 1;
public static final int SCRIPT_ZIPPED_BINARY_172 = 3;
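/**
 * Factory method: returns a text, binary, or zipped-binary script writer
 * according to the given scriptType constant.
 */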
public static ScriptWriterBase newScriptWriter(Database db, String file,
boolean includeCachedData, boolean newFile,
int scriptType) throws HsqlException {
if (scriptType == SCRIPT_TEXT_170) {
return new ScriptWriterText(db, file, includeCachedData, newFile,
false);
} else if (scriptType == SCRIPT_BINARY_172) {
return new ScriptWriterBinary(db, file, includeCachedData,
newFile);
} else {
return new ScriptWriterZipped(db, file, includeCachedData,
newFile);
}
}
ScriptWriterBase() {}
ScriptWriterBase(Database db, String file, boolean includeCachedData,
boolean isNewFile, boolean isDump) throws HsqlException {
this.isDump = isDump;
initBuffers();
boolean exists = false;
if (isDump) {
exists = FileUtil.getDefaultInstance().exists(file);
} else {
exists = db.getFileAccess().isStreamElement(file);
}
if (exists && isNewFile) {
throw Trace.error(Trace.FILE_IO_ERROR, file);
}
this.database = db;
this.includeCachedData = includeCachedData;
outFile = file;
currentSession = database.sessionManager.getSysSession();
// start with neutral schema - no SET SCHEMA to log
schemaToLog = currentSession.loggedSchema =
currentSession.currentSchema;
openFile();
}
public void reopen() throws HsqlException {
openFile();
}
protected abstract void initBuffers();
/**
* Called internally or externally in write delay intervals.
*/
public synchronized void sync() {
if (needsSync && fileStreamOut != null) {
if (busyWriting) {
forceSync = true;
return;
}
try {
fileStreamOut.flush();
outDescriptor.sync();
syncCount++;
} catch (IOException e) {
Trace.printSystemOut("flush() or sync() error: "
+ e.toString());
}
needsSync = false;
forceSync = false;
}
}
public void close() throws HsqlException {
stop();
try {
if (fileStreamOut != null) {
fileStreamOut.flush();
outDescriptor.sync();
fileStreamOut.close();
fileStreamOut = null;
}
} catch (IOException e) {
throw Trace.error(Trace.FILE_IO_ERROR);
}
byteCount = 0;
}
public long size() {
return byteCount;
}
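/**
 * Writes the full script: the DDL block, then the existing table data,
 * then finalizes the output stream.
 */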
public void writeAll() throws HsqlException {
try {
writeDDL();
writeExistingData();
finishStream();
} catch (IOException e) {
throw Trace.error(Trace.FILE_IO_ERROR);
}
}
/**
* File is opened in append mode although in current usage the file
* never pre-exists
*/
protected void openFile() throws HsqlException {
try {
FileAccess fa = isDump ? FileUtil.getDefaultInstance()
: database.getFileAccess();
OutputStream fos = fa.openOutputStreamElement(outFile);
outDescriptor = fa.getFileSync(fos);
fileStreamOut = new BufferedOutputStream(fos, 2 << 12);
} catch (IOException e) {
throw Trace.error(Trace.FILE_IO_ERROR, Trace.Message_Pair,
new Object[] {
e.toString(), outFile
});
}
}
/**
* This is not really useful in the current usage but may be if this
* class is used in a different way.
*/
protected void finishStream() throws IOException {}
protected void writeDDL() throws IOException, HsqlException {
Result ddlPart = DatabaseScript.getScript(database,
!includeCachedData);
writeSingleColumnResult(ddlPart);
}
protected void writeExistingData() throws HsqlException, IOException {
// start with blank schema - SET SCHEMA to log
currentSession.loggedSchema = null;
Iterator schemas = database.schemaManager.userSchemaNameIterator();
while (schemas.hasNext()) {
String schema = (String) schemas.next();
Iterator tables = database.schemaManager.tablesIterator(schema);
while (tables.hasNext()) {
Table t = (Table) tables.next();
// write all memory table data
// write cached table data unless index roots have been written
// write all text table data apart from readonly text tables
// unless index roots have been written
boolean script = false;
switch (t.getTableType()) {
case Table.MEMORY_TABLE :
script = true;
break;
case Table.CACHED_TABLE :
script = includeCachedData;
break;
case Table.TEXT_TABLE :
script = includeCachedData &&!t.isReadOnly();
break;
}
int rowCount = 0;
try {
if (script) {
schemaToLog = t.getName().schema;
writeTableInit(t);
RowIterator it = t.rowIterator(currentSession);
rowCount = 0;
while (it.hasNext()) {
writeRow(currentSession, t, it.next().getData());
rowCount++;
}
writeTableTerm(t);
}
} catch (Throwable e) {
this.database.logger.appLog.logContext(
SimpleLog.LOG_ERROR,
t.getName().name + " problem after row " + rowCount);
System.gc();
throw Trace.error(Trace.ASSERT_FAILED, e.toString());
}
}
}
writeDataTerm();
}
protected void writeTableInit(Table t)
throws HsqlException, IOException {}
protected void writeTableTerm(Table t) throws HsqlException, IOException {
if (t.isDataReadOnly() &&!t.isTemp() &&!t.isText()) {
StringBuffer a = new StringBuffer("SET TABLE ");
a.append(t.getName().statementName);
a.append(" READONLY TRUE");
writeLogStatement(currentSession, a.toString());
}
}
protected void writeSingleColumnResult(Result r)
throws HsqlException, IOException {
Iterator it = r.iterator();
while (it.hasNext()) {
Object[] data = (Object[]) it.next();
writeLogStatement(currentSession, (String) data[0]);
}
}
abstract void writeRow(Session session, Table table,
Object[] data) throws HsqlException, IOException;
protected abstract void writeDataTerm() throws IOException;
protected abstract void addSessionId(Session session) throws IOException;
public abstract void writeLogStatement(Session session,
String s)
throws IOException, HsqlException;
public abstract void writeInsertStatement(Session session, Table table,
Object[] data) throws HsqlException, IOException;
public abstract void writeDeleteStatement(Session session, Table table,
Object[] data) throws HsqlException, IOException;
public abstract void writeSequenceStatement(Session session,
NumberSequence seq) throws HsqlException, IOException;
public abstract void writeCommitStatement(Session session)
throws HsqlException, IOException;
//
private Object timerTask;
// long write delay for scripts : 60s
protected volatile int writeDelay = 60000;
public void run() {
try {
if (writeDelay != 0) {
sync();
}
// todo: try to do Cache.cleanUp() here, too
} catch (Exception e) {
// ignore exceptions
// may be InterruptedException or IOException
if (Trace.TRACE) {
Trace.printSystemOut(e.toString());
}
}
}
public void setWriteDelay(int delay) {
writeDelay = delay;
int period = writeDelay == 0 ? 1000
: writeDelay;
HsqlTimer.setPeriod(timerTask, period);
}
public void start() {
int period = writeDelay == 0 ? 1000
: writeDelay;
timerTask = DatabaseManager.getTimer().schedulePeriodicallyAfter(0,
period, this, false);
}
public void stop() {
if (timerTask != null) {
HsqlTimer.cancel(timerTask);
timerTask = null;
}
}
public int getWriteDelay() {
return writeDelay;
}
}
|
|
/*
* Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.registry.extensions.handlers;
import org.apache.axiom.om.OMElement;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.uddi.api_v3.AuthToken;
import org.wso2.carbon.registry.common.utils.artifact.manager.ArtifactManager;
import org.wso2.carbon.registry.core.*;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.Handler;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.registry.extensions.beans.BusinessServiceInfo;
import org.wso2.carbon.registry.extensions.handlers.utils.*;
import org.wso2.carbon.registry.extensions.utils.CommonConstants;
import org.wso2.carbon.registry.extensions.utils.CommonUtil;
import org.wso2.carbon.registry.extensions.utils.WSDLValidationInfo;
import org.wso2.carbon.registry.uddi.utils.UDDIUtil;
import javax.xml.namespace.QName;
import java.io.*;
import java.util.*;
@SuppressWarnings("unused")
public class WSDLMediaTypeHandler extends Handler {
private static final Log log = LogFactory.getLog(WSDLMediaTypeHandler.class);
protected String locationTag = "location";
private String wsdlLocation = "/wsdls/"; // location will always have a leading '/' and a trailing '/'
private OMElement wsdlLocationConfiguration;
protected String schemaLocation = "/schema/"; // location will always have a leading '/' and a trailing '/'
private OMElement schemaLocationConfiguration;
protected String policyLocation = "/policy/"; // location will always have a leading '/' and a trailing '/'
private OMElement policyLocationConfiguration;
private boolean disableSymlinkCreation = true;
private String defaultWsdlVersion = CommonConstants.WSDL_VERSION_DEFAULT_VALUE;
public boolean getCreateService() {
return createService;
}
public void setCreateService(String createService) {
this.createService = Boolean.valueOf(createService);
}
private boolean createService = true;
private boolean disableWSDLValidation = false;
public OMElement getWsdlLocationConfiguration() {
return wsdlLocationConfiguration;
}
public boolean isDisableSymlinkCreation() {
return disableSymlinkCreation;
}
public void setDisableSymlinkCreation(String disableSymlinkCreation) {
this.disableSymlinkCreation = Boolean.toString(true).equals(disableSymlinkCreation);
}
public void setDefaultServiceVersion(String defaultWsdlVersion) {
this.defaultWsdlVersion = defaultWsdlVersion;
}
public void setWsdlLocationConfiguration(OMElement locationConfiguration) throws RegistryException {
Iterator confElements = locationConfiguration.getChildElements();
while (confElements.hasNext()) {
OMElement confElement = (OMElement)confElements.next();
if (confElement.getQName().equals(new QName(locationTag))) {
wsdlLocation = confElement.getText();
if (!wsdlLocation.startsWith(RegistryConstants.PATH_SEPARATOR)) {
wsdlLocation = RegistryConstants.PATH_SEPARATOR + wsdlLocation;
}
if (!wsdlLocation.endsWith(RegistryConstants.PATH_SEPARATOR)) {
wsdlLocation = wsdlLocation + RegistryConstants.PATH_SEPARATOR;
}
}
}
WSDLProcessor.setCommonWSDLLocation(wsdlLocation);
this.wsdlLocationConfiguration = locationConfiguration;
}
public OMElement getSchemaLocationConfiguration() {
return schemaLocationConfiguration;
}
public void setSchemaLocationConfiguration(OMElement locationConfiguration) throws RegistryException {
Iterator confElements = locationConfiguration.getChildElements();
while (confElements.hasNext()) {
OMElement confElement = (OMElement)confElements.next();
if (confElement.getQName().equals(new QName(locationTag))) {
schemaLocation = confElement.getText();
if (!schemaLocation.startsWith(RegistryConstants.PATH_SEPARATOR)) {
schemaLocation = RegistryConstants.PATH_SEPARATOR + schemaLocation;
}
if (!schemaLocation.endsWith(RegistryConstants.PATH_SEPARATOR)) {
schemaLocation = schemaLocation + RegistryConstants.PATH_SEPARATOR;
}
}
}
WSDLProcessor.setCommonSchemaLocation(schemaLocation);
this.schemaLocationConfiguration = locationConfiguration;
}
public OMElement getPolicyLocationConfiguration() {
return policyLocationConfiguration;
}
public void setPolicyLocationConfiguration(OMElement locationConfiguration) throws RegistryException {
Iterator confElements = locationConfiguration.getChildElements();
while (confElements.hasNext()) {
OMElement confElement = (OMElement)confElements.next();
if (confElement.getQName().equals(new QName(locationTag))) {
policyLocation = confElement.getText();
if (!policyLocation.startsWith(RegistryConstants.PATH_SEPARATOR)) {
policyLocation = RegistryConstants.PATH_SEPARATOR + policyLocation;
}
if (!policyLocation.endsWith(RegistryConstants.PATH_SEPARATOR)) {
policyLocation = policyLocation + RegistryConstants.PATH_SEPARATOR;
}
}
}
WSDLProcessor.setCommonPolicyLocation(policyLocation);
this.policyLocationConfiguration = locationConfiguration;
}
public void makeDir(File file) throws IOException {
if (file != null && !file.exists() && !file.mkdir()) {
log.warn("Failed to create directory at path: " + file.getAbsolutePath());
}
}
public void makeDirs(File file) throws IOException {
if (file != null && !file.exists() && !file.mkdirs()) {
log.warn("Failed to create directories at path: " + file.getAbsolutePath());
}
}
public void delete(File file) throws IOException {
if (file != null && file.exists() && !file.delete()) {
log.warn("Failed to delete file/directory at path: " + file.getAbsolutePath());
}
}
/**
 * Method that will be executed after the put operation has completed.
*
* @param path the path of the resource.
* @param addedResources the resources that have been added to the registry.
* @param otherResources the resources that have not been added to the registry.
* @param requestContext the request context for the put operation.
* @throws RegistryException if the operation failed.
*/
@SuppressWarnings("unused")
protected void onPutCompleted(String path, Map<String, String> addedResources,
List<String> otherResources, RequestContext requestContext)
throws RegistryException {
}
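/**
 * Handles a WSDL put request: if the resource already exists and its content has changed,
 * the WSDL and its dependencies are written to temporary files and re-imported through the
 * WSDLProcessor; a new WSDL is imported directly from a temporary copy of its content.
 * When UDDI support is enabled, the resulting service is also published to the UDDI registry.
 */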
public void put(RequestContext requestContext) throws RegistryException {
if (!CommonUtil.isUpdateLockAvailable()) {
return;
}
CommonUtil.acquireUpdateLock();
WSDLProcessor wsdlProcessor=null;
try {
Resource metadata = requestContext.getResource();
String path = requestContext.getResourcePath().getPath();
try {
// If the WSDL is already there, we don't need to re-run this handler unless the content has changed.
// Re-running this handler causes issues with downstream handlers and other behaviour (ex:- lifecycles).
// If you need to do a replace programmatically, delete and then replace.
if (metadata == null) {
// will go with the default processing
return;
}
Registry registry = requestContext.getRegistry();
// This is to distinguish operations on xsd and wsdl on remote mounting.
String remotePut = metadata.getProperty(RegistryConstants.REMOTE_MOUNT_OPERATION);
if (remotePut != null) {
CommonUtil.releaseUpdateLock();
metadata.removeProperty(RegistryConstants.REMOTE_MOUNT_OPERATION);
registry.put(path, metadata);
requestContext.setProcessingComplete(true);
ArtifactManager.getArtifactManager().getTenantArtifactRepository().
addArtifact(path);
return;
}
if (registry.resourceExists(path)) {
// logic to compare content, and return only if the content didn't change.
Object newContent = metadata.getContent();
Resource oldResource = registry.get(path);
// if the oldResource is a SymLink, then we need to obtain the actual resource path,
// rather than the path of the symlink
if("true".equals(oldResource.getProperty("registry.link"))) {
path = oldResource.getProperty("registry.actualpath");
}
Object oldContent = oldResource.getContent();
String newContentString = null;
String oldContentString = null;
if (newContent != null) {
if (newContent instanceof String) {
newContentString = (String) newContent;
} else {
newContentString = RegistryUtils.decodeBytes((byte[]) newContent);
}
}
if (oldContent != null) {
if (oldContent instanceof String) {
oldContentString = (String) oldContent;
} else {
oldContentString = RegistryUtils.decodeBytes((byte[]) oldContent);
}
}
if ((newContent == null && oldContent == null) ||
(newContentString != null && newContentString.equals(oldContentString))) {
// this will continue adding from the default path.
return;
}
// so we create temp files for the wsdl and all of its dependencies.
Set<String> registryPaths = new LinkedHashSet<String>();
// the first path is the current resource path.
registryPaths.add(path);
// get the associations.
Association[] dependencies = CommonUtil.getDependenciesRecursively(registry, path);
if (dependencies != null) {
for (Association dependency: dependencies) {
String targetPath = dependency.getDestinationPath();
if (targetPath.startsWith(RegistryConstants.ROOT_PATH)) {
registryPaths.add(targetPath);
}
}
}
File referenceTempFile = File.createTempFile("wsdl", ".ref");
File tempDir = new File(referenceTempFile.getAbsolutePath().substring(0,
referenceTempFile.getAbsolutePath().length() - ".ref".length()));
String tempDirPath = tempDir.getAbsolutePath();
// now add each of the registry paths to the tempDir
List<File> tempFiles = new ArrayList<File>();
for (String registryPath: registryPaths) {
if (!registryPath.startsWith(RegistryConstants.ROOT_PATH)) {
continue;
}
String filePath = tempDirPath + registryPath;
File tempFile = new File(filePath);
makeDirs(tempFile.getParentFile());
Object resourceContent;
if (registryPath.equals(path)) {
// this is the wsdl we want to update.
resourceContent = metadata.getContent();
} else {
if (!registry.resourceExists(registryPath)) {
continue;
}
Resource r = registry.get(registryPath);
if (r == null) {
continue;
}
resourceContent = r.getContent();
}
byte[] resourceContentBytes;
if (resourceContent == null) {
resourceContentBytes = new byte[0];
} else if (resourceContent instanceof byte[]) {
resourceContentBytes = (byte[])resourceContent;
} else if (resourceContent instanceof String) {
resourceContentBytes = RegistryUtils.encodeString(((String)resourceContent));
} else {
String msg = "Unknown type for the content path: " + path + ", content type: " +
resourceContent.getClass().getName() + ".";
log.error(msg);
throw new RegistryException(msg);
}
InputStream in = new ByteArrayInputStream(resourceContentBytes);
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tempFile));
byte[] contentChunk = new byte[1024];
int byteCount;
while ((byteCount = in.read(contentChunk)) != -1) {
out.write(contentChunk, 0, byteCount);
}
out.flush();
out.close();
tempFiles.add(tempFile);
}
if (tempFiles.size() == 0) {
// unreachable state; better to log and return anyway.
String msg = "Temporary files count is zero, when updating a wsdl. " +
"wsdl path: " + path + ".";
log.error(msg);
// we are just returning, as the put operation will continue in its default path.
return;
}
File tempFile = tempFiles.get(0);
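// normalize the temp file URI into a file:/// URL before handing it to the
// request context as the source URL for the WSDL import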
String uri = tempFile.toURI().toString();
if (uri.startsWith("file:")) {
uri = uri.substring(5);
}
while (uri.startsWith("/")) {
uri = uri.substring(1);
}
uri = "file:///" + uri;
String wsdlPath = null;
if (uri != null) {
requestContext.setSourceURL(uri);
requestContext.setResource(metadata);
wsdlProcessor = buildWSDLProcessor(requestContext);
wsdlPath = processWSDLImport(requestContext, wsdlProcessor, metadata, uri);
}
// now we will delete each temp files, ref file and the temp directory.
for (File temp : tempFiles) {
FileUtils.forceDelete(temp);
}
FileUtils.deleteDirectory(tempDir);
FileUtils.forceDelete(referenceTempFile);
if (wsdlPath != null) {
onPutCompleted(path, Collections.singletonMap(uri, wsdlPath),
Collections.<String>emptyList(), requestContext);
requestContext.setActualPath(wsdlPath);
}
requestContext.setProcessingComplete(true);
ArtifactManager.getArtifactManager().getTenantArtifactRepository().
addArtifact(path);
return;
}
} catch (IOException e) {
String msg = "Error in updating the wsdl. wsdl path: " + path + ".";
log.error(msg, e);
throw new RegistryException(msg, e);
}
try {
Object resourceContent = metadata.getContent();
byte[] resourceContentBytes;
if (resourceContent == null) {
resourceContentBytes = new byte[0];
} else if (resourceContent instanceof byte[]) {
resourceContentBytes = (byte[])resourceContent;
} else if (resourceContent instanceof String) {
resourceContentBytes = RegistryUtils.encodeString(((String)resourceContent));
} else {
String msg = "Unknown type for the content path: " + path + ", content type: " +
resourceContent.getClass().getName() + ".";
log.error(msg);
throw new RegistryException(msg);
}
InputStream in = new ByteArrayInputStream(resourceContentBytes);
File tempFile = File.createTempFile("wsdl", ".wsdl");
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tempFile));
byte[] contentChunk = new byte[1024];
int byteCount;
while ((byteCount = in.read(contentChunk)) != -1) {
out.write(contentChunk, 0, byteCount);
}
out.flush();
out.close();
String uri = tempFile.toURI().toString();
if (uri.startsWith("file:")) {
uri = uri.substring(5);
}
while (uri.startsWith("/")) {
uri = uri.substring(1);
}
uri = "file:///" + uri;
String wsdlPath = null;
if (uri != null) {
requestContext.setSourceURL(uri);
requestContext.setResource(metadata);
wsdlProcessor = buildWSDLProcessor(requestContext);
wsdlPath = processWSDLImport(requestContext, wsdlProcessor, metadata, uri);
}
delete(tempFile);
if (wsdlPath != null) {
onPutCompleted(path, Collections.singletonMap(uri, wsdlPath),
Collections.<String>emptyList(), requestContext);
requestContext.setActualPath(wsdlPath);
}
/*WSDLProcessor wsdlProcessor = buildWSDLProcessor(requestContext);
wsdlProcessor
.addWSDLToRegistry(
requestContext, null,
metadata, true, true);*/
} catch (IOException e) {
throw new RegistryException("An error occurred while uploading WSDL file", e);
}
requestContext.setProcessingComplete(true);
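            // when UDDI publishing is enabled and this call did not originate from an external
            // UDDI invocation, publish the processed wsdl's service information to UDDI.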
if (wsdlProcessor != null && CommonConstants.ENABLE.equals(System.getProperty(CommonConstants.UDDI_SYSTEM_PROPERTY))
&& !org.wso2.carbon.registry.common.CommonConstants.isExternalUDDIInvoke.get()) {
AuthToken authToken = UDDIUtil.getPublisherAuthToken();
if(authToken ==null){
return;
}
BusinessServiceInfo businessServiceInfo = new BusinessServiceInfo();
WSDLInfo wsdlInfo = wsdlProcessor.getMasterWSDLInfo();
businessServiceInfo.setServiceWSDLInfo(wsdlInfo);
UDDIPublisher publisher = new UDDIPublisher();
publisher.publishBusinessService(authToken,businessServiceInfo);
}
} finally {
CommonUtil.releaseUpdateLock();
}
}
/**
* Method to customize the WSDL Processor.
* @param requestContext the request context for the import/put operation
* @return the WSDL Processor instance.
*/
@SuppressWarnings("unused")
protected WSDLProcessor buildWSDLProcessor(RequestContext requestContext) {
WSDLProcessor wsdlProcessor = new WSDLProcessor(requestContext);
wsdlProcessor.setCreateService(getCreateService());
return wsdlProcessor;
}
/**
* Method to customize the WSDL Processor.
* @param requestContext the request context for the import/put operation
     * @param useOriginalSchema whether to use the original schema
* @return the WSDL Processor instance.
*/
@SuppressWarnings("unused")
protected WSDLProcessor buildWSDLProcessor(RequestContext requestContext, boolean useOriginalSchema) {
WSDLProcessor wsdlProcessor = new WSDLProcessor(requestContext, useOriginalSchema);
wsdlProcessor.setCreateService(getCreateService());
return wsdlProcessor;
}
/**
* Method to customize the Schema Processor.
* @param requestContext the request context for the import/put operation.
* @param validationInfo the WSDL validation information.
* @return the Schema Processor instance.
*/
@SuppressWarnings("unused")
protected SchemaProcessor buildSchemaProcessor(RequestContext requestContext,
WSDLValidationInfo validationInfo) {
return new SchemaProcessor(requestContext, validationInfo);
}
/**
* Method to customize the Schema Processor.
* @param requestContext the request context for the import/put operation.
* @param validationInfo the WSDL validation information.
     * @param useOriginalSchema whether to use the original schema
* @return the Schema Processor instance.
*/
@SuppressWarnings("unused")
protected SchemaProcessor buildSchemaProcessor(RequestContext requestContext,
WSDLValidationInfo validationInfo, boolean useOriginalSchema) {
return new SchemaProcessor(requestContext, validationInfo, useOriginalSchema);
}
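    /**
     * Method to import a WSDL into the registry from a remote URL. Source URLs pointing to
     * the server's local file system are rejected. Once the WSDL has been processed, the
     * service information is optionally published to UDDI.
     *
     * @param requestContext the request context for the import operation
     *
     * @throws RegistryException if the operation failed.
     */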
public void importResource(RequestContext requestContext) throws RegistryException {
if (!CommonUtil.isUpdateLockAvailable()) {
return;
}
CommonUtil.acquireUpdateLock();
WSDLProcessor wsdlProcessor=null;
try {
Resource metadata = requestContext.getResource();
String sourceURL = requestContext.getSourceURL();
if (requestContext.getSourceURL() != null &&
requestContext.getSourceURL().toLowerCase().startsWith("file:")) {
                String msg = "The source URL must not point to a file in the server's local file system";
throw new RegistryException(msg);
}
try {
wsdlProcessor = buildWSDLProcessor(requestContext);
String wsdlPath =
processWSDLImport(requestContext, wsdlProcessor, metadata, sourceURL);
ResourcePath resourcePath = requestContext.getResourcePath();
String path = null;
if (resourcePath != null) {
path = resourcePath.getPath();
}
onPutCompleted(path,
Collections.singletonMap(sourceURL, wsdlPath),
Collections.<String>emptyList(), requestContext);
requestContext.setActualPath(wsdlPath);
} catch (Exception e) {
throw new RegistryException(e.getMessage(), e);
}
requestContext.setProcessingComplete(true);
if (wsdlProcessor != null && CommonConstants.ENABLE.equals(System.getProperty(CommonConstants.UDDI_SYSTEM_PROPERTY))
&& !org.wso2.carbon.registry.common.CommonConstants.isExternalUDDIInvoke.get()) {
AuthToken authToken = UDDIUtil.getPublisherAuthToken();
if(authToken == null){
return;
}
BusinessServiceInfo businessServiceInfo = new BusinessServiceInfo();
businessServiceInfo.setServiceWSDLInfo(wsdlProcessor.getMasterWSDLInfo());
UDDIPublisher publisher = new UDDIPublisher();
publisher.publishBusinessService(authToken, businessServiceInfo);
}
} finally {
CommonUtil.releaseUpdateLock();
}
}
/**
* Method that runs the WSDL import/upload procedure.
*
* @param requestContext the request context for the import/put operation
* @param metadata the resource metadata
* @param sourceURL the URL from which the WSDL is imported
* @param wsdlProcessor the WSDL Processor instance, used for upload and validation
*
* @return the path at which the WSDL was uploaded to
*
* @throws RegistryException if the operation failed.
*/
protected String processWSDLImport(RequestContext requestContext, WSDLProcessor wsdlProcessor,
Resource metadata, String sourceURL)
throws RegistryException {
return wsdlProcessor.addWSDLToRegistry(requestContext, sourceURL, metadata, false, true,
disableWSDLValidation,disableSymlinkCreation);
}
public void setDisableWSDLValidation(String disableWSDLValidation) {
this.disableWSDLValidation = Boolean.toString(true).equals(disableWSDLValidation);
}
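    /**
     * Method to handle the delete operation on a WSDL resource. Acquires the update lock and
     * fetches the resource at the given path before the default delete behaviour proceeds.
     *
     * @param requestContext the request context for the delete operation
     *
     * @throws RegistryException if the operation failed.
     */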
@Override
public void delete(RequestContext requestContext) throws RegistryException {
if (!CommonUtil.isUpdateLockAvailable()) {
return;
}
CommonUtil.acquireUpdateLock();
try {
Registry registry = requestContext.getRegistry();
ResourcePath resourcePath = requestContext.getResourcePath();
if (resourcePath == null) {
throw new RegistryException("The resource path is not available.");
}
Resource resource = registry.get(resourcePath.getPath());
} finally {
CommonUtil.releaseUpdateLock();
}
}
}
|
|
package xeadModeler;
/*
* Copyright (c) 2014 WATANABE kozo <[email protected]>,
* All rights reserved.
*
* This file is part of XEAD Modeler.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the XEAD Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.util.*;
import javax.swing.*;
import javax.swing.event.*;
import org.apache.poi.xssf.usermodel.*;
import org.w3c.dom.*;
public class DialogMatrixList extends JDialog {
private static final long serialVersionUID = 1L;
private static ResourceBundle res = ResourceBundle.getBundle("xeadModeler.Res");
private Modeler frame_;
private String fileName_;
private String xlsFileName;
private JPanel jPanelMain = new JPanel();
private JPanel jPanelSouth = new JPanel();
private JPanel jPanelCenter = new JPanel();
private BorderLayout borderLayout1 = new BorderLayout();
private GridLayout gridLayout1 = new GridLayout();
private JCheckBox jCheckBoxSubjectAreaAndTask = new JCheckBox();
private JCheckBox jCheckBoxSubjectAreaAndFunction = new JCheckBox();
private JCheckBox jCheckBoxTaskAndFunction = new JCheckBox();
private JCheckBox jCheckBoxTaskAndTable = new JCheckBox();
private JCheckBox jCheckBoxTableAndFunction = new JCheckBox();
private JButton jButtonStart = new JButton();
private JProgressBar jProgressBar = new JProgressBar();
private JButton jButtonCloseDialog = new JButton();
private NodeList subjectAreaList = null;
private NodeList taskList = null;
private NodeList taskTypeList = null;
private NodeList roleList = null;
private NodeList subsystemList = null;
private NodeList tableList = null;
private NodeList tableTypeList = null;
private NodeList functionList = null;
private NodeList functionTypeList = null;
private int currentRowNumber = 0;
private int countOfErrors = 0;
private xeadModeler.Modeler.SortableDomElementListModel sortableDomElementListModel0 = null;
private xeadModeler.Modeler.SortableDomElementListModel sortableDomElementListModel1 = null;
private xeadModeler.Modeler.SortableDomElementListModel sortableDomElementListModel2 = null;
private xeadModeler.Modeler.SortableDomElementListModel sortableDomElementListModel3 = null;
private ArrayList<String> keyList = new ArrayList<String>();
private HashMap<String, String> hashKeyList = new HashMap<String, String>();
private XSSFFont fontTitle = null;
private XSSFFont fontHeader1 = null;
private XSSFFont fontHeader2 = null;
private XSSFFont fontValue = null;
private XSSFWorkbook workBook = null;
private XSSFCellStyle styleTitle = null;
private XSSFCellStyle styleHeaderRotated = null;
private XSSFCellStyle styleHeaderNormal = null;
private XSSFCellStyle styleHeaderNumber = null;
private XSSFCellStyle styleValue = null;
private XSSFCellStyle styleValueNumber = null;
private XSSFCellStyle styleCheck = null;
public DialogMatrixList(Modeler frame, String title, boolean modal) {
super(frame, title, modal);
try {
frame_ = frame;
jbInit();
pack();
}
catch(Exception ex) {
ex.printStackTrace();
}
}
public DialogMatrixList(Modeler frame) {
this(frame, "", true);
}
private void jbInit() throws Exception {
jPanelMain.setLayout(borderLayout1);
jPanelMain.setPreferredSize(new Dimension(300, 180));
jPanelMain.setBorder(null);
jPanelSouth.setBorder(BorderFactory.createEtchedBorder());
jPanelSouth.setLayout(null);
jPanelSouth.setPreferredSize(new Dimension(800, 45));
jPanelCenter.setLayout(gridLayout1);
gridLayout1.setColumns(1);
gridLayout1.setRows(5);
jPanelCenter.setLayout(gridLayout1);
jCheckBoxSubjectAreaAndTask.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jCheckBoxSubjectAreaAndTask.setText(res.getString("S75"));
jCheckBoxSubjectAreaAndTask.addChangeListener(new DialogMatrixList_jCheckBox_changeAdapter(this));
jCheckBoxSubjectAreaAndFunction.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jCheckBoxSubjectAreaAndFunction.setText(res.getString("S76"));
jCheckBoxSubjectAreaAndFunction.addChangeListener(new DialogMatrixList_jCheckBox_changeAdapter(this));
jCheckBoxTaskAndFunction.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jCheckBoxTaskAndFunction.setText(res.getString("S77"));
jCheckBoxTaskAndFunction.addChangeListener(new DialogMatrixList_jCheckBox_changeAdapter(this));
jCheckBoxTaskAndTable.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jCheckBoxTaskAndTable.setText(res.getString("S78"));
jCheckBoxTaskAndTable.addChangeListener(new DialogMatrixList_jCheckBox_changeAdapter(this));
jCheckBoxTableAndFunction.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jCheckBoxTableAndFunction.setText(res.getString("S79"));
jCheckBoxTableAndFunction.addChangeListener(new DialogMatrixList_jCheckBox_changeAdapter(this));
getContentPane().add(jPanelMain);
jPanelMain.add(jPanelSouth, BorderLayout.SOUTH);
jPanelMain.add(jPanelCenter, BorderLayout.CENTER);
jPanelCenter.add(jCheckBoxSubjectAreaAndTask, null);
jPanelCenter.add(jCheckBoxSubjectAreaAndFunction, null);
jPanelCenter.add(jCheckBoxTaskAndFunction, null);
jPanelCenter.add(jCheckBoxTaskAndTable, null);
jPanelCenter.add(jCheckBoxTableAndFunction, null);
jButtonCloseDialog.setBounds(new Rectangle(13, 8, 110, 27));
jButtonCloseDialog.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jButtonCloseDialog.setText(res.getString("DialogDocuments08"));
jButtonCloseDialog.addActionListener(new DialogMatrixList_jButtonCloseDialog_actionAdapter(this));
jButtonStart.setBounds(new Rectangle(170, 8, 120, 27));
jButtonStart.setFont(new java.awt.Font(frame_.mainFontName, 0, Modeler.MAIN_FONT_SIZE));
jButtonStart.setText(res.getString("DialogDocuments07"));
jButtonStart.addActionListener(new DialogMatrixList_jButtonStart_actionAdapter(this));
jProgressBar.setBounds(new Rectangle(170, 8, 120, 27));
jProgressBar.setVisible(false);
jProgressBar.setStringPainted(true);
jPanelSouth.add(jButtonCloseDialog, null);
jPanelSouth.add(jButtonStart, null);
jPanelSouth.add(jProgressBar, null);
sortableDomElementListModel0 = frame_.new SortableDomElementListModel();
sortableDomElementListModel1 = frame_.new SortableDomElementListModel();
sortableDomElementListModel2 = frame_.new SortableDomElementListModel();
sortableDomElementListModel3 = frame_.new SortableDomElementListModel();
this.setResizable(false);
this.setTitle(res.getString("S74"));
Dimension scrSize = Toolkit.getDefaultToolkit().getScreenSize();
Dimension dlgSize = this.getPreferredSize();
this.setLocation((scrSize.width - dlgSize.width)/2 , (scrSize.height - dlgSize.height)/2);
this.pack();
}
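	// Shows the dialog for the model file given and returns the full path of the
	// generated .xlsx file, or an empty string if no matrix list was generated.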
public String request(String fileName) {
fileName_ = fileName;
xlsFileName = "";
subsystemList = frame_.domDocument.getElementsByTagName("Subsystem");
subjectAreaList = frame_.domDocument.getElementsByTagName("SubjectArea");
taskTypeList = frame_.domDocument.getElementsByTagName("TaskType");
taskList = frame_.domDocument.getElementsByTagName("Task");
roleList = frame_.domDocument.getElementsByTagName("Role");
tableList = frame_.domDocument.getElementsByTagName("Table");
tableTypeList = frame_.domDocument.getElementsByTagName("TableType");
functionList = frame_.domDocument.getElementsByTagName("Function");
functionTypeList = frame_.domDocument.getElementsByTagName("FunctionType");
jCheckBoxSubjectAreaAndTask.setSelected(false);
jCheckBoxSubjectAreaAndFunction.setSelected(false);
jCheckBoxTaskAndFunction.setSelected(false);
jCheckBoxTaskAndTable.setSelected(false);
jCheckBoxTableAndFunction.setSelected(false);
jButtonStart.setEnabled(false);
super.setVisible(true);
return xlsFileName;
}
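	// Generates one sheet per selected matrix into a new workbook, writes it as
	// "MatrixList<timestamp>.xlsx" next to the model file, and opens the file when
	// generation succeeded.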
void jButtonStart_actionPerformed(ActionEvent e) {
int countOfDefinitions = 0;
try{
setCursor(new Cursor(Cursor.WAIT_CURSOR));
jProgressBar.setVisible(true);
jButtonStart.setVisible(false);
countOfErrors = 0;
File file = new File(fileName_);
xlsFileName = file.getParent() + File.separator + "MatrixList" + getStringValueOfDateAndTime() + ".xlsx";
if (jCheckBoxSubjectAreaAndTask.isSelected()) {
countOfDefinitions = countOfDefinitions + taskList.getLength();
}
if (jCheckBoxSubjectAreaAndFunction.isSelected()) {
countOfDefinitions = countOfDefinitions + functionList.getLength() + subjectAreaList.getLength();
}
if (jCheckBoxTaskAndFunction.isSelected()) {
countOfDefinitions = countOfDefinitions + functionList.getLength() + taskList.getLength();
}
if (jCheckBoxTaskAndTable.isSelected()) {
countOfDefinitions = countOfDefinitions + tableList.getLength() + taskList.getLength();
}
if (jCheckBoxTableAndFunction.isSelected()) {
countOfDefinitions = countOfDefinitions + tableList.getLength() + functionList.getLength();
}
jProgressBar.setValue(0);
jProgressBar.setMaximum(countOfDefinitions);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
FileOutputStream fileOut = new FileOutputStream(xlsFileName);
createWorkBookAndStyles();
if (jCheckBoxSubjectAreaAndTask.isSelected()) {
generateSheetForSubjectAreaAndTask();
}
if (jCheckBoxSubjectAreaAndFunction.isSelected()) {
generateSheetForSubjectAreaAndFunction();
}
if (jCheckBoxTaskAndFunction.isSelected()) {
generateSheetForTaskAndFunction();
}
if (jCheckBoxTaskAndTable.isSelected()) {
generateSheetForTaskAndTable();
}
if (jCheckBoxTableAndFunction.isSelected()) {
generateSheetForTableAndFunction();
}
workBook.write(fileOut);
fileOut.close();
} catch(Exception ex1) {
ex1.printStackTrace();
} finally {
jProgressBar.setVisible(false);
jButtonStart.setVisible(true);
setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
if (countOfErrors == 0) {
//JOptionPane.showMessageDialog(this.getContentPane(), res.getString("S81") + "\n" + xlsFileName);
File workXlsFile = new File(xlsFileName);
try {
setCursor(new Cursor(Cursor.WAIT_CURSOR));
frame_.desktop.open(workXlsFile);
} catch (Exception ex) {
} finally {
setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
}
} else {
countOfDefinitions = countOfDefinitions - countOfErrors;
				JOptionPane.showMessageDialog(this.getContentPane(), "Generating the Matrix List failed, possibly because too many columns were requested for a single Excel sheet.");
}
super.setVisible(false);
}
}
void generateSheetForSubjectAreaAndTask() {
try {
currentRowNumber = 0;
countOfErrors = 0;
XSSFSheet sheet = workBook.createSheet(res.getString("S75"));
sheet.setDefaultRowHeight((short)300);
sheet.setDefaultColumnWidth(9);
sheet.setColumnWidth(0, 1100);
sheet.setColumnWidth(1, 6000); // Task Type //
sheet.setColumnWidth(2, 6000); // Role //
sheet.setColumnWidth(3, 8000); // Task //
XSSFRow topRow = sheet.createRow(currentRowNumber);
topRow.setHeight((short)2500);
XSSFCell cellSequence = topRow.createCell(0);
cellSequence.setCellStyle(styleHeaderNumber);
cellSequence.setCellValue(new XSSFRichTextString("No"));
XSSFCell cellTaskType = topRow.createCell(1);
cellTaskType.setCellStyle(styleHeaderNormal);
cellTaskType.setCellValue(new XSSFRichTextString(res.getString("S362")));
XSSFCell cellRoleName = topRow.createCell(2);
cellRoleName.setCellStyle(styleHeaderNormal);
cellRoleName.setCellValue(new XSSFRichTextString(res.getString("S348")));
XSSFCell cellTaskName = topRow.createCell(3);
cellTaskName.setCellStyle(styleHeaderNormal);
cellTaskName.setCellValue(new XSSFRichTextString(res.getString("S360")));
setupSubjectAreaColumnsForTask(sheet, topRow);
setupTaskRowsForSubjectAreas(sheet);
} catch (Exception ex) {
countOfErrors++;
}
}
void generateSheetForSubjectAreaAndFunction() {
try {
currentRowNumber = 0;
countOfErrors = 0;
XSSFSheet sheet = workBook.createSheet(res.getString("S76"));
sheet.setDefaultRowHeight((short)300);
sheet.setDefaultColumnWidth(9);
sheet.setColumnWidth(0, 1100);
sheet.setColumnWidth(1, 6000); // Subsystem //
sheet.setColumnWidth(2, 12000); // Function //
sheet.setColumnWidth(3, 6000); // Function Type //
XSSFRow topRow = sheet.createRow(currentRowNumber);
topRow.setHeight((short)2500);
XSSFCell cellSequence = topRow.createCell(0);
cellSequence.setCellStyle(styleHeaderNumber);
cellSequence.setCellValue(new XSSFRichTextString("No"));
XSSFCell cellTaskType = topRow.createCell(1);
cellTaskType.setCellStyle(styleHeaderNormal);
cellTaskType.setCellValue(new XSSFRichTextString(res.getString("S413")));
XSSFCell cellRoleName = topRow.createCell(2);
cellRoleName.setCellStyle(styleHeaderNormal);
cellRoleName.setCellValue(new XSSFRichTextString(res.getString("S415")));
XSSFCell cellTaskName = topRow.createCell(3);
cellTaskName.setCellStyle(styleHeaderNormal);
cellTaskName.setCellValue(new XSSFRichTextString(res.getString("S417")));
setupSubjectAreaColumnsForFunction(sheet, topRow);
setupFunctionRowsForSubjectAreas(sheet);
} catch (Exception ex) {
countOfErrors++;
}
}
void generateSheetForTaskAndFunction() {
try {
currentRowNumber = 0;
countOfErrors = 0;
XSSFSheet sheet = workBook.createSheet(res.getString("S77"));
sheet.setDefaultRowHeight((short)300);
sheet.setDefaultColumnWidth(9);
sheet.setColumnWidth(0, 1100);
sheet.setColumnWidth(1, 6000); // Subsystem //
sheet.setColumnWidth(2, 12000); // Function //
sheet.setColumnWidth(3, 6000); // Function Type //
XSSFRow topRow = sheet.createRow(currentRowNumber);
topRow.setHeight((short)2500);
XSSFCell cellSequence = topRow.createCell(0);
cellSequence.setCellStyle(styleHeaderNumber);
cellSequence.setCellValue(new XSSFRichTextString("No"));
XSSFCell cellTaskType = topRow.createCell(1);
cellTaskType.setCellStyle(styleHeaderNormal);
cellTaskType.setCellValue(new XSSFRichTextString(res.getString("S413")));
XSSFCell cellRoleName = topRow.createCell(2);
cellRoleName.setCellStyle(styleHeaderNormal);
cellRoleName.setCellValue(new XSSFRichTextString(res.getString("S415")));
XSSFCell cellTaskName = topRow.createCell(3);
cellTaskName.setCellStyle(styleHeaderNormal);
cellTaskName.setCellValue(new XSSFRichTextString(res.getString("S417")));
setupTaskColumnsForFunction(sheet, topRow);
setupFunctionRowsForTasks(sheet);
} catch (Exception ex) {
countOfErrors++;
}
}
void generateSheetForTaskAndTable() {
try {
currentRowNumber = 0;
countOfErrors = 0;
XSSFSheet sheet = workBook.createSheet(res.getString("S78"));
sheet.setDefaultRowHeight((short)300);
sheet.setDefaultColumnWidth(9);
sheet.setColumnWidth(0, 1100);
sheet.setColumnWidth(1, 6000); // Subsystem //
sheet.setColumnWidth(2, 8000); // Table //
sheet.setColumnWidth(3, 4500); // Table Type //
XSSFRow topRow = sheet.createRow(currentRowNumber);
topRow.setHeight((short)2500);
XSSFCell cellSequence = topRow.createCell(0);
cellSequence.setCellStyle(styleHeaderNumber);
cellSequence.setCellValue(new XSSFRichTextString("No"));
XSSFCell cellTaskType = topRow.createCell(1);
cellTaskType.setCellStyle(styleHeaderNormal);
cellTaskType.setCellValue(new XSSFRichTextString(res.getString("S413")));
XSSFCell cellRoleName = topRow.createCell(2);
cellRoleName.setCellStyle(styleHeaderNormal);
cellRoleName.setCellValue(new XSSFRichTextString(res.getString("S471")));
XSSFCell cellTaskName = topRow.createCell(3);
cellTaskName.setCellStyle(styleHeaderNormal);
cellTaskName.setCellValue(new XSSFRichTextString(res.getString("S477")));
setupTaskColumnsForTable(sheet, topRow);
setupTableRowsForTask(sheet);
} catch (Exception ex) {
countOfErrors++;
}
}
void generateSheetForTableAndFunction() {
try {
currentRowNumber = 0;
countOfErrors = 0;
XSSFSheet sheet = workBook.createSheet(res.getString("S79"));
sheet.setDefaultRowHeight((short)300);
sheet.setDefaultColumnWidth(9);
sheet.setColumnWidth(0, 1100);
sheet.setColumnWidth(1, 6000); // Subsystem //
sheet.setColumnWidth(2, 12000); // Function //
sheet.setColumnWidth(3, 6000); // Function Type //
XSSFRow topRow = sheet.createRow(currentRowNumber);
topRow.setHeight((short)2500);
XSSFCell cellSequence = topRow.createCell(0);
cellSequence.setCellStyle(styleHeaderNumber);
cellSequence.setCellValue(new XSSFRichTextString("No"));
XSSFCell cellTaskType = topRow.createCell(1);
cellTaskType.setCellStyle(styleHeaderNormal);
cellTaskType.setCellValue(new XSSFRichTextString(res.getString("S413")));
XSSFCell cellRoleName = topRow.createCell(2);
cellRoleName.setCellStyle(styleHeaderNormal);
cellRoleName.setCellValue(new XSSFRichTextString(res.getString("S415")));
XSSFCell cellTaskName = topRow.createCell(3);
cellTaskName.setCellStyle(styleHeaderNormal);
cellTaskName.setCellValue(new XSSFRichTextString(res.getString("S417")));
setupTableColumnsForFunction(sheet, topRow);
setupFunctionRowsForTable(sheet);
} catch (Exception ex) {
countOfErrors++;
}
}
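	// Writes one rotated header column per subject area (starting at column 4),
	// or an "N/A" header when no subject areas are defined.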
void setupSubjectAreaColumnsForTask(XSSFSheet sheet, XSSFRow topRow) {
org.w3c.dom.Element workElement1 = null;
String workString = "";
sortableDomElementListModel0.removeAllElements();
for (int i = 0; i < subjectAreaList.getLength(); i++) {
sortableDomElementListModel0.addElement((Object)subjectAreaList.item(i));
}
sortableDomElementListModel0.sortElements();
for (int i = 0; i < sortableDomElementListModel0.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(i);
XSSFCell cell = topRow.createCell(i+4);
cell.setCellStyle(styleHeaderRotated);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cell.setCellValue(new XSSFRichTextString(workString));
sheet.setColumnWidth(i+4, 1000);
}
if (sortableDomElementListModel0.getSize() == 0) {
XSSFCell cell = topRow.createCell(3);
cell.setCellStyle(styleHeaderNormal);
cell.setCellValue(new XSSFRichTextString("N/A"));
}
}
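	// Writes one rotated header column per subject area and collects
	// "subjectAreaID,functionID" pairs in keyList by following each subject area's
	// "Process" dataflow nodes to their tasks and the tasks' TaskFunctionIO entries.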
void setupSubjectAreaColumnsForFunction(XSSFSheet sheet, XSSFRow topRow) {
org.w3c.dom.Element workElement1, workElement2, workElement3, workElement4;
String workString = "";
NodeList nodeList1, nodeList2;
keyList.clear();
sortableDomElementListModel0.removeAllElements();
for (int i = 0; i < subjectAreaList.getLength(); i++) {
sortableDomElementListModel0.addElement((Object)subjectAreaList.item(i));
}
sortableDomElementListModel0.sortElements();
for (int i = 0; i < sortableDomElementListModel0.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(i);
XSSFCell cell = topRow.createCell(i+4);
cell.setCellStyle(styleHeaderRotated);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cell.setCellValue(new XSSFRichTextString(workString));
sheet.setColumnWidth(i+4, 1000);
nodeList1 = workElement1.getElementsByTagName("DataflowNode");
for (int j = 0; j < nodeList1.getLength(); j++) {
workElement2 = (org.w3c.dom.Element)nodeList1.item(j);
if (workElement2.getAttribute("Type").equals("Process")) {
for (int k = 0; k < taskList.getLength(); k++) {
workElement3 = (org.w3c.dom.Element)taskList.item(k);
if (workElement3.getAttribute("ID").equals(workElement2.getAttribute("TaskID"))) {
nodeList2 = workElement3.getElementsByTagName("TaskFunctionIO");
for (int l = 0; l < nodeList2.getLength(); l++) {
workElement4 = (org.w3c.dom.Element)nodeList2.item(l);
workString = workElement1.getAttribute("ID") + "," + workElement4.getAttribute("FunctionID");
if (!keyList.contains(workString)) {
keyList.add(workString);
}
}
break;
}
}
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
if (sortableDomElementListModel0.getSize() == 0) {
XSSFCell cell = topRow.createCell(3);
cell.setCellStyle(styleHeaderNormal);
cell.setCellValue(new XSSFRichTextString("N/A"));
}
}
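	// Writes one rotated header column per task and collects "taskID,functionID"
	// pairs in keyList from each task's TaskFunctionIO entries.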
void setupTaskColumnsForFunction(XSSFSheet sheet, XSSFRow topRow) {
org.w3c.dom.Element workElement1, workElement2;
String workString = "";
NodeList nodeList1;
keyList.clear();
sortableDomElementListModel0.removeAllElements();
for (int i = 0; i < taskList.getLength(); i++) {
sortableDomElementListModel0.addElement((Object)taskList.item(i));
}
sortableDomElementListModel0.sortElements();
for (int i = 0; i < sortableDomElementListModel0.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(i);
XSSFCell cell = topRow.createCell(i+4);
cell.setCellStyle(styleHeaderRotated);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cell.setCellValue(new XSSFRichTextString(workString));
sheet.setColumnWidth(i+4, 1000);
nodeList1 = workElement1.getElementsByTagName("TaskFunctionIO");
for (int j = 0; j < nodeList1.getLength(); j++) {
workElement2 = (org.w3c.dom.Element)nodeList1.item(j);
workString = workElement1.getAttribute("ID") + "," + workElement2.getAttribute("FunctionID");
if (!keyList.contains(workString)) {
keyList.add(workString);
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
if (sortableDomElementListModel0.getSize() == 0) {
XSSFCell cell = topRow.createCell(3);
cell.setCellStyle(styleHeaderNormal);
cell.setCellValue(new XSSFRichTextString("N/A"));
}
}
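	// Writes one rotated header column per task and accumulates CRUD marks in
	// hashKeyList, keyed by "taskID,tableID", from the IOTable entries of the
	// functions referenced by each task.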
void setupTaskColumnsForTable(XSSFSheet sheet, XSSFRow topRow) {
org.w3c.dom.Element workElement1, workElement2, workElement3, workElement4;
String workString = "";
String crudMark = "";
NodeList nodeList1, nodeList2;
hashKeyList.clear();
sortableDomElementListModel0.removeAllElements();
for (int i = 0; i < taskList.getLength(); i++) {
sortableDomElementListModel0.addElement((Object)taskList.item(i));
}
sortableDomElementListModel0.sortElements();
for (int i = 0; i < sortableDomElementListModel0.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(i);
XSSFCell cell = topRow.createCell(i+4);
cell.setCellStyle(styleHeaderRotated);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cell.setCellValue(new XSSFRichTextString(workString));
sheet.setColumnWidth(i+4, 1400);
nodeList1 = workElement1.getElementsByTagName("TaskFunctionIO");
for (int j = 0; j < nodeList1.getLength(); j++) {
workElement2 = (org.w3c.dom.Element)nodeList1.item(j);
for (int k = 0; k < functionList.getLength(); k++) {
workElement3 = (org.w3c.dom.Element)functionList.item(k);
if (workElement3.getAttribute("ID").equals(workElement2.getAttribute("FunctionID"))) {
nodeList2 = workElement3.getElementsByTagName("IOTable");
for (int l = 0; l < nodeList2.getLength(); l++) {
workElement4 = (org.w3c.dom.Element)nodeList2.item(l);
crudMark = "";
workString = workElement1.getAttribute("ID") + "," + workElement4.getAttribute("TableID");
if (hashKeyList.containsKey(workString)) {
crudMark = hashKeyList.get(workString);
}
if (workElement4.getAttribute("OpC").equals("+") && !crudMark.contains("C")) {
crudMark = crudMark + "C";
}
if (workElement4.getAttribute("OpR").equals("+") && !crudMark.contains("R")) {
crudMark = crudMark + "R";
}
if (workElement4.getAttribute("OpU").equals("+") && !crudMark.contains("U")) {
crudMark = crudMark + "U";
}
if (workElement4.getAttribute("OpD").equals("+") && !crudMark.contains("D")) {
crudMark = crudMark + "D";
}
hashKeyList.put(workString, crudMark);
}
break;
}
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
if (sortableDomElementListModel0.getSize() == 0) {
XSSFCell cell = topRow.createCell(3);
cell.setCellStyle(styleHeaderNormal);
cell.setCellValue(new XSSFRichTextString("N/A"));
}
}
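	// Writes one rotated header column per table; the CRUD marks for the matrix cells
	// are derived later, per function, in setupFunctionRowsForTable().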
void setupTableColumnsForFunction(XSSFSheet sheet, XSSFRow topRow) {
org.w3c.dom.Element workElement1;
String workString = "";
hashKeyList.clear();
sortableDomElementListModel0.removeAllElements();
for (int i = 0; i < tableList.getLength(); i++) {
sortableDomElementListModel0.addElement((Object)tableList.item(i));
}
sortableDomElementListModel0.sortElements();
for (int i = 0; i < sortableDomElementListModel0.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(i);
XSSFCell cell = topRow.createCell(i+4);
cell.setCellStyle(styleHeaderRotated);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cell.setCellValue(new XSSFRichTextString(workString));
sheet.setColumnWidth(i+4, 1400);
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
if (sortableDomElementListModel0.getSize() == 0) {
XSSFCell cell = topRow.createCell(3);
cell.setCellStyle(styleHeaderNormal);
cell.setCellValue(new XSSFRichTextString("N/A"));
}
}
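	// Writes one row per task, grouped by task type and role (or by role only when at
	// most one task type is defined), and marks the subject area columns whose dataflow
	// contains the task.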
void setupTaskRowsForSubjectAreas(XSSFSheet sheet) {
org.w3c.dom.Element workElement0, workElement1, workElement2, workElement3, workElement4;
String workString = "";
NodeList nodeList;
boolean hasThisTask;
sortableDomElementListModel1.removeAllElements();
sortableDomElementListModel2.removeAllElements();
sortableDomElementListModel3.removeAllElements();
for (int i = 0; i < taskTypeList.getLength(); i++) {
sortableDomElementListModel1.addElement((Object)taskTypeList.item(i));
}
for (int i = 0; i < roleList.getLength(); i++) {
sortableDomElementListModel2.addElement((Object)roleList.item(i));
}
for (int i = 0; i < taskList.getLength(); i++) {
sortableDomElementListModel3.addElement((Object)taskList.item(i));
}
sortableDomElementListModel1.sortElements();
sortableDomElementListModel2.sortElements();
sortableDomElementListModel3.sortElements();
if (sortableDomElementListModel1.getSize() > 1) {
for (int i = 0; i < sortableDomElementListModel1.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel1.getElementAt(i);
for (int j = 0; j < sortableDomElementListModel2.getSize(); j++) {
workElement2 = (org.w3c.dom.Element)sortableDomElementListModel2.getElementAt(j);
for (int k = 0; k < sortableDomElementListModel3.getSize(); k++) {
workElement3 = (org.w3c.dom.Element)sortableDomElementListModel3.getElementAt(k);
if (workElement3.getAttribute("RoleID").equals(workElement2.getAttribute("ID"))
&& workElement3.getAttribute("TaskTypeID").equals(workElement1.getAttribute("ID"))) {
currentRowNumber++;
XSSFRow row = sheet.createRow(currentRowNumber);
XSSFCell cellSequence = row.createCell(0);
cellSequence.setCellStyle(styleValueNumber);
cellSequence.setCellValue(currentRowNumber);
XSSFCell cellTaskType = row.createCell(1);
cellTaskType.setCellStyle(styleValue);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cellTaskType.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellRole = row.createCell(2);
cellRole.setCellStyle(styleValue);
workString = workElement2.getAttribute("SortKey") + " " + workElement2.getAttribute("Name");
cellRole.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellTask = row.createCell(3);
cellTask.setCellStyle(styleValue);
workString = workElement3.getAttribute("SortKey") + " " + workElement3.getAttribute("Name");
cellTask.setCellValue(new XSSFRichTextString(workString));
for (int m = 0; m < sortableDomElementListModel0.getSize(); m++) {
workElement0 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(m);
hasThisTask = false;
nodeList = workElement0.getElementsByTagName("DataflowNode");
for (int n = 0; n < nodeList.getLength(); n++) {
workElement4 = (org.w3c.dom.Element)nodeList.item(n);
if (workElement4.getAttribute("TaskID").equals(workElement3.getAttribute("ID"))) {
hasThisTask = true;
break;
}
}
XSSFCell cellMatrix = row.createCell(m+4);
cellMatrix.setCellStyle(styleCheck);
if (hasThisTask) {
cellMatrix.setCellValue(new XSSFRichTextString(res.getString("S80")));
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
}
}
}
} else {
for (int j = 0; j < sortableDomElementListModel2.getSize(); j++) {
workElement2 = (org.w3c.dom.Element)sortableDomElementListModel2.getElementAt(j);
for (int k = 0; k < sortableDomElementListModel3.getSize(); k++) {
workElement3 = (org.w3c.dom.Element)sortableDomElementListModel3.getElementAt(k);
if (workElement3.getAttribute("RoleID").equals(workElement2.getAttribute("ID"))) {
currentRowNumber++;
XSSFRow row = sheet.createRow(currentRowNumber);
XSSFCell cellSequence = row.createCell(0);
cellSequence.setCellStyle(styleValueNumber);
cellSequence.setCellValue(currentRowNumber);
XSSFCell cellTaskType = row.createCell(1);
cellTaskType.setCellStyle(styleValue);
cellTaskType.setCellValue(new XSSFRichTextString("N/A"));
XSSFCell cellRole = row.createCell(2);
cellRole.setCellStyle(styleValue);
workString = workElement2.getAttribute("SortKey") + " / " + workElement2.getAttribute("Name");
cellRole.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellTask = row.createCell(3);
cellTask.setCellStyle(styleValue);
workString = workElement3.getAttribute("SortKey") + " / " + workElement3.getAttribute("Name");
cellTask.setCellValue(new XSSFRichTextString(workString));
for (int m = 0; m < sortableDomElementListModel0.getSize(); m++) {
workElement0 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(m);
hasThisTask = false;
nodeList = workElement0.getElementsByTagName("DataflowNode");
for (int n = 0; n < nodeList.getLength(); n++) {
workElement4 = (org.w3c.dom.Element)nodeList.item(n);
if (workElement4.getAttribute("TaskID").equals(workElement3.getAttribute("ID"))) {
hasThisTask = true;
}
}
XSSFCell cellMatrix = row.createCell(m+4);
cellMatrix.setCellStyle(styleCheck);
if (hasThisTask) {
cellMatrix.setCellValue(new XSSFRichTextString(res.getString("S80")));
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
}
}
}
}
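	// Writes one row per function, grouped by subsystem, and marks the subject area
	// columns whose "subjectAreaID,functionID" pair was collected in keyList.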
void setupFunctionRowsForSubjectAreas(XSSFSheet sheet) {
org.w3c.dom.Element workElement0, workElement1, workElement2, workElement3;
String workString = "";
sortableDomElementListModel1.removeAllElements();
sortableDomElementListModel2.removeAllElements();
for (int i = 0; i < subsystemList.getLength(); i++) {
sortableDomElementListModel1.addElement((Object)subsystemList.item(i));
}
for (int i = 0; i < functionList.getLength(); i++) {
sortableDomElementListModel2.addElement((Object)functionList.item(i));
}
sortableDomElementListModel1.sortElements();
sortableDomElementListModel2.sortElements();
for (int i = 0; i < sortableDomElementListModel1.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel1.getElementAt(i);
for (int j = 0; j < sortableDomElementListModel2.getSize(); j++) {
workElement2 = (org.w3c.dom.Element)sortableDomElementListModel2.getElementAt(j);
if (workElement2.getAttribute("SubsystemID").equals(workElement1.getAttribute("ID"))) {
currentRowNumber++;
XSSFRow row = sheet.createRow(currentRowNumber);
XSSFCell cellSequence = row.createCell(0);
cellSequence.setCellStyle(styleValueNumber);
cellSequence.setCellValue(currentRowNumber);
XSSFCell cellTaskType = row.createCell(1);
cellTaskType.setCellStyle(styleValue);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cellTaskType.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellRole = row.createCell(2);
cellRole.setCellStyle(styleValue);
workString = workElement2.getAttribute("SortKey") + " " + workElement2.getAttribute("Name");
cellRole.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellTask = row.createCell(3);
cellTask.setCellStyle(styleValue);
for (int k = 0; k < functionTypeList.getLength(); k++) {
workElement3 = (org.w3c.dom.Element)functionTypeList.item(k);
if (workElement2.getAttribute("FunctionTypeID").equals(workElement3.getAttribute("ID"))) {
workString = workElement3.getAttribute("SortKey") + " " + workElement3.getAttribute("Name");
break;
}
}
cellTask.setCellValue(new XSSFRichTextString(workString));
for (int m = 0; m < sortableDomElementListModel0.getSize(); m++) {
workElement0 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(m);
XSSFCell cellMatrix = row.createCell(m+4);
cellMatrix.setCellStyle(styleCheck);
workString = workElement0.getAttribute("ID") + "," + workElement2.getAttribute("ID");
if (keyList.contains(workString)) {
cellMatrix.setCellValue(new XSSFRichTextString(res.getString("S80")));
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
}
}
}
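	// Writes one row per function, grouped by subsystem, and marks the task columns
	// whose "taskID,functionID" pair was collected in keyList.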
void setupFunctionRowsForTasks(XSSFSheet sheet) {
org.w3c.dom.Element workElement0, workElement1, workElement2, workElement3;
String workString = "";
sortableDomElementListModel1.removeAllElements();
sortableDomElementListModel2.removeAllElements();
for (int i = 0; i < subsystemList.getLength(); i++) {
sortableDomElementListModel1.addElement((Object)subsystemList.item(i));
}
for (int i = 0; i < functionList.getLength(); i++) {
sortableDomElementListModel2.addElement((Object)functionList.item(i));
}
sortableDomElementListModel1.sortElements();
sortableDomElementListModel2.sortElements();
for (int i = 0; i < sortableDomElementListModel1.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel1.getElementAt(i);
for (int j = 0; j < sortableDomElementListModel2.getSize(); j++) {
workElement2 = (org.w3c.dom.Element)sortableDomElementListModel2.getElementAt(j);
if (workElement2.getAttribute("SubsystemID").equals(workElement1.getAttribute("ID"))) {
currentRowNumber++;
XSSFRow row = sheet.createRow(currentRowNumber);
XSSFCell cellSequence = row.createCell(0);
cellSequence.setCellStyle(styleValueNumber);
cellSequence.setCellValue(currentRowNumber);
XSSFCell cellTaskType = row.createCell(1);
cellTaskType.setCellStyle(styleValue);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cellTaskType.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellRole = row.createCell(2);
cellRole.setCellStyle(styleValue);
workString = workElement2.getAttribute("SortKey") + " " + workElement2.getAttribute("Name");
cellRole.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellTask = row.createCell(3);
cellTask.setCellStyle(styleValue);
for (int k = 0; k < functionTypeList.getLength(); k++) {
workElement3 = (org.w3c.dom.Element)functionTypeList.item(k);
if (workElement2.getAttribute("FunctionTypeID").equals(workElement3.getAttribute("ID"))) {
workString = workElement3.getAttribute("SortKey") + " " + workElement3.getAttribute("Name");
break;
}
}
cellTask.setCellValue(new XSSFRichTextString(workString));
for (int m = 0; m < sortableDomElementListModel0.getSize(); m++) {
workElement0 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(m);
XSSFCell cellMatrix = row.createCell(m+4);
cellMatrix.setCellStyle(styleCheck);
workString = workElement0.getAttribute("ID") + "," + workElement2.getAttribute("ID");
if (keyList.contains(workString)) {
cellMatrix.setCellValue(new XSSFRichTextString(res.getString("S80")));
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
}
}
}
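	// Writes one row per table, grouped by subsystem, and fills each task column with
	// the CRUD letters accumulated in hashKeyList for that "taskID,tableID" pair.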
void setupTableRowsForTask(XSSFSheet sheet) {
org.w3c.dom.Element workElement0, workElement1, workElement2, workElement3;
String workString = "";
String crudMark = "";
String crudCheck = "";
sortableDomElementListModel1.removeAllElements();
sortableDomElementListModel2.removeAllElements();
for (int i = 0; i < subsystemList.getLength(); i++) {
sortableDomElementListModel1.addElement((Object)subsystemList.item(i));
}
for (int i = 0; i < tableList.getLength(); i++) {
sortableDomElementListModel2.addElement((Object)tableList.item(i));
}
sortableDomElementListModel1.sortElements();
sortableDomElementListModel2.sortElements();
for (int i = 0; i < sortableDomElementListModel1.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel1.getElementAt(i);
for (int j = 0; j < sortableDomElementListModel2.getSize(); j++) {
workElement2 = (org.w3c.dom.Element)sortableDomElementListModel2.getElementAt(j);
if (workElement2.getAttribute("SubsystemID").equals(workElement1.getAttribute("ID"))) {
currentRowNumber++;
XSSFRow row = sheet.createRow(currentRowNumber);
XSSFCell cellSequence = row.createCell(0);
cellSequence.setCellStyle(styleValueNumber);
cellSequence.setCellValue(currentRowNumber);
XSSFCell cellTaskType = row.createCell(1);
cellTaskType.setCellStyle(styleValue);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cellTaskType.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellRole = row.createCell(2);
cellRole.setCellStyle(styleValue);
workString = workElement2.getAttribute("SortKey") + " " + workElement2.getAttribute("Name");
cellRole.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellTask = row.createCell(3);
cellTask.setCellStyle(styleValue);
for (int k = 0; k < tableTypeList.getLength(); k++) {
workElement3 = (org.w3c.dom.Element)tableTypeList.item(k);
if (workElement2.getAttribute("TableTypeID").equals(workElement3.getAttribute("ID"))) {
workString = workElement3.getAttribute("SortKey") + " " + workElement3.getAttribute("Name");
break;
}
}
cellTask.setCellValue(new XSSFRichTextString(workString));
for (int m = 0; m < sortableDomElementListModel0.getSize(); m++) {
workElement0 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(m);
XSSFCell cellMatrix = row.createCell(m+4);
cellMatrix.setCellStyle(styleCheck);
workString = workElement0.getAttribute("ID") + "," + workElement2.getAttribute("ID");
if (hashKeyList.containsKey(workString)) {
crudMark = hashKeyList.get(workString);
crudCheck = "";
if (crudMark.contains("C")) {
crudCheck = crudCheck + "C";
}
if (crudMark.contains("R")) {
crudCheck = crudCheck + "R";
}
if (crudMark.contains("U")) {
crudCheck = crudCheck + "U";
}
if (crudMark.contains("D")) {
crudCheck = crudCheck + "D";
}
cellMatrix.setCellValue(new XSSFRichTextString(crudCheck));
}
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
}
}
}
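	// Writes one row per function, grouped by subsystem, and fills each table column
	// with the CRUD letters derived from the function's IOTable entries for that table.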
void setupFunctionRowsForTable(XSSFSheet sheet) {
org.w3c.dom.Element workElement0, workElement1, workElement2, workElement3;
String workString = "";
String crudMark = "";
String crudCheck = "";
NodeList nodeList1;
sortableDomElementListModel1.removeAllElements();
sortableDomElementListModel2.removeAllElements();
for (int i = 0; i < subsystemList.getLength(); i++) {
sortableDomElementListModel1.addElement((Object)subsystemList.item(i));
}
for (int i = 0; i < functionList.getLength(); i++) {
sortableDomElementListModel2.addElement((Object)functionList.item(i));
}
sortableDomElementListModel1.sortElements();
sortableDomElementListModel2.sortElements();
for (int i = 0; i < sortableDomElementListModel1.getSize(); i++) {
workElement1 = (org.w3c.dom.Element)sortableDomElementListModel1.getElementAt(i);
for (int j = 0; j < sortableDomElementListModel2.getSize(); j++) {
workElement2 = (org.w3c.dom.Element)sortableDomElementListModel2.getElementAt(j);
if (workElement2.getAttribute("SubsystemID").equals(workElement1.getAttribute("ID"))) {
currentRowNumber++;
XSSFRow row = sheet.createRow(currentRowNumber);
XSSFCell cellSequence = row.createCell(0);
cellSequence.setCellStyle(styleValueNumber);
cellSequence.setCellValue(currentRowNumber);
XSSFCell cellTaskType = row.createCell(1);
cellTaskType.setCellStyle(styleValue);
workString = workElement1.getAttribute("SortKey") + " " + workElement1.getAttribute("Name");
cellTaskType.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellRole = row.createCell(2);
cellRole.setCellStyle(styleValue);
workString = workElement2.getAttribute("SortKey") + " " + workElement2.getAttribute("Name");
cellRole.setCellValue(new XSSFRichTextString(workString));
XSSFCell cellTask = row.createCell(3);
cellTask.setCellStyle(styleValue);
for (int k = 0; k < functionTypeList.getLength(); k++) {
workElement3 = (org.w3c.dom.Element)functionTypeList.item(k);
if (workElement2.getAttribute("FunctionTypeID").equals(workElement3.getAttribute("ID"))) {
workString = workElement3.getAttribute("SortKey") + " " + workElement3.getAttribute("Name");
break;
}
}
cellTask.setCellValue(new XSSFRichTextString(workString));
nodeList1 = workElement2.getElementsByTagName("IOTable");
for (int m = 0; m < sortableDomElementListModel0.getSize(); m++) {
workElement0 = (org.w3c.dom.Element)sortableDomElementListModel0.getElementAt(m);
XSSFCell cellMatrix = row.createCell(m+4);
cellMatrix.setCellStyle(styleCheck);
crudMark = "";
for (int p = 0; p < nodeList1.getLength(); p++) {
workElement3 = (org.w3c.dom.Element)nodeList1.item(p);
if (workElement3.getAttribute("TableID").equals(workElement0.getAttribute("ID"))) {
if (workElement3.getAttribute("OpC").equals("+") && !crudMark.contains("C")) {
crudMark = crudMark + "C";
}
if (workElement3.getAttribute("OpR").equals("+") && !crudMark.contains("R")) {
crudMark = crudMark + "R";
}
if (workElement3.getAttribute("OpU").equals("+") && !crudMark.contains("U")) {
crudMark = crudMark + "U";
}
if (workElement3.getAttribute("OpD").equals("+") && !crudMark.contains("D")) {
crudMark = crudMark + "D";
}
}
}
crudCheck = "";
if (crudMark.contains("C")) {
crudCheck = crudCheck + "C";
}
if (crudMark.contains("R")) {
crudCheck = crudCheck + "R";
}
if (crudMark.contains("U")) {
crudCheck = crudCheck + "U";
}
if (crudMark.contains("D")) {
crudCheck = crudCheck + "D";
}
cellMatrix.setCellValue(new XSSFRichTextString(crudCheck));
}
jProgressBar.setValue(jProgressBar.getValue() + 1);
jProgressBar.paintImmediately(0,0,jProgressBar.getWidth(),jProgressBar.getHeight());
}
}
}
}
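	// Creates the workbook and the shared fonts and cell styles used by every matrix sheet.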
void createWorkBookAndStyles() {
workBook = new XSSFWorkbook();
fontTitle = workBook.createFont();
fontTitle.setFontName(res.getString("DialogDocuments13"));
fontTitle.setFontHeightInPoints((short)14);
fontTitle.setColor(new XSSFColor(Color.WHITE));
fontTitle.setBoldweight(XSSFFont.BOLDWEIGHT_BOLD);
fontHeader1 = workBook.createFont();
fontHeader1.setFontName(res.getString("DialogDocuments13"));
fontHeader1.setFontHeightInPoints((short)10);
fontHeader1.setItalic(true);
fontHeader1.setColor(new XSSFColor(Color.WHITE));
fontHeader2 = workBook.createFont();
fontHeader2.setFontName(res.getString("DialogDocuments13"));
fontHeader2.setFontHeightInPoints((short)10);
fontValue = workBook.createFont();
fontValue.setFontName(res.getString("DialogDocuments14"));
fontValue.setFontHeightInPoints((short)10);
styleTitle = workBook.createCellStyle();
styleTitle.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleTitle.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleTitle.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleTitle.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleTitle.setFillForegroundColor(new XSSFColor(Color.DARK_GRAY));
styleTitle.setFillPattern(XSSFCellStyle.SOLID_FOREGROUND);
styleTitle.setAlignment(XSSFCellStyle.ALIGN_CENTER);
styleTitle.setVerticalAlignment(XSSFCellStyle.VERTICAL_CENTER);
styleTitle.setFont(fontTitle);
styleHeaderRotated = workBook.createCellStyle();
styleHeaderRotated.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleHeaderRotated.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleHeaderRotated.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleHeaderRotated.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleHeaderRotated.setAlignment(XSSFCellStyle.ALIGN_LEFT);
styleHeaderRotated.setVerticalAlignment(XSSFCellStyle.VERTICAL_TOP);
styleHeaderRotated.setAlignment(XSSFCellStyle.ALIGN_CENTER);
styleHeaderRotated.setRotation((short)180);
styleHeaderRotated.setFont(fontValue);
styleHeaderNormal = workBook.createCellStyle();
styleHeaderNormal.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleHeaderNormal.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleHeaderNormal.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleHeaderNormal.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleHeaderNormal.setAlignment(XSSFCellStyle.ALIGN_LEFT);
styleHeaderNormal.setVerticalAlignment(XSSFCellStyle.VERTICAL_CENTER);
styleHeaderNormal.setFont(fontValue);
styleHeaderNumber = workBook.createCellStyle();
styleHeaderNumber.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleHeaderNumber.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleHeaderNumber.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleHeaderNumber.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleHeaderNumber.setAlignment(XSSFCellStyle.ALIGN_RIGHT);
styleHeaderNumber.setVerticalAlignment(XSSFCellStyle.VERTICAL_CENTER);
styleHeaderNumber.setFont(fontValue);
styleValue = workBook.createCellStyle();
styleValue.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleValue.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleValue.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleValue.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleValue.setAlignment(XSSFCellStyle.ALIGN_LEFT);
styleValue.setVerticalAlignment(XSSFCellStyle.VERTICAL_TOP);
styleValue.setFont(fontValue);
styleValue.setWrapText(true);
styleCheck = workBook.createCellStyle();
styleCheck.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleCheck.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleCheck.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleCheck.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleCheck.setAlignment(XSSFCellStyle.ALIGN_CENTER);
styleCheck.setVerticalAlignment(XSSFCellStyle.VERTICAL_CENTER);
styleCheck.setFont(fontValue);
styleValueNumber = workBook.createCellStyle();
styleValueNumber.setBorderBottom(XSSFCellStyle.BORDER_THIN);
styleValueNumber.setBorderLeft(XSSFCellStyle.BORDER_THIN);
styleValueNumber.setBorderRight(XSSFCellStyle.BORDER_THIN);
styleValueNumber.setBorderTop(XSSFCellStyle.BORDER_THIN);
styleValueNumber.setAlignment(XSSFCellStyle.ALIGN_RIGHT);
styleValueNumber.setVerticalAlignment(XSSFCellStyle.VERTICAL_TOP);
styleValueNumber.setFont(fontValue);
}
void jButtonCloseDialog_actionPerformed(ActionEvent e) {
this.setVisible(false);
}
void jCheckBox_stateChanged(ChangeEvent e) {
jButtonStart.setEnabled(false);
if (jCheckBoxSubjectAreaAndTask.isSelected()
|| jCheckBoxSubjectAreaAndFunction.isSelected()
|| jCheckBoxTaskAndFunction.isSelected()
|| jCheckBoxTaskAndTable.isSelected()
|| jCheckBoxTableAndFunction.isSelected()) {
jButtonStart.setEnabled(true);
this.getRootPane().setDefaultButton(jButtonStart);
}
}
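	// Returns the current date and time as a "yyyyMMddHHmmss" string, used as a suffix
	// for the generated file name.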
String getStringValueOfDateAndTime() {
String returnValue = "";
GregorianCalendar calendar = new GregorianCalendar();
//
int year = calendar.get(Calendar.YEAR);
//
int month = calendar.get(Calendar.MONTH) + 1;
String monthStr = "";
if (month < 10) {
monthStr = "0" + Integer.toString(month);
} else {
monthStr = Integer.toString(month);
}
//
int day = calendar.get(Calendar.DAY_OF_MONTH);
String dayStr = "";
if (day < 10) {
dayStr = "0" + Integer.toString(day);
} else {
dayStr = Integer.toString(day);
}
//
int hour = calendar.get(Calendar.HOUR_OF_DAY);
String hourStr = "";
if (hour < 10) {
hourStr = "0" + Integer.toString(hour);
} else {
hourStr = Integer.toString(hour);
}
//
int minute = calendar.get(Calendar.MINUTE);
String minStr = "";
if (minute < 10) {
minStr = "0" + Integer.toString(minute);
} else {
minStr = Integer.toString(minute);
}
//
int second = calendar.get(Calendar.SECOND);
String secStr = "";
if (second < 10) {
secStr = "0" + Integer.toString(second);
} else {
secStr = Integer.toString(second);
}
//
returnValue = Integer.toString(year) + monthStr + dayStr + hourStr + minStr + secStr;
return returnValue;
}
}
class DialogMatrixList_jCheckBox_changeAdapter implements ChangeListener {
DialogMatrixList adaptee;
DialogMatrixList_jCheckBox_changeAdapter(DialogMatrixList adaptee) {
this.adaptee = adaptee;
}
public void stateChanged(ChangeEvent e) {
adaptee.jCheckBox_stateChanged(e);
}
}
class DialogMatrixList_jButtonStart_actionAdapter implements java.awt.event.ActionListener {
DialogMatrixList adaptee;
DialogMatrixList_jButtonStart_actionAdapter(DialogMatrixList adaptee) {
this.adaptee = adaptee;
}
public void actionPerformed(ActionEvent e) {
adaptee.jButtonStart_actionPerformed(e);
}
}
class DialogMatrixList_jButtonCloseDialog_actionAdapter implements java.awt.event.ActionListener {
DialogMatrixList adaptee;
DialogMatrixList_jButtonCloseDialog_actionAdapter(DialogMatrixList adaptee) {
this.adaptee = adaptee;
}
public void actionPerformed(ActionEvent e) {
adaptee.jButtonCloseDialog_actionPerformed(e);
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Guice;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.WebAppDescriptor;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.logaggregation.ContainerLogAggregationType;
import org.apache.hadoop.yarn.logaggregation.ContainerLogFileInfo;
import org.apache.hadoop.yarn.logaggregation.TestContainerLogsUtils;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationImpl;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.ResourcePlugin;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.ResourcePluginManager;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.gpu.AssignedGpuDevice;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.gpu.GpuDevice;
import org.apache.hadoop.yarn.server.nodemanager.health.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp;
import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.NMResourceInfo;
import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.gpu.GpuDeviceInformation;
import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.gpu.NMGpuResourceInfo;
import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.gpu.PerGpuDeviceInformation;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.webapp.YarnWebServiceParams;
import org.apache.hadoop.yarn.server.webapp.dao.ContainerLogsInfo;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringReader;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.apache.hadoop.yarn.webapp.WebServicesTestUtils.assertResponseStatusCode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Tests for the NodeManager node info web services APIs.
*/
public class TestNMWebServices extends JerseyTestBase {
private static final long NM_RESOURCE_VALUE = 1000L;
private static NodeManager.NMContext nmContext;
private static ResourceView resourceView;
private static ApplicationACLsManager aclsManager;
private static LocalDirsHandlerService dirsHandler;
private static WebApp nmWebApp;
private static final String LOGSERVICEWSADDR = "test:1234";
private static final String LOG_MESSAGE = "log message\n";
private static final File testRootDir = new File("target",
TestNMWebServices.class.getSimpleName());
private static File testLogDir = new File("target",
TestNMWebServices.class.getSimpleName() + "LogDir");
private static File testRemoteLogDir = new File("target",
TestNMWebServices.class.getSimpleName() + "remote-log-dir");
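/**
* Guice module that builds a minimal NodeManager context (directories,
* health checker, ACLs manager, resource view) and binds the NM web
* services into the embedded Jersey test container.
*/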
private static class WebServletModule extends ServletModule {
@Override
protected void configureServlets() {
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath());
conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
testRemoteLogDir.getAbsolutePath());
conf.set(YarnConfiguration.YARN_LOG_SERVER_WEBSERVICE_URL,
LOGSERVICEWSADDR);
dirsHandler = new LocalDirsHandlerService();
NodeHealthCheckerService healthChecker =
new NodeHealthCheckerService(dirsHandler);
healthChecker.init(conf);
aclsManager = new ApplicationACLsManager(conf);
nmContext = new NodeManager.NMContext(null, null, dirsHandler,
aclsManager, null, false, conf);
NodeId nodeId = NodeId.newInstance("testhost.foo.com", 8042);
((NodeManager.NMContext)nmContext).setNodeId(nodeId);
resourceView = new ResourceView() {
@Override
public long getVmemAllocatedForContainers() {
// 15.5G in bytes
return new Long("16642998272");
}
@Override
public long getPmemAllocatedForContainers() {
// 16G in bytes
return new Long("17179869184");
}
@Override
public long getVCoresAllocatedForContainers() {
return new Long("4000");
}
@Override
public boolean isVmemCheckEnabled() {
return true;
}
@Override
public boolean isPmemCheckEnabled() {
return true;
}
};
nmWebApp = new NMWebApp(resourceView, aclsManager, dirsHandler);
bind(JAXBContextResolver.class);
bind(NMWebServices.class);
bind(GenericExceptionHandler.class);
bind(Context.class).toInstance(nmContext);
bind(WebApp.class).toInstance(nmWebApp);
bind(ResourceView.class).toInstance(resourceView);
bind(ApplicationACLsManager.class).toInstance(aclsManager);
bind(LocalDirsHandlerService.class).toInstance(dirsHandler);
serve("/*").with(GuiceContainer.class);
}
};
static {
GuiceServletConfig.setInjector(
Guice.createInjector(new WebServletModule()));
}
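/**
* Registers a mock plugin for "resource-1" (also reachable as
* "yarn.io/resource-1") whose NMResourceInfo exposes a single field "a",
* plus a "resource-2" plugin that reports no resource info.
*/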
private void setupMockPluginsWithNmResourceInfo() throws YarnException {
ResourcePlugin mockPlugin1 = mock(ResourcePlugin.class);
NMResourceInfo nmResourceInfo1 = new NMResourceInfo() {
private long a = NM_RESOURCE_VALUE;
public long getA() {
return a;
}
};
when(mockPlugin1.getNMResourceInfo()).thenReturn(nmResourceInfo1);
ResourcePluginManager pluginManager = createResourceManagerWithPlugins(
ImmutableMap.<String, ResourcePlugin>builder()
.put("resource-1", mockPlugin1)
.put("yarn.io/resource-1", mockPlugin1)
.put("resource-2", mock(ResourcePlugin.class))
.build()
);
nmContext.setResourcePluginManager(pluginManager);
}
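/**
* Registers a mock GPU plugin for "resource-1" reporting three total GPU
* devices, two of which are assigned to containers, plus an empty
* "resource-2" plugin.
*/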
private void setupMockPluginsWithGpuResourceInfo() throws YarnException {
GpuDeviceInformation gpuDeviceInformation = new GpuDeviceInformation();
gpuDeviceInformation.setDriverVersion("1.2.3");
gpuDeviceInformation.setGpus(Arrays.asList(new PerGpuDeviceInformation()));
ResourcePlugin mockPlugin1 = mock(ResourcePlugin.class);
List<GpuDevice> totalGpuDevices = Arrays.asList(
new GpuDevice(1, 1), new GpuDevice(2, 2), new GpuDevice(3, 3));
List<AssignedGpuDevice> assignedGpuDevices = Arrays.asList(
new AssignedGpuDevice(2, 2, createContainerId(1)),
new AssignedGpuDevice(3, 3, createContainerId(2)));
NMResourceInfo nmResourceInfo1 = new NMGpuResourceInfo(gpuDeviceInformation,
totalGpuDevices,
assignedGpuDevices);
when(mockPlugin1.getNMResourceInfo()).thenReturn(nmResourceInfo1);
ResourcePluginManager pluginManager = createResourceManagerWithPlugins(
ImmutableMap.<String, ResourcePlugin>builder()
.put("resource-1", mockPlugin1)
.put("yarn.io/resource-1", mockPlugin1)
.put("resource-2", mock(ResourcePlugin.class))
.build()
);
nmContext.setResourcePluginManager(pluginManager);
}
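/** Creates a mock ResourcePluginManager backed by the given plugin map. */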
private ResourcePluginManager createResourceManagerWithPlugins(
Map<String, ResourcePlugin> plugins) {
ResourcePluginManager pluginManager = mock(ResourcePluginManager.class);
when(pluginManager.getNameToPlugins()).thenReturn(plugins);
return pluginManager;
}
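/** Asserts that the response is JSON and that its "a" field carries the expected value. */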
private void assertNMResourceInfoResponse(ClientResponse response, long value)
throws JSONException {
assertEquals("MediaType of the response is not the expected!",
MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("Unexpected value in the json response!", (int) value,
json.get("a"));
}
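/** Asserts that the response is JSON and contains an empty object. */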
private void assertEmptyNMResourceInfo(ClientResponse response) {
assertEquals("MediaType of the response is not the expected!",
MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("Unexpected value in the json response!",
0, json.length());
}
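/** Issues a GET against /ws/v1/node/resources/{resourceName} expecting JSON. */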
private ClientResponse getNMResourceResponse(WebResource resource,
String resourceName) {
return resource.path("ws").path("v1").path("node").path("resources")
.path(resourceName).accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
testRemoteLogDir.mkdir();
testRootDir.mkdirs();
testLogDir.mkdir();
GuiceServletConfig.setInjector(
Guice.createInjector(new WebServletModule()));
}
@AfterClass
static public void stop() {
FileUtil.fullyDelete(testRootDir);
FileUtil.fullyDelete(testLogDir);
FileUtil.fullyDelete(testRemoteLogDir);
}
public TestNMWebServices() {
super(new WebAppDescriptor.Builder(
"org.apache.hadoop.yarn.server.nodemanager.webapp")
.contextListenerClass(GuiceServletConfig.class)
.filterClass(com.google.inject.servlet.GuiceFilter.class)
.contextPath("jersey-guice-filter").servletPath("/").build());
}
@Test
public void testInvalidUri() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.path("ws").path("v1").path("node").path("bogus")
.accept(MediaType.APPLICATION_JSON).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testInvalidAccept() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.path("ws").path("v1").path("node")
.accept(MediaType.TEXT_PLAIN).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertResponseStatusCode(Status.INTERNAL_SERVER_ERROR,
response.getStatusInfo());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testInvalidUri2() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.accept(MediaType.APPLICATION_JSON).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testNode() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testNodeSlash() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node/")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeInfo(json);
}
// make sure default is json output
@Test
public void testNodeDefault() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node")
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testNodeInfo() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node").path("info")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testNodeInfoSlash() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node")
.path("info/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeInfo(json);
}
// make sure default is json output
@Test
public void testNodeInfoDefault() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node").path("info")
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testSingleNodesXML() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("node")
.path("info/").accept(MediaType.APPLICATION_XML)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML+ "; " + JettyUtils.UTF_8,
response.getType().toString());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("nodeInfo");
assertEquals("incorrect number of elements", 1, nodes.getLength());
verifyNodesXML(nodes);
}
@Test (timeout = 5000)
public void testContainerLogsWithNewAPI() throws Exception {
ContainerId containerId0 = BuilderUtils.newContainerId(0, 0, 0, 0);
WebResource r0 = resource();
r0 = r0.path("ws").path("v1").path("node").path("containers")
.path(containerId0.toString()).path("logs");
testContainerLogs(r0, containerId0, LOG_MESSAGE);
ContainerId containerId1 = BuilderUtils.newContainerId(0, 0, 0, 1);
WebResource r1 = resource();
r1 = r1.path("ws").path("v1").path("node").path("containers")
.path(containerId1.toString()).path("logs");
testContainerLogs(r1, containerId1, "");
}
@Test (timeout = 5000)
public void testContainerLogsWithOldAPI() throws Exception {
final ContainerId containerId2 = BuilderUtils.newContainerId(1, 1, 0, 2);
WebResource r = resource();
r = r.path("ws").path("v1").path("node").path("containerlogs")
.path(containerId2.toString());
testContainerLogs(r, containerId2, LOG_MESSAGE);
}
@Test (timeout = 10000)
public void testNMRedirect() {
ApplicationId noExistAppId = ApplicationId.newInstance(
System.currentTimeMillis(), 2000);
ApplicationAttemptId noExistAttemptId = ApplicationAttemptId.newInstance(
noExistAppId, 150);
ContainerId noExistContainerId = ContainerId.newContainerId(
noExistAttemptId, 250);
String fileName = "syslog";
WebResource r = resource();
// check the old api
URI requestURI = r.path("ws").path("v1").path("node")
.path("containerlogs").path(noExistContainerId.toString())
.path(fileName).queryParam("user.name", "user")
.queryParam(YarnWebServiceParams.NM_ID, "localhost:1111")
.getURI();
String redirectURL = getRedirectURL(requestURI.toString());
assertTrue(redirectURL != null);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
assertTrue(redirectURL.contains(noExistContainerId.toString()));
assertTrue(redirectURL.contains("/logs/" + fileName));
assertTrue(redirectURL.contains("user.name=" + "user"));
assertTrue(redirectURL.contains(
YarnWebServiceParams.REDIRECTED_FROM_NODE + "=true"));
assertFalse(redirectURL.contains(YarnWebServiceParams.NM_ID));
// check the new api
requestURI = r.path("ws").path("v1").path("node")
.path("containers").path(noExistContainerId.toString())
.path("logs").path(fileName).queryParam("user.name", "user")
.queryParam(YarnWebServiceParams.NM_ID, "localhost:1111")
.getURI();
redirectURL = getRedirectURL(requestURI.toString());
assertTrue(redirectURL != null);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
assertTrue(redirectURL.contains(noExistContainerId.toString()));
assertTrue(redirectURL.contains("/logs/" + fileName));
assertTrue(redirectURL.contains("user.name=" + "user"));
assertTrue(redirectURL.contains(
YarnWebServiceParams.REDIRECTED_FROM_NODE + "=true"));
assertFalse(redirectURL.contains(YarnWebServiceParams.NM_ID));
requestURI = r.path("ws").path("v1").path("node")
.path("containers").path(noExistContainerId.toString())
.path("logs").queryParam("user.name", "user")
.queryParam(YarnWebServiceParams.NM_ID, "localhost:1111")
.getURI();
redirectURL = getRedirectURL(requestURI.toString());
assertTrue(redirectURL != null);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
assertTrue(redirectURL.contains(noExistContainerId.toString()));
assertTrue(redirectURL.contains("user.name=" + "user"));
assertTrue(redirectURL.contains(
YarnWebServiceParams.REDIRECTED_FROM_NODE + "=true"));
assertFalse(redirectURL.contains(YarnWebServiceParams.NM_ID));
}
@Test
public void testGetNMResourceInfoSuccessful()
throws YarnException, JSONException {
setupMockPluginsWithNmResourceInfo();
WebResource r = resource();
ClientResponse response = getNMResourceResponse(r, "resource-1");
assertNMResourceInfoResponse(response, NM_RESOURCE_VALUE);
}
@Test
public void testGetNMResourceInfoEncodedIsSuccessful()
throws YarnException, JSONException {
setupMockPluginsWithNmResourceInfo();
//test encoded yarn.io/resource-1 path
WebResource r = resource();
ClientResponse response = getNMResourceResponse(r, "yarn.io%2Fresource-1");
assertNMResourceInfoResponse(response, NM_RESOURCE_VALUE);
}
@Test
public void testGetNMResourceInfoFailBecauseOfEmptyResourceInfo()
throws YarnException {
setupMockPluginsWithNmResourceInfo();
WebResource r = resource();
ClientResponse response = getNMResourceResponse(r, "resource-2");
assertEmptyNMResourceInfo(response);
}
@Test
public void testGetNMResourceInfoWhenPluginIsUnknown()
throws YarnException {
setupMockPluginsWithNmResourceInfo();
WebResource r = resource();
ClientResponse response = getNMResourceResponse(r, "resource-3");
assertEmptyNMResourceInfo(response);
}
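/** Builds a ContainerId for application (0, 0), attempt 1, with the given container number. */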
private ContainerId createContainerId(int id) {
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
return ContainerId.newContainerId(appAttemptId, id);
}
@Test
public void testGetYarnGpuResourceInfo()
throws YarnException, JSONException {
setupMockPluginsWithGpuResourceInfo();
WebResource r = resource();
ClientResponse response = getNMResourceResponse(r, "resource-1");
assertEquals("MediaType of the response is not the expected!",
MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("Unexpected driverVersion in the json response!",
"1.2.3",
json.getJSONObject("gpuDeviceInformation").get("driverVersion"));
assertEquals("Unexpected totalGpuDevices in the json response!",
3, json.getJSONArray("totalGpuDevices").length());
assertEquals("Unexpected assignedGpuDevices in the json response!",
2, json.getJSONArray("assignedGpuDevices").length());
}
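/**
* Exercises the container log REST endpoints for one container: writes a
* local log file, fetches it in full and in byte ranges, checks the
* octet-stream and invalid-format paths, verifies the log file listing,
* and finally covers aggregated logs and the completed-container case.
*/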
private void testContainerLogs(WebResource r, ContainerId containerId,
String logMessage) throws Exception {
final String containerIdStr = containerId.toString();
final ApplicationAttemptId appAttemptId = containerId
.getApplicationAttemptId();
final ApplicationId appId = appAttemptId.getApplicationId();
final String appIdStr = appId.toString();
final String filename = "logfile1";
nmContext.getApplications().put(appId, new ApplicationImpl(null, "user",
appId, null, nmContext));
MockContainer container = new MockContainer(appAttemptId,
new AsyncDispatcher(), new Configuration(), "user", appId, 1);
container.setState(ContainerState.RUNNING);
nmContext.getContainers().put(containerId, container);
// write out log file
Path path = dirsHandler.getLogPathForWrite(
ContainerLaunch.getRelativeContainerLogDir(
appIdStr, containerIdStr) + "/" + filename, false);
File logFile = new File(path.toUri().getPath());
logFile.deleteOnExit();
if (logFile.getParentFile().exists()) {
FileUtils.deleteDirectory(logFile.getParentFile());
}
assertTrue("Failed to create log dir", logFile.getParentFile().mkdirs());
PrintWriter pw = new PrintWriter(logFile);
pw.print(logMessage);
pw.close();
// ask for it
ClientResponse response = r.path(filename)
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
String responseText = response.getEntity(String.class);
String responseLogMessage = getLogContext(responseText);
assertEquals(logMessage, responseLogMessage);
int fullTextSize = responseLogMessage.getBytes().length;
// specify how many bytes we should get from logs
// specify a positive number; it will return the first n bytes of the
// container log
response = r.path(filename)
.queryParam("size", "5")
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
responseLogMessage = getLogContext(responseText);
int truncatedLength = Math.min(5, logMessage.getBytes().length);
assertEquals(truncatedLength, responseLogMessage.getBytes().length);
assertEquals(new String(logMessage.getBytes(), 0, truncatedLength),
responseLogMessage);
assertTrue(fullTextSize >= responseLogMessage.getBytes().length);
// specify a byte count larger than the actual file size;
// we should get the full log back
response = r.path(filename)
.queryParam("size", "10000")
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals(fullTextSize, responseLogMessage.getBytes().length);
assertEquals(logMessage, responseLogMessage);
// specify a negative number; it will return the last n bytes of the
// container log
response = r.path(filename)
.queryParam("size", "-5")
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals(truncatedLength, responseLogMessage.getBytes().length);
assertEquals(new String(logMessage.getBytes(),
logMessage.getBytes().length - truncatedLength, truncatedLength),
responseLogMessage);
assertTrue(fullTextSize >= responseLogMessage.getBytes().length);
response = r.path(filename)
.queryParam("size", "-10000")
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals("text/plain; charset=utf-8", response.getType().toString());
assertEquals(fullTextSize, responseLogMessage.getBytes().length);
assertEquals(logMessage, responseLogMessage);
// ask and download it
response = r.path(filename)
.queryParam("format", "octet-stream")
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals(logMessage, responseLogMessage);
assertEquals(200, response.getStatus());
assertEquals("application/octet-stream; charset=utf-8",
response.getType().toString());
// specify an invalid format value
response = r.path(filename)
.queryParam("format", "123")
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
assertEquals("The valid values for the parameter : format are "
+ WebAppUtils.listSupportedLogContentType(), responseText);
assertEquals(400, response.getStatus());
// ask for a file that doesn't exist; the request is redirected to
// the log server
URI requestURI = r.path("uhhh").getURI();
String redirectURL = getRedirectURL(requestURI.toString());
assertTrue(redirectURL != null);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
// Get container log files' name
WebResource r1 = resource();
response = r1.path("ws").path("v1").path("node")
.path("containers").path(containerIdStr)
.path("logs").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(200, response.getStatus());
List<ContainerLogsInfo> responseList = response.getEntity(new GenericType<
List<ContainerLogsInfo>>(){});
assertTrue(responseList.size() == 1);
assertEquals(responseList.get(0).getLogType(),
ContainerLogAggregationType.LOCAL.toString());
List<ContainerLogFileInfo> logMeta = responseList.get(0)
.getContainerLogsInfo();
assertTrue(logMeta.size() == 1);
assertThat(logMeta.get(0).getFileName()).isEqualTo(filename);
// now create an aggregated log in Remote File system
File tempLogDir = new File("target",
TestNMWebServices.class.getSimpleName() + "temp-log-dir");
try {
String aggregatedLogFile = filename + "-aggregated";
String aggregatedLogMessage = "This is an aggregated log.";
TestContainerLogsUtils.createContainerLogFileInRemoteFS(
nmContext.getConf(), FileSystem.get(nmContext.getConf()),
tempLogDir.getAbsolutePath(), containerId, nmContext.getNodeId(),
aggregatedLogFile, "user", aggregatedLogMessage, true);
r1 = resource();
response = r1.path("ws").path("v1").path("node")
.path("containers").path(containerIdStr)
.path("logs").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(200, response.getStatus());
responseList = response.getEntity(new GenericType<
List<ContainerLogsInfo>>(){});
assertThat(responseList).hasSize(2);
for (ContainerLogsInfo logInfo : responseList) {
if(logInfo.getLogType().equals(
ContainerLogAggregationType.AGGREGATED.toString())) {
List<ContainerLogFileInfo> meta = logInfo.getContainerLogsInfo();
assertTrue(meta.size() == 1);
assertThat(meta.get(0).getFileName()).isEqualTo(aggregatedLogFile);
} else {
assertEquals(logInfo.getLogType(),
ContainerLogAggregationType.LOCAL.toString());
List<ContainerLogFileInfo> meta = logInfo.getContainerLogsInfo();
assertTrue(meta.size() == 1);
assertThat(meta.get(0).getFileName()).isEqualTo(filename);
}
}
// Test whether we could get aggregated log as well
TestContainerLogsUtils.createContainerLogFileInRemoteFS(
nmContext.getConf(), FileSystem.get(nmContext.getConf()),
tempLogDir.getAbsolutePath(), containerId, nmContext.getNodeId(),
filename, "user", aggregatedLogMessage, true);
response = r.path(filename)
.accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
responseText = response.getEntity(String.class);
assertTrue(responseText.contains("LogAggregationType: "
+ ContainerLogAggregationType.AGGREGATED));
assertTrue(responseText.contains(aggregatedLogMessage));
assertTrue(responseText.contains("LogAggregationType: "
+ ContainerLogAggregationType.LOCAL));
assertTrue(responseText.contains(logMessage));
} finally {
FileUtil.fullyDelete(tempLogDir);
}
// After container is completed, it is removed from nmContext
nmContext.getContainers().remove(containerId);
assertNull(nmContext.getContainers().get(containerId));
response =
r.path(filename).accept(MediaType.TEXT_PLAIN)
.get(ClientResponse.class);
responseText = response.getEntity(String.class);
assertTrue(responseText.contains(logMessage));
}
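/** Verifies every nodeInfo element of the XML response. */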
public void verifyNodesXML(NodeList nodes) throws JSONException, Exception {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyNodeInfoGeneric(WebServicesTestUtils.getXmlString(element, "id"),
WebServicesTestUtils.getXmlString(element, "healthReport"),
WebServicesTestUtils.getXmlLong(element,
"totalVmemAllocatedContainersMB"),
WebServicesTestUtils.getXmlLong(element,
"totalPmemAllocatedContainersMB"),
WebServicesTestUtils.getXmlLong(element,
"totalVCoresAllocatedContainers"),
WebServicesTestUtils.getXmlBoolean(element, "vmemCheckEnabled"),
WebServicesTestUtils.getXmlBoolean(element, "pmemCheckEnabled"),
WebServicesTestUtils.getXmlLong(element, "lastNodeUpdateTime"),
WebServicesTestUtils.getXmlBoolean(element, "nodeHealthy"),
WebServicesTestUtils.getXmlString(element, "nodeHostName"),
WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
WebServicesTestUtils.getXmlString(element, "hadoopVersion"),
WebServicesTestUtils.getXmlString(element,
"nodeManagerVersionBuiltOn"), WebServicesTestUtils.getXmlString(
element, "nodeManagerBuildVersion"),
WebServicesTestUtils.getXmlString(element, "nodeManagerVersion"),
WebServicesTestUtils.getXmlString(element, "resourceTypes"));
}
}
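/** Verifies the nodeInfo object of the JSON response. */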
public void verifyNodeInfo(JSONObject json) throws JSONException, Exception {
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("nodeInfo");
assertEquals("incorrect number of elements", 18, info.length());
verifyNodeInfoGeneric(info.getString("id"), info.getString("healthReport"),
info.getLong("totalVmemAllocatedContainersMB"),
info.getLong("totalPmemAllocatedContainersMB"),
info.getLong("totalVCoresAllocatedContainers"),
info.getBoolean("vmemCheckEnabled"),
info.getBoolean("pmemCheckEnabled"),
info.getLong("lastNodeUpdateTime"), info.getBoolean("nodeHealthy"),
info.getString("nodeHostName"), info.getString("hadoopVersionBuiltOn"),
info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"),
info.getString("nodeManagerVersionBuiltOn"),
info.getString("nodeManagerBuildVersion"),
info.getString("nodeManagerVersion"),
info.getString("resourceTypes")
);
}
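/** Checks individual nodeInfo fields against the values configured for this test NodeManager. */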
public void verifyNodeInfoGeneric(String id, String healthReport,
long totalVmemAllocatedContainersMB, long totalPmemAllocatedContainersMB,
long totalVCoresAllocatedContainers,
boolean vmemCheckEnabled, boolean pmemCheckEnabled,
long lastNodeUpdateTime, Boolean nodeHealthy, String nodeHostName,
String hadoopVersionBuiltOn, String hadoopBuildVersion,
String hadoopVersion, String resourceManagerVersionBuiltOn,
String resourceManagerBuildVersion, String resourceManagerVersion,
String resourceTypes) {
WebServicesTestUtils.checkStringMatch("id", "testhost.foo.com:8042", id);
WebServicesTestUtils.checkStringMatch("healthReport", "Healthy",
healthReport);
assertEquals("totalVmemAllocatedContainersMB incorrect", 15872,
totalVmemAllocatedContainersMB);
assertEquals("totalPmemAllocatedContainersMB incorrect", 16384,
totalPmemAllocatedContainersMB);
assertEquals("totalVCoresAllocatedContainers incorrect", 4000,
totalVCoresAllocatedContainers);
assertEquals("vmemCheckEnabled incorrect", true, vmemCheckEnabled);
assertEquals("pmemCheckEnabled incorrect", true, pmemCheckEnabled);
assertTrue("lastNodeUpdateTime incorrect", lastNodeUpdateTime == nmContext
.getNodeHealthStatus().getLastHealthReportTime());
assertTrue("nodeHealthy isn't true", nodeHealthy);
WebServicesTestUtils.checkStringMatch("nodeHostName", "testhost.foo.com",
nodeHostName);
WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
VersionInfo.getDate(), hadoopVersionBuiltOn);
WebServicesTestUtils.checkStringEqual("hadoopBuildVersion",
VersionInfo.getBuildVersion(), hadoopBuildVersion);
WebServicesTestUtils.checkStringMatch("hadoopVersion",
VersionInfo.getVersion(), hadoopVersion);
WebServicesTestUtils.checkStringMatch("resourceManagerVersionBuiltOn",
YarnVersionInfo.getDate(), resourceManagerVersionBuiltOn);
WebServicesTestUtils.checkStringEqual("resourceManagerBuildVersion",
YarnVersionInfo.getBuildVersion(), resourceManagerBuildVersion);
WebServicesTestUtils.checkStringMatch("resourceManagerVersion",
YarnVersionInfo.getVersion(), resourceManagerVersion);
assertEquals("memory-mb (unit=Mi), vcores", resourceTypes);
}
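/** Extracts the log body between the "LogContents:" and "End of LogType:" markers. */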
private String getLogContext(String fullMessage) {
String prefix = "LogContents:\n";
String postfix = "End of LogType:";
int prefixIndex = fullMessage.indexOf(prefix) + prefix.length();
int postfixIndex = fullMessage.indexOf(postfix);
return fullMessage.substring(prefixIndex, postfixIndex);
}
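/**
* Issues a GET without following redirects and returns the Location header
* of a temporary-redirect response, or null if no redirect was returned.
*/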
private static String getRedirectURL(String url) {
String redirectUrl = null;
try {
HttpURLConnection conn = (HttpURLConnection) new URL(url)
.openConnection();
// do not automatically follow the redirection;
// otherwise we would get a too-many-redirections exception
conn.setInstanceFollowRedirects(false);
if(conn.getResponseCode() == HttpServletResponse.SC_TEMPORARY_REDIRECT) {
redirectUrl = conn.getHeaderField("Location");
}
} catch (Exception e) {
// throw new RuntimeException(e);
}
return redirectUrl;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.manager.oracle;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.manager.oracle.OraOopConstants.
OraOopOracleDataChunkMethod;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Test import data from Oracle.
*/
public class ImportTest extends OraOopTestCase {
private static final boolean DISABLE_ORACLE_ESCAPING_FLAG = false;
@Test
public void testProductImport() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product");
createTable("table_tst_product.xml");
try {
int retCode = runImport("TST_PRODUCT", getSqoopConf(), false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductWithWhiteSpaceImport() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product");
createTable("table_tst_product_with_white_space.xml");
try {
int retCode = runImport("TST_Pr OdUCT", getSqoopConf(), false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductPartImport() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_part");
createTable("table_tst_product_part.xml");
try {
int retCode = runImport("TST_PRODUCT_PART", getSqoopConf(), false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductPartImportPartitionChunk() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_part");
createTable("table_tst_product_part.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopConstants.OraOopOracleDataChunkMethod.PARTITION.toString());
try {
int retCode = runImport("TST_PRODUCT_PART", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductPartImportSubset() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_part");
createTable("table_tst_product_part.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopOracleDataChunkMethod.ROWID.toString());
sqoopConf.set(OraOopConstants.ORAOOP_IMPORT_PARTITION_LIST,
"tst_product_part_1,tst_product_part_2,\"tst_product_pa#rt_6\"");
try {
int retCode = runImport("TST_PRODUCT_PART", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductPartImportSubsetPartitionChunk() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_part");
createTable("table_tst_product_part.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopOracleDataChunkMethod.PARTITION.toString());
sqoopConf
.set(
OraOopConstants.ORAOOP_IMPORT_PARTITION_LIST,
"tst_product_part_1,tst_product_part_2,"
+"tst_product_part_3,\"tst_product_pa#rt_6\"");
try {
int retCode = runImport("TST_PRODUCT_PART", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductSubPartImport() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_subpart");
createTable("table_tst_product_subpart.xml");
try {
int retCode = runImport("TST_PRODUCT_SUBPART", getSqoopConf(), false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductSubPartImportPartitionChunk() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_subpart");
createTable("table_tst_product_subpart.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopConstants.OraOopOracleDataChunkMethod.PARTITION.toString());
try {
int retCode = runImport("TST_PRODUCT_SUBPART", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductSubPartImportSubset() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_subpart");
createTable("table_tst_product_subpart.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopOracleDataChunkMethod.ROWID.toString());
sqoopConf
.set(OraOopConstants.ORAOOP_IMPORT_PARTITION_LIST,
"TST_PRODUCT_PART_1,TST_PRODUCT_PART_2,"
+"TST_PRODUCT_PART_3,TST_PRODUCT_PART_4");
try {
int retCode = runImport("TST_PRODUCT_SUBPART", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductSubPartImportSubsetPartitionChunk() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_subpart");
createTable("table_tst_product_subpart.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopConstants.OraOopOracleDataChunkMethod.PARTITION.toString());
sqoopConf.set(OraOopConstants.ORAOOP_IMPORT_PARTITION_LIST,
"TST_PRODUCT_PART_1,TST_PRODUCT_PART_2,TST_PRODUCT_PART_3");
try {
int retCode = runImport("TST_PRODUCT_SUBPART", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductImportConsistentRead() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product");
createTable("table_tst_product.xml");
// Make sure Oracle SCN has updated since creating table
Thread.sleep(10000);
Configuration sqoopConf = getSqoopConf();
sqoopConf.setBoolean(OraOopConstants.ORAOOP_IMPORT_CONSISTENT_READ, true);
try {
int retCode = runImport("TST_PRODUCT", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductImportMixedCaseTableName() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "T5+_Pr#duct");
createTable("table_tst_product_special_chars.xml");
try {
int retCode = runImport("\"\"T5+_Pr#duct\"\"", getSqoopConf(), false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
@Test
public void testProductPartIotImport() throws Exception {
setSqoopTargetDirectory(getSqoopTargetDirectory() + "tst_product_part");
createTable("table_tst_product_part_iot.xml");
Configuration sqoopConf = getSqoopConf();
sqoopConf.set(OraOopConstants.ORAOOP_ORACLE_DATA_CHUNK_METHOD,
OraOopConstants.OraOopOracleDataChunkMethod.PARTITION.toString());
try {
int retCode = runImport("TST_PRODUCT_PART_IOT", sqoopConf, false, DISABLE_ORACLE_ESCAPING_FLAG);
assertEquals("Return code should be 0", 0, retCode);
} finally {
cleanupFolders();
closeTestEnvConnection();
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.requests.FetchRequest;
import org.apache.kafka.common.requests.FetchResponse;
import org.apache.kafka.common.utils.LogContext;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import static org.apache.kafka.common.requests.FetchMetadata.INITIAL_EPOCH;
import static org.apache.kafka.common.requests.FetchMetadata.INVALID_SESSION_ID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* A unit test for FetchSessionHandler.
*/
public class FetchSessionHandlerTest {
@Rule
final public Timeout globalTimeout = Timeout.millis(120000);
private static final LogContext LOG_CONTEXT = new LogContext("[FetchSessionHandler]=");
/**
* Create a set of TopicPartitions. We use a TreeSet, in order to get a deterministic
* ordering for test purposes.
*/
private static Set<TopicPartition> toSet(TopicPartition... arr) {
TreeSet<TopicPartition> set = new TreeSet<>(new Comparator<TopicPartition>() {
@Override
public int compare(TopicPartition o1, TopicPartition o2) {
return o1.toString().compareTo(o2.toString());
}
});
set.addAll(Arrays.asList(arr));
return set;
}
@Test
public void testFindMissing() {
TopicPartition foo0 = new TopicPartition("foo", 0);
TopicPartition foo1 = new TopicPartition("foo", 1);
TopicPartition bar0 = new TopicPartition("bar", 0);
TopicPartition bar1 = new TopicPartition("bar", 1);
TopicPartition baz0 = new TopicPartition("baz", 0);
TopicPartition baz1 = new TopicPartition("baz", 1);
assertEquals(toSet(), FetchSessionHandler.findMissing(toSet(foo0), toSet(foo0)));
assertEquals(toSet(foo0), FetchSessionHandler.findMissing(toSet(foo0), toSet(foo1)));
assertEquals(toSet(foo0, foo1),
FetchSessionHandler.findMissing(toSet(foo0, foo1), toSet(baz0)));
assertEquals(toSet(bar1, foo0, foo1),
FetchSessionHandler.findMissing(toSet(foo0, foo1, bar0, bar1),
toSet(bar0, baz0, baz1)));
assertEquals(toSet(),
FetchSessionHandler.findMissing(toSet(foo0, foo1, bar0, bar1, baz1),
toSet(foo0, foo1, bar0, bar1, baz0, baz1)));
}
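/** Pairs a TopicPartition with the FetchRequest.PartitionData used to request it. */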
private static final class ReqEntry {
final TopicPartition part;
final FetchRequest.PartitionData data;
ReqEntry(String topic, int partition, long fetchOffset, long logStartOffset, int maxBytes) {
this.part = new TopicPartition(topic, partition);
this.data = new FetchRequest.PartitionData(fetchOffset, logStartOffset, maxBytes, Optional.empty());
}
}
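/** Builds an insertion-ordered request map from the given entries. */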
private static LinkedHashMap<TopicPartition, FetchRequest.PartitionData> reqMap(ReqEntry... entries) {
LinkedHashMap<TopicPartition, FetchRequest.PartitionData> map = new LinkedHashMap<>();
for (ReqEntry entry : entries) {
map.put(entry.part, entry.data);
}
return map;
}
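/** Asserts that two partition-data maps contain the same entries in the same order. */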
private static void assertMapEquals(Map<TopicPartition, FetchRequest.PartitionData> expected,
Map<TopicPartition, FetchRequest.PartitionData> actual) {
Iterator<Map.Entry<TopicPartition, FetchRequest.PartitionData>> expectedIter =
expected.entrySet().iterator();
Iterator<Map.Entry<TopicPartition, FetchRequest.PartitionData>> actualIter =
actual.entrySet().iterator();
int i = 1;
while (expectedIter.hasNext()) {
Map.Entry<TopicPartition, FetchRequest.PartitionData> expectedEntry = expectedIter.next();
if (!actualIter.hasNext()) {
fail("Element " + i + " not found.");
}
Map.Entry<TopicPartition, FetchRequest.PartitionData> actuaLEntry = actualIter.next();
assertEquals("Element " + i + " had a different TopicPartition than expected.",
expectedEntry.getKey(), actuaLEntry.getKey());
assertEquals("Element " + i + " had different PartitionData than expected.",
expectedEntry.getValue(), actuaLEntry.getValue());
i++;
}
if (actualIter.hasNext()) {
fail("Unexpected element " + i + " found.");
}
}
@SafeVarargs
private static void assertMapsEqual(Map<TopicPartition, FetchRequest.PartitionData> expected,
Map<TopicPartition, FetchRequest.PartitionData>... actuals) {
for (Map<TopicPartition, FetchRequest.PartitionData> actual : actuals) {
assertMapEquals(expected, actual);
}
}
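/** Asserts that two partition lists contain the same elements, ignoring order. */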
private static void assertListEquals(List<TopicPartition> expected, List<TopicPartition> actual) {
for (TopicPartition expectedPart : expected) {
if (!actual.contains(expectedPart)) {
fail("Failed to find expected partition " + expectedPart);
}
}
for (TopicPartition actualPart : actual) {
if (!expected.contains(actualPart)) {
fail("Found unexpected partition " + actualPart);
}
}
}
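/** Pairs a TopicPartition with an error-free FetchResponse.PartitionData. */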
private static final class RespEntry {
final TopicPartition part;
final FetchResponse.PartitionData<MemoryRecords> data;
RespEntry(String topic, int partition, long highWatermark, long lastStableOffset) {
this.part = new TopicPartition(topic, partition);
this.data = new FetchResponse.PartitionData<>(
Errors.NONE,
highWatermark,
lastStableOffset,
0,
null,
null);
}
}
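/** Builds an insertion-ordered response map from the given entries. */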
private static LinkedHashMap<TopicPartition, FetchResponse.PartitionData<MemoryRecords>> respMap(RespEntry... entries) {
LinkedHashMap<TopicPartition, FetchResponse.PartitionData<MemoryRecords>> map = new LinkedHashMap<>();
for (RespEntry entry : entries) {
map.put(entry.part, entry.data);
}
return map;
}
/**
* Test the handling of SESSIONLESS responses.
* Pre-KIP-227 brokers always supply this kind of response.
*/
@Test
public void testSessionless() {
FetchSessionHandler handler = new FetchSessionHandler(LOG_CONTEXT, 1);
FetchSessionHandler.Builder builder = handler.newBuilder();
builder.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
builder.add(new TopicPartition("foo", 1),
new FetchRequest.PartitionData(10, 110, 210, Optional.empty()));
FetchSessionHandler.FetchRequestData data = builder.build();
assertMapsEqual(reqMap(new ReqEntry("foo", 0, 0, 100, 200),
new ReqEntry("foo", 1, 10, 110, 210)),
data.toSend(), data.sessionPartitions());
assertEquals(INVALID_SESSION_ID, data.metadata().sessionId());
assertEquals(INITIAL_EPOCH, data.metadata().epoch());
FetchResponse<MemoryRecords> resp = new FetchResponse<>(Errors.NONE,
respMap(new RespEntry("foo", 0, 0, 0),
new RespEntry("foo", 1, 0, 0)),
0, INVALID_SESSION_ID);
handler.handleResponse(resp);
FetchSessionHandler.Builder builder2 = handler.newBuilder();
builder2.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
FetchSessionHandler.FetchRequestData data2 = builder2.build();
assertEquals(INVALID_SESSION_ID, data2.metadata().sessionId());
assertEquals(INITIAL_EPOCH, data2.metadata().epoch());
assertMapsEqual(reqMap(new ReqEntry("foo", 0, 0, 100, 200)),
data.toSend(), data.sessionPartitions());
}
/**
* Test handling an incremental fetch session.
*/
@Test
public void testIncrementals() {
FetchSessionHandler handler = new FetchSessionHandler(LOG_CONTEXT, 1);
FetchSessionHandler.Builder builder = handler.newBuilder();
builder.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
builder.add(new TopicPartition("foo", 1),
new FetchRequest.PartitionData(10, 110, 210, Optional.empty()));
FetchSessionHandler.FetchRequestData data = builder.build();
assertMapsEqual(reqMap(new ReqEntry("foo", 0, 0, 100, 200),
new ReqEntry("foo", 1, 10, 110, 210)),
data.toSend(), data.sessionPartitions());
assertEquals(INVALID_SESSION_ID, data.metadata().sessionId());
assertEquals(INITIAL_EPOCH, data.metadata().epoch());
FetchResponse<MemoryRecords> resp = new FetchResponse<>(Errors.NONE,
respMap(new RespEntry("foo", 0, 10, 20),
new RespEntry("foo", 1, 10, 20)),
0, 123);
handler.handleResponse(resp);
// Test an incremental fetch request which adds one partition and modifies another.
FetchSessionHandler.Builder builder2 = handler.newBuilder();
builder2.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
builder2.add(new TopicPartition("foo", 1),
new FetchRequest.PartitionData(10, 120, 210, Optional.empty()));
builder2.add(new TopicPartition("bar", 0),
new FetchRequest.PartitionData(20, 200, 200, Optional.empty()));
FetchSessionHandler.FetchRequestData data2 = builder2.build();
assertFalse(data2.metadata().isFull());
assertMapEquals(reqMap(new ReqEntry("foo", 0, 0, 100, 200),
new ReqEntry("foo", 1, 10, 120, 210),
new ReqEntry("bar", 0, 20, 200, 200)),
data2.sessionPartitions());
assertMapEquals(reqMap(new ReqEntry("bar", 0, 20, 200, 200),
new ReqEntry("foo", 1, 10, 120, 210)),
data2.toSend());
FetchResponse<MemoryRecords> resp2 = new FetchResponse<>(Errors.NONE,
respMap(new RespEntry("foo", 1, 20, 20)),
0, 123);
handler.handleResponse(resp2);
// Skip building a new request. Test that handling an invalid fetch session epoch response results
// in a request which closes the session.
FetchResponse<MemoryRecords> resp3 = new FetchResponse<>(Errors.INVALID_FETCH_SESSION_EPOCH, respMap(),
0, INVALID_SESSION_ID);
handler.handleResponse(resp3);
FetchSessionHandler.Builder builder4 = handler.newBuilder();
builder4.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
builder4.add(new TopicPartition("foo", 1),
new FetchRequest.PartitionData(10, 120, 210, Optional.empty()));
builder4.add(new TopicPartition("bar", 0),
new FetchRequest.PartitionData(20, 200, 200, Optional.empty()));
FetchSessionHandler.FetchRequestData data4 = builder4.build();
assertTrue(data4.metadata().isFull());
assertEquals(data2.metadata().sessionId(), data4.metadata().sessionId());
assertEquals(INITIAL_EPOCH, data4.metadata().epoch());
assertMapsEqual(reqMap(new ReqEntry("foo", 0, 0, 100, 200),
new ReqEntry("foo", 1, 10, 120, 210),
new ReqEntry("bar", 0, 20, 200, 200)),
data4.sessionPartitions(), data4.toSend());
}
/**
* Test that calling FetchSessionHandler#Builder#build twice fails.
*/
@Test
public void testDoubleBuild() {
FetchSessionHandler handler = new FetchSessionHandler(LOG_CONTEXT, 1);
FetchSessionHandler.Builder builder = handler.newBuilder();
builder.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
builder.build();
try {
builder.build();
fail("Expected calling build twice to fail.");
} catch (Throwable t) {
// expected
}
}
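/**
* Test an incremental fetch session where partitions are removed, and
* where a FETCH_SESSION_ID_NOT_FOUND error forces a new full request.
*/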
@Test
public void testIncrementalPartitionRemoval() {
FetchSessionHandler handler = new FetchSessionHandler(LOG_CONTEXT, 1);
FetchSessionHandler.Builder builder = handler.newBuilder();
builder.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
builder.add(new TopicPartition("foo", 1),
new FetchRequest.PartitionData(10, 110, 210, Optional.empty()));
builder.add(new TopicPartition("bar", 0),
new FetchRequest.PartitionData(20, 120, 220, Optional.empty()));
FetchSessionHandler.FetchRequestData data = builder.build();
assertMapsEqual(reqMap(new ReqEntry("foo", 0, 0, 100, 200),
new ReqEntry("foo", 1, 10, 110, 210),
new ReqEntry("bar", 0, 20, 120, 220)),
data.toSend(), data.sessionPartitions());
assertTrue(data.metadata().isFull());
FetchResponse<MemoryRecords> resp = new FetchResponse<>(Errors.NONE,
respMap(new RespEntry("foo", 0, 10, 20),
new RespEntry("foo", 1, 10, 20),
new RespEntry("bar", 0, 10, 20)),
0, 123);
handler.handleResponse(resp);
// Test an incremental fetch request which removes two partitions.
FetchSessionHandler.Builder builder2 = handler.newBuilder();
builder2.add(new TopicPartition("foo", 1),
new FetchRequest.PartitionData(10, 110, 210, Optional.empty()));
FetchSessionHandler.FetchRequestData data2 = builder2.build();
assertFalse(data2.metadata().isFull());
assertEquals(123, data2.metadata().sessionId());
assertEquals(1, data2.metadata().epoch());
assertMapEquals(reqMap(new ReqEntry("foo", 1, 10, 110, 210)),
data2.sessionPartitions());
assertMapEquals(reqMap(), data2.toSend());
ArrayList<TopicPartition> expectedToForget2 = new ArrayList<>();
expectedToForget2.add(new TopicPartition("foo", 0));
expectedToForget2.add(new TopicPartition("bar", 0));
assertListEquals(expectedToForget2, data2.toForget());
// A FETCH_SESSION_ID_NOT_FOUND response triggers us to close the session.
// The next request is a session establishing FULL request.
FetchResponse<MemoryRecords> resp2 = new FetchResponse<>(Errors.FETCH_SESSION_ID_NOT_FOUND,
respMap(), 0, INVALID_SESSION_ID);
handler.handleResponse(resp2);
FetchSessionHandler.Builder builder3 = handler.newBuilder();
builder3.add(new TopicPartition("foo", 0),
new FetchRequest.PartitionData(0, 100, 200, Optional.empty()));
FetchSessionHandler.FetchRequestData data3 = builder3.build();
assertTrue(data3.metadata().isFull());
assertEquals(INVALID_SESSION_ID, data3.metadata().sessionId());
assertEquals(INITIAL_EPOCH, data3.metadata().epoch());
assertMapsEqual(reqMap(new ReqEntry("foo", 0, 0, 100, 200)),
data3.sessionPartitions(), data3.toSend());
}
}
|
|
/*
Copyright (c) 2010, NullNoname
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of NullNoname nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package mu.nu.nullpo.gui.slick;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.util.Arrays;
import java.util.LinkedList;
import mu.nu.nullpo.util.CustomProperties;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.state.StateBasedGame;
/**
* Rule selector state
*/
public class StateConfigRuleSelect extends DummyMenuScrollState {
/** This state's ID */
public static final int ID = 7;
/** Number of rules shown at a time */
public static final int PAGE_HEIGHT = 21;
/** Player ID */
public int player = 0;
/** Game style ID */
public int style = 0;
/** Rule file list (for loading) */
private String[] strFileList;
/** Rule name list */
private String[] strRuleNameList;
/** Rule file list (for list display) */
private String[] strRuleFileList;
/** Current Rule File name */
private String strCurrentFileName;
/** Current Rule name */
private String strCurrentRuleName;
/** Rule entries */
private LinkedList<RuleEntry> ruleEntries;
/**
* Constructor
*/
public StateConfigRuleSelect() {
pageHeight = PAGE_HEIGHT;
nullError = "RULE DIRECTORY NOT FOUND";
emptyError = "NO RULE FILE";
}
/*
* Fetch this state's ID
*/
@Override
public int getID() {
return ID;
}
/**
* Get rule file list
* @return Rule file list. null if directory doesn't exist.
*/
private String[] getRuleFileList() {
File dir = new File("config/rule");
FilenameFilter filter = new FilenameFilter() {
public boolean accept(File dir1, String name) {
return name.endsWith(".rul");
}
};
String[] list = dir.list(filter);
if((list != null) && !System.getProperty("os.name").startsWith("Windows")) {
// Sort if not windows
Arrays.sort(list);
}
return list;
}
/**
* Create rule entries
* @param filelist Rule file list
* @param currentStyle Current style
*/
private void createRuleEntries(String[] filelist, int currentStyle) {
ruleEntries = new LinkedList<RuleEntry>();
for(int i = 0; i < filelist.length; i++) {
RuleEntry entry = new RuleEntry();
File file = new File("config/rule/" + filelist[i]);
entry.filename = filelist[i];
entry.filepath = file.getPath();
CustomProperties prop = new CustomProperties();
try {
FileInputStream in = new FileInputStream("config/rule/" + filelist[i]);
prop.load(in);
in.close();
entry.rulename = prop.getProperty("0.ruleopt.strRuleName", "");
entry.style = prop.getProperty("0.ruleopt.style", 0);
} catch (Exception e) {
entry.rulename = "";
entry.style = -1;
}
if(entry.style == currentStyle) {
ruleEntries.add(entry);
}
}
}
/**
* Get rule name list as String[]
* @return Rule name list
*/
private String[] extractRuleNameListFromRuleEntries() {
String[] result = new String[ruleEntries.size()];
for(int i = 0; i < ruleEntries.size(); i++) {
result[i] = ruleEntries.get(i).rulename;
}
return result;
}
/**
* Get rule file name list as String[]
* @return Rule file name list
*/
private String[] extractFileNameListFromRuleEntries() {
String[] result = new String[ruleEntries.size()];
for(int i = 0; i < ruleEntries.size(); i++) {
result[i] = ruleEntries.get(i).filename;
}
return result;
}
/*
* Called when entering this state
*/
@Override
public void enter(GameContainer container, StateBasedGame game) throws SlickException {
strFileList = getRuleFileList();
createRuleEntries(strFileList, style);
strRuleNameList = extractRuleNameListFromRuleEntries();
strRuleFileList = extractFileNameListFromRuleEntries();
list = strRuleNameList;
maxCursor = list.length-1;
if(style == 0) {
strCurrentFileName = NullpoMinoSlick.propGlobal.getProperty(player + ".rulefile", "");
strCurrentRuleName = NullpoMinoSlick.propGlobal.getProperty(player + ".rulename", "");
} else {
strCurrentFileName = NullpoMinoSlick.propGlobal.getProperty(player + ".rulefile." + style, "");
strCurrentRuleName = NullpoMinoSlick.propGlobal.getProperty(player + ".rulename." + style, "");
}
cursor = 0;
for(int i = 0; i < ruleEntries.size(); i++) {
if(ruleEntries.get(i).filename.equals(strCurrentFileName)) {
cursor = i;
}
}
}
/*
* State initialization
*/
public void init(GameContainer container, StateBasedGame game) throws SlickException {
}
/*
* Draw the screen
*/
@Override
protected void onRenderSuccess (GameContainer container, StateBasedGame game, Graphics graphics) {
String title = "SELECT " + (player + 1) + "P RULE (" + (cursor + 1) + "/" + (list.length) + ")";
NormalFontSlick.printFontGrid(1, 1, title, NormalFontSlick.COLOR_ORANGE);
NormalFontSlick.printFontGrid(1, 25, "CURRENT:" + strCurrentRuleName.toUpperCase(), NormalFontSlick.COLOR_BLUE);
NormalFontSlick.printFontGrid(9, 26, strCurrentFileName.toUpperCase(), NormalFontSlick.COLOR_BLUE);
NormalFontSlick.printFontGrid(1, 28, "A:OK B:CANCEL D:TOGGLE-VIEW", NormalFontSlick.COLOR_GREEN);
}
/*
* Decide
*/
@Override
protected boolean onDecide(GameContainer container, StateBasedGame game, int delta) {
ResourceHolderSlick.soundManager.play("decide");
RuleEntry entry = ruleEntries.get(cursor);
if(style == 0) {
NullpoMinoSlick.propGlobal.setProperty(player + ".rule", entry.filepath);
NullpoMinoSlick.propGlobal.setProperty(player + ".rulefile", entry.filename);
NullpoMinoSlick.propGlobal.setProperty(player + ".rulename", entry.rulename);
} else {
NullpoMinoSlick.propGlobal.setProperty(player + ".rule." + style, entry.filepath);
NullpoMinoSlick.propGlobal.setProperty(player + ".rulefile." + style, entry.filename);
NullpoMinoSlick.propGlobal.setProperty(player + ".rulename." + style, entry.rulename);
}
NullpoMinoSlick.saveConfig();
game.enterState(StateConfigRuleStyleSelect.ID);
return true;
}
/*
* Cancel
*/
@Override
protected boolean onCancel(GameContainer container, StateBasedGame game, int delta) {
game.enterState(StateConfigRuleStyleSelect.ID);
return true;
}
/*
* D button
*/
@Override
protected boolean onPushButtonD(GameContainer container, StateBasedGame game, int delta) {
ResourceHolderSlick.soundManager.play("change");
if(list == strRuleNameList) {
list = strRuleFileList;
} else {
list = strRuleNameList;
}
return false;
}
/**
* Rule entry
*/
private class RuleEntry {
/** File name */
public String filename;
/** File path */
public String filepath;
/** Rule name */
public String rulename;
/** Game style */
public int style;
}
}
|
|
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.AbstractScope.ImplicitVar;
import com.google.javascript.jscomp.NodeTraversal.AbstractShallowCallback;
import com.google.javascript.jscomp.ReferenceCollector.Behavior;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
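// Illustrative examples (an addition, not from the original source) of JS patterns this pass
// reports, matched to the diagnostics declared below:
//   alert(a); var a = 2;     // EARLY_REFERENCE: var referenced before its declaration (warning)
//   alert(b); let b = 2;     // EARLY_REFERENCE_ERROR: block-scoped name used before declaration
//   var x = 1; var x = 2;    // REDECLARED_VARIABLE: suspicious redeclaration in one block
//   const c = 1; c = 2;      // REASSIGNED_CONSTANT: constant reassigned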
/**
* Checks variables to see if they are referenced before their declaration, or if they are
* redeclared in a way that is suspicious (i.e. not dictated by control structures). This is a more
* aggressive version of {@link VarCheck}, but it lacks the cross-module checks.
*/
class VariableReferenceCheck implements CompilerPass {
static final DiagnosticType EARLY_REFERENCE =
DiagnosticType.warning(
"JSC_REFERENCE_BEFORE_DECLARE", "Variable referenced before declaration: {0}");
static final DiagnosticType EARLY_EXPORTS_REFERENCE =
DiagnosticType.error(
"JSC_EXPORTS_REFERENCE_BEFORE_ASSIGN",
"Illegal reference to `exports` before assignment `exports = ...`");
static final DiagnosticType REDECLARED_VARIABLE =
DiagnosticType.warning("JSC_REDECLARED_VARIABLE", "Redeclared variable: {0}");
static final DiagnosticType EARLY_REFERENCE_ERROR =
DiagnosticType.error(
"JSC_REFERENCE_BEFORE_DECLARE_ERROR",
"Illegal variable reference before declaration: {0}");
static final DiagnosticType REASSIGNED_CONSTANT =
DiagnosticType.error("JSC_REASSIGNED_CONSTANT", "Constant reassigned: {0}");
static final DiagnosticType REDECLARED_VARIABLE_ERROR =
DiagnosticType.error("JSC_REDECLARED_VARIABLE_ERROR", "Illegal redeclared variable: {0}");
static final DiagnosticType DECLARATION_NOT_DIRECTLY_IN_BLOCK =
DiagnosticType.error(
"JSC_DECLARATION_NOT_DIRECTLY_IN_BLOCK",
"Block-scoped declaration not directly within block: {0}");
static final DiagnosticType UNUSED_LOCAL_ASSIGNMENT =
DiagnosticType.disabled(
"JSC_UNUSED_LOCAL_ASSIGNMENT", "Value assigned to local variable {0} is never read");
private final AbstractCompiler compiler;
private final boolean checkUnusedLocals;
// NOTE(nicksantos): It's a lot faster to use a shared Set that
// we clear after each method call, because the Set never gets too big.
private final Set<BasicBlock> blocksWithDeclarations = new HashSet<>();
// These types do not permit a block-scoped declaration inside them without an explicit block.
// e.g. if (b) let x;
// This list omits Token.LABEL intentionally. It's handled differently in IRFactory.
private static final ImmutableSet<Token> BLOCKLESS_DECLARATION_FORBIDDEN_STATEMENTS =
Sets.immutableEnumSet(
Token.IF, Token.FOR, Token.FOR_IN, Token.FOR_OF, Token.FOR_AWAIT_OF, Token.WHILE);
public VariableReferenceCheck(AbstractCompiler compiler) {
this.compiler = compiler;
this.checkUnusedLocals =
compiler.getOptions().enables(DiagnosticGroup.forType(UNUSED_LOCAL_ASSIGNMENT));
}
@Override
public void process(Node externs, Node root) {
new ReferenceCollector(
compiler, new ReferenceCheckingBehavior(), new SyntacticScopeCreator(compiler))
.process(externs, root);
}
/**
* Behavior that checks variables for redeclaration or early references just after they go out of
* scope.
*/
private class ReferenceCheckingBehavior implements Behavior {
private final Set<String> varsInFunctionBody;
private ReferenceCheckingBehavior() {
varsInFunctionBody = new HashSet<>();
}
@Override
public void afterExitScope(NodeTraversal t, ReferenceMap referenceMap) {
// Check all vars after finishing a scope
Scope scope = t.getScope();
if (scope.isFunctionBlockScope()) {
varsInFunctionBody.clear();
for (Var v : scope.getVarIterable()) {
varsInFunctionBody.add(v.getName());
}
}
for (Var v : scope.getVarIterable()) {
ReferenceCollection referenceCollection = referenceMap.getReferences(v);
// TODO(moz): Figure out why this could be null
if (referenceCollection != null) {
if (scope.getRootNode().isFunction() && v.isDefaultParam()) {
checkDefaultParam(v, scope, varsInFunctionBody);
}
if (scope.getRootNode().isFunction()) {
checkShadowParam(v, scope, referenceCollection.references);
}
checkVar(v, referenceCollection.references);
}
}
if (scope.hasOwnImplicitSlot(ImplicitVar.EXPORTS)) {
checkGoogModuleExports(scope.makeImplicitVar(ImplicitVar.EXPORTS), referenceMap);
}
}
private void checkDefaultParam(
Var param, final Scope scope, final Set<String> varsInFunctionBody) {
NodeTraversal.traverse(
compiler,
param.getParentNode().getSecondChild(),
/**
* Do a shallow check since cases like: {@code
* function f(y = () => x, x = 5) { return y(); }
* } are legal. We are going to miss cases like: {@code
* function f(y = (() => x)(), x = 5) { return y(); }
* } but this should be rare.
*/
new AbstractShallowCallback() {
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (!NodeUtil.isReferenceName(n)) {
return;
}
String refName = n.getString();
if (varsInFunctionBody.contains(refName) && !scope.hasSlot(refName)) {
compiler.report(JSError.make(n, EARLY_REFERENCE_ERROR, refName));
}
}
});
}
private void checkShadowParam(Var v, Scope functionScope, List<Reference> references) {
Var maybeParam = functionScope.getVar(v.getName());
if (maybeParam != null && maybeParam.isParam() && maybeParam.getScope() == functionScope) {
for (Reference r : references) {
if ((r.isVarDeclaration() || r.isHoistedFunction())
&& r.getNode() != v.getNameNode()) {
compiler.report(JSError.make(r.getNode(), REDECLARED_VARIABLE, v.getName()));
}
}
}
}
private void checkGoogModuleExports(Var exportsVar, ReferenceMap referenceMap) {
ReferenceCollection references = referenceMap.getReferences(exportsVar);
if (references == null || references.isNeverAssigned()) {
return;
}
for (Reference reference : references.references) {
if (reference.isLvalue()) {
break;
}
checkEarlyReference(exportsVar, reference, reference.getNode());
}
}
/**
* If the variable is declared more than once in a basic block, generate a
* warning. Also check if a variable is used in a given scope before it is
* declared, which suggests a likely error. Relies on the fact that the
* references are in parse-tree order.
*/
private void checkVar(Var v, List<Reference> references) {
blocksWithDeclarations.clear();
boolean hasSeenDeclaration = false;
boolean hasErrors = false;
boolean isRead = false;
Reference unusedAssignment = null;
Reference hoistedFn = lookForHoistedFunction(references);
if (hoistedFn != null) {
hasSeenDeclaration = true;
}
for (Reference reference : references) {
if (reference == hoistedFn) {
continue;
}
Node referenceNode = reference.getNode();
BasicBlock basicBlock = reference.getBasicBlock();
boolean isDeclaration = reference.isDeclaration();
boolean isAssignment = isDeclaration || reference.isLvalue();
if (isDeclaration) {
// Checks for declarations
hasSeenDeclaration = true;
hasErrors = checkRedeclaration(v, reference, referenceNode, hoistedFn, basicBlock);
// Add the current basic block after checking redeclarations
blocksWithDeclarations.add(basicBlock);
checkBlocklessDeclaration(v, reference, referenceNode);
if (reference.getGrandparent().isExport()) {
isRead = true;
}
} else {
// Checks for references
if (!hasSeenDeclaration) {
hasErrors = checkEarlyReference(v, reference, referenceNode);
}
if (!hasErrors && v.isConst() && reference.isLvalue()) {
compiler.report(JSError.make(referenceNode, REASSIGNED_CONSTANT, v.getName()));
}
// Check for temporal dead zone of let / const declarations in for-in and for-of loops
// TODO(b/111441110): Fix this check. it causes spurious warnings on `b = a` in
// for (const [a, b = a] of []) {}
if ((v.isLet() || v.isConst())
&& v.getScope() == reference.getScope()
&& NodeUtil.isEnhancedFor(reference.getScope().getRootNode())) {
compiler.report(JSError.make(referenceNode, EARLY_REFERENCE_ERROR, v.getName()));
}
}
if (isAssignment) {
Reference decl = references.get(0);
Node declNode = decl.getNode();
Node gp = declNode.getGrandparent();
boolean lhsOfForInLoop = gp.isForIn() && gp.getFirstFirstChild() == declNode;
if (decl.getScope().isLocal()
&& (decl.isVarDeclaration() || decl.isLetDeclaration() || decl.isConstDeclaration())
&& !decl.getNode().isFromExterns()
&& !lhsOfForInLoop) {
unusedAssignment = reference;
}
if ((reference.getParent().isDec() || reference.getParent().isInc()
|| NodeUtil.isCompoundAssignmentOp(reference.getParent()))
&& NodeUtil.isExpressionResultUsed(reference.getNode())) {
isRead = true;
}
} else {
isRead = true;
}
}
if (checkUnusedLocals && unusedAssignment != null && !isRead && !hasErrors) {
checkForUnusedLocalVar(v, unusedAssignment);
}
}
}
/**
* @return The reference to the hoisted function, if the variable is one
*/
private Reference lookForHoistedFunction(List<Reference> references) {
for (Reference reference : references) {
if (reference.isHoistedFunction()) {
blocksWithDeclarations.add(reference.getBasicBlock());
return reference;
}
}
return null;
}
private void checkBlocklessDeclaration(Var v, Reference reference, Node referenceNode) {
if (!reference.isVarDeclaration() && reference.getGrandparent().isAddedBlock()
&& BLOCKLESS_DECLARATION_FORBIDDEN_STATEMENTS.contains(
reference.getGrandparent().getParent().getToken())) {
compiler.report(JSError.make(referenceNode, DECLARATION_NOT_DIRECTLY_IN_BLOCK, v.getName()));
}
}
/**
* @return If a redeclaration error has been found
*/
private boolean checkRedeclaration(
Var v, Reference reference, Node referenceNode, Reference hoistedFn, BasicBlock basicBlock) {
boolean allowDupe =
VarCheck.hasDuplicateDeclarationSuppression(compiler, referenceNode, v.getNameNode());
boolean letConstShadowsVar = v.getParentNode().isVar()
&& (reference.isLetDeclaration() || reference.isConstDeclaration());
boolean isVarNodeSameAsReferenceNode = v.getNode() == reference.getNode();
// We disallow redeclaration of caught exceptions
boolean shadowCatchVar = v.getParentNode().isCatch() && !isVarNodeSameAsReferenceNode;
boolean shadowParam = v.isParam() && NodeUtil.isBlockScopedDeclaration(referenceNode)
&& v.getScope() == reference.getScope().getParent();
boolean shadowDetected = false;
if (!allowDupe) {
// Look through all the declarations we've found so far, and
// check if any of them are before this block.
for (BasicBlock declaredBlock : blocksWithDeclarations) {
if (declaredBlock.provablyExecutesBefore(basicBlock)) {
shadowDetected = true;
DiagnosticType diagnosticType;
Node warningNode = referenceNode;
if (v.isLet()
|| v.isConst()
|| v.isClass()
|| letConstShadowsVar
|| shadowCatchVar
|| shadowParam
|| v.isImport()) {
// These cases are all hard errors that violate ES6 semantics
diagnosticType = REDECLARED_VARIABLE_ERROR;
} else if (reference.getNode().getParent().isCatch() || allowDupe) {
return false;
} else {
// These diagnostics are for valid, but suspicious, code, and are suppressible.
// For vars defined in the global scope, give the same error as VarCheck
diagnosticType =
v.getScope().isGlobal()
? VarCheck.VAR_MULTIPLY_DECLARED_ERROR
: REDECLARED_VARIABLE;
// Since we skip hoisted functions, we would have the wrong warning node in cases
// where the redeclaration is a function declaration. Check for that case.
if (isVarNodeSameAsReferenceNode
&& hoistedFn != null
&& v.getName().equals(hoistedFn.getNode().getString())) {
warningNode = hoistedFn.getNode();
}
}
compiler.report(
JSError.make(warningNode, diagnosticType, v.getName(), locationOf(v.getNode())));
return true;
}
}
}
if (!shadowDetected && (letConstShadowsVar || shadowCatchVar)
&& v.getScope() == reference.getScope()) {
compiler.report(JSError.make(referenceNode, REDECLARED_VARIABLE_ERROR, v.getName()));
return true;
}
return false;
}
private static String locationOf(@Nullable Node n) {
return (n == null) ? "<unknown>" : n.getLocation();
}
/**
* @return If an early reference has been found
*/
private boolean checkEarlyReference(Var v, Reference reference, Node referenceNode) {
// Don't check the order of references in externs files.
if (referenceNode.isFromExterns() || v.isImplicitGoogNamespace()) {
return false;
}
// Special case to deal with var goog = goog || {}. Note that
// let x = x || {} is illegal, just like var y = x || {}; let x = y;
if (v.isVar()) {
Node curr = reference.getParent();
while (curr.isOr() && curr.getParent().getFirstChild() == curr) {
curr = curr.getParent();
}
if (curr.isName() && curr.getString().equals(v.getName())) {
return false;
}
}
// Only generate warnings for early references in the same function scope/global scope in
// order to deal with possible forward declarations and recursion
// e.g. don't warn on:
// function f() { return x; } f(); let x = 5;
// We don't track where `f` is called, just where it's defined, and don't want to warn for
// function f() { return x; } let x = 5; f();
// TODO(moz): See if we can remove the bypass for "goog"
if (reference.getScope().hasSameContainerScope(v.getScope()) && !v.getName().equals("goog")) {
compiler.report(
JSError.make(
reference.getNode(),
v.isGoogModuleExports()
? EARLY_EXPORTS_REFERENCE
: (v.isLet() || v.isConst() || v.isClass() || v.isParam())
? EARLY_REFERENCE_ERROR
: EARLY_REFERENCE,
v.getName()));
return true;
}
return false;
}
// Only check for unused local if not in a goog.scope function.
// TODO(tbreisacher): Consider moving UNUSED_LOCAL_ASSIGNMENT into its own check pass, so
// that we can run it after goog.scope processing, and get rid of the inGoogScope check.
private void checkForUnusedLocalVar(Var v, Reference unusedAssignment) {
if (!v.isLocal()) {
return;
}
JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(unusedAssignment.getNode());
if (jsDoc != null && jsDoc.hasTypedefType()) {
return;
}
boolean inGoogScope = false;
Scope s = v.getScope();
if (s.isFunctionBlockScope()) {
Node function = s.getRootNode().getParent();
Node callee = function.getPrevious();
inGoogScope = callee != null && callee.matchesQualifiedName("goog.scope");
}
if (inGoogScope) {
// No warning.
return;
}
if (s.isModuleScope()) {
Node statement = NodeUtil.getEnclosingStatement(v.getNode());
if (NodeUtil.isNameDeclaration(statement)) {
Node lhs = statement.getFirstChild();
Node rhs = lhs.getFirstChild();
if (rhs != null
&& (NodeUtil.isCallTo(rhs, "goog.forwardDeclare")
|| NodeUtil.isCallTo(rhs, "goog.requireType")
|| NodeUtil.isCallTo(rhs, "goog.require")
|| rhs.isQualifiedName())) {
// No warning. module imports will be caught by the unused-require check, and if the
// right side is a qualified name then this is likely an alias used in type annotations.
return;
}
}
}
compiler.report(JSError.make(unusedAssignment.getNode(), UNUSED_LOCAL_ASSIGNMENT, v.getName()));
}
}
|
|
package ameba.db.model;
import com.avaje.ebean.*;
import com.avaje.ebean.text.PathProperties;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
import java.util.Map;
import java.util.Set;
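// Illustrative usage sketch (an addition; assumes a concrete, framework-provided subclass and a
// hypothetical User entity, so it is not part of this abstract API):
//   Finder<Long, User> find = ...;                                    // exposed by the entity model
//   User one = find.byId(1L);                                         // fetch by primary key
//   List<User> active = find.where().eq("active", true).findList();   // Ebean expression query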
/**
* <p>Abstract Finder class.</p>
*
* @author icode
*/
public abstract class Finder<ID, T> {
private final Class<ID> idType;
private final Class<T> modelType;
private final String serverName;
/**
* Creates a finder for entities of type <code>T</code> with primary key type <code>ID</code>, using a specific Ebean server.
*
* @param serverName a {@link java.lang.String} object.
* @param idType a {@link java.lang.Class} object.
* @param modelType a {@link java.lang.Class} object.
*/
public Finder(String serverName, Class<ID> idType, Class<T> modelType) {
if (StringUtils.isBlank(serverName)) {
throw new IllegalArgumentException("server name is blank");
}
if (idType == null) {
throw new IllegalArgumentException("id model type is null");
}
if (modelType == null) {
throw new IllegalArgumentException("model model type is null");
}
this.modelType = modelType;
this.idType = idType;
this.serverName = serverName;
}
/**
* <p>Getter for the field <code>idType</code>.</p>
*
* @return a {@link java.lang.Class} object.
*/
public Class<ID> getIdType() {
return idType;
}
/**
* <p>Getter for the field <code>modelType</code>.</p>
*
* @param <M> a M object.
* @return a {@link java.lang.Class} object.
*/
@SuppressWarnings("unchecked")
public <M extends T> Class<M> getModelType() {
return (Class<M>) modelType;
}
/**
* <p>Getter for the field <code>serverName</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getServerName() {
return serverName;
}
/**
* Changes the model server.
*
* @param server a {@link java.lang.String} object.
* @return a {@link ameba.db.model.Finder} object.
*/
@SuppressWarnings("unchecked")
public abstract <M extends T> Finder<ID, M> on(String server);
/**
* Retrieves an entity by ID.
*
* @param id a ID object.
* @return a M object.
*/
public abstract <M extends T> M byId(ID id);
/**
* Retrieves an entity reference for this ID.
*
* @param id a ID object.
* @return a M object.
*/
public abstract <M extends T> M ref(ID id);
/**
* Creates a filter for sorting and filtering lists of entities locally without going back to the database.
*
* @return a {@link com.avaje.ebean.Filter} object.
*/
public abstract Filter<T> filter();
/**
* <p>query.</p>
*
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> query();
/**
* Creates a query.
*
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> createQuery();
/**
* <p>createNamedQuery.</p>
*
* @param name a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> createNamedQuery(String name);
/**
* <p>createNamedSqlQuery.</p>
*
* @param name a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.SqlQuery} object.
*/
public abstract SqlQuery createNamedSqlQuery(String name);
/**
* <p>createSqlQuery.</p>
*
* @param sql a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.SqlQuery} object.
*/
public abstract SqlQuery createSqlQuery(String sql);
/**
* Returns the next identity value.
*
* @return a I object.
*/
public abstract <I extends ID> I nextId();
/**
* Sets the OQL query to run
*
* @param oql oql
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setQuery(String oql);
/**
* <p>setRawSql.</p>
*
* @param rawSql a {@link com.avaje.ebean.RawSql} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setRawSql(RawSql rawSql);
/**
* <p>setPersistenceContextScope.</p>
*
* @param scope a {@link com.avaje.ebean.PersistenceContextScope} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setPersistenceContextScope(PersistenceContextScope scope);
/**
* <p>setLazyLoadBatchSize.</p>
*
* @param size a int.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setLazyLoadBatchSize(int size);
/**
* <p>select.</p>
*
* @param fetchProperties a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> select(String fetchProperties);
public abstract Query<T> setAutoTune(boolean autoTune);
/**
* <p>fetch.</p>
*
* @param path a {@link java.lang.String} object.
* @param fetchProperties a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> fetch(String path, String fetchProperties);
/**
* <p>fetch.</p>
*
* @param assocProperty a {@link java.lang.String} object.
* @param fetchProperties a {@link java.lang.String} object.
* @param fetchConfig a {@link com.avaje.ebean.FetchConfig} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> fetch(String assocProperty, String fetchProperties, FetchConfig fetchConfig);
/**
* <p>fetch.</p>
*
* @param path a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> fetch(String path);
/**
* <p>fetch.</p>
*
* @param path a {@link java.lang.String} object.
* @param joinConfig a {@link com.avaje.ebean.FetchConfig} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> fetch(String path, FetchConfig joinConfig);
/**
* <p>apply.</p>
*
* @param pathProperties a {@link com.avaje.ebean.text.PathProperties} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> apply(PathProperties pathProperties);
/**
* <p>findIds.</p>
*
* @return a {@link java.util.List} object.
*/
public abstract List<Object> findIds();
/**
* <p>findIterate.</p>
*
* @return a {@link com.avaje.ebean.QueryIterator} object.
*/
public abstract QueryIterator<T> findIterate();
/**
* <p>findEach.</p>
*
* @param consumer a {@link com.avaje.ebean.QueryEachConsumer} object.
*/
public abstract void findEach(QueryEachConsumer<T> consumer);
/**
* <p>findEachWhile.</p>
*
* @param consumer a {@link com.avaje.ebean.QueryEachWhileConsumer} object.
*/
public abstract void findEachWhile(QueryEachWhileConsumer<T> consumer);
/**
* <p>findList.</p>
*
* @param <M> a M object.
* @return a {@link java.util.List} object.
*/
public abstract <M extends T> List<M> findList();
/**
* <p>findSet.</p>
*
* @param <M> a M object.
* @return a {@link java.util.Set} object.
*/
public abstract <M extends T> Set<M> findSet();
/**
* <p>findMap.</p>
*
* @param <M> a M object.
* @return a {@link java.util.Map} object.
*/
public abstract <M extends T> Map<?, M> findMap();
/**
* <p>findMap.</p>
*
* @param keyProperty a {@link java.lang.String} object.
* @param keyType a {@link java.lang.Class} object.
* @param <K> a K object.
* @param <M> a M object.
* @return a {@link java.util.Map} object.
*/
public abstract <K, M extends T> Map<K, M> findMap(String keyProperty, Class<K> keyType);
/**
* <p>findUnique.</p>
*
* @param <M> a M object.
* @return a M object.
*/
public abstract <M extends T> M findUnique();
/**
* <p>findRowCount.</p>
*
* @return a int.
*/
public abstract int findRowCount();
/**
* <p>findFutureRowCount.</p>
*
* @return a {@link com.avaje.ebean.FutureRowCount} object.
*/
public abstract FutureRowCount<T> findFutureRowCount();
/**
* <p>findFutureIds.</p>
*
* @return a {@link com.avaje.ebean.FutureIds} object.
*/
public abstract FutureIds<T> findFutureIds();
/**
* <p>findFutureList.</p>
*
* @return a {@link com.avaje.ebean.FutureList} object.
*/
public abstract FutureList<T> findFutureList();
/**
* <p>findPagedList.</p>
*
* @param pageIndex a int.
* @param pageSize a int.
* @return a {@link com.avaje.ebean.PagedList} object.
*/
public abstract PagedList<T> findPagedList(int pageIndex, int pageSize);
/**
* <p>setParameter.</p>
*
* @param name a {@link java.lang.String} object.
* @param value a {@link java.lang.Object} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setParameter(String name, Object value);
/**
* <p>setParameter.</p>
*
* @param position a int.
* @param value a {@link java.lang.Object} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setParameter(int position, Object value);
/**
* <p>setId.</p>
*
* @param id a {@link java.lang.Object} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setId(Object id);
/**
* <p>where.</p>
*
* @param addToWhereClause a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> where(String addToWhereClause);
/**
* <p>where.</p>
*
* @param expression a {@link com.avaje.ebean.Expression} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> where(Expression expression);
/**
* <p>where.</p>
*
* @return a {@link com.avaje.ebean.ExpressionList} object.
*/
public abstract ExpressionList<T> where();
/**
* <p>filterMany.</p>
*
* @param propertyName a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.ExpressionList} object.
*/
public abstract ExpressionList<T> filterMany(String propertyName);
/**
* <p>having.</p>
*
* @return a {@link com.avaje.ebean.ExpressionList} object.
*/
public abstract ExpressionList<T> having();
/**
* <p>having.</p>
*
* @param addToHavingClause a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> having(String addToHavingClause);
/**
* <p>having.</p>
*
* @param addExpressionToHaving a {@link com.avaje.ebean.Expression} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> having(Expression addExpressionToHaving);
/**
* <p>orderBy.</p>
*
* @param orderByClause a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> orderBy(String orderByClause);
/**
* <p>order.</p>
*
* @param orderByClause a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> order(String orderByClause);
/**
* <p>order.</p>
*
* @return a {@link com.avaje.ebean.OrderBy} object.
*/
public abstract OrderBy<T> order();
/**
* <p>orderBy.</p>
*
* @return a {@link com.avaje.ebean.OrderBy} object.
*/
public abstract OrderBy<T> orderBy();
/**
* <p>setOrder.</p>
*
* @param orderBy a {@link com.avaje.ebean.OrderBy} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setOrder(OrderBy<T> orderBy);
/**
* <p>setOrderBy.</p>
*
* @param orderBy a {@link com.avaje.ebean.OrderBy} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setOrderBy(OrderBy<T> orderBy);
/**
* <p>setDistinct.</p>
*
* @param isDistinct a boolean.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setDistinct(boolean isDistinct);
/**
* <p>getExpressionFactory.</p>
*
* @return a {@link com.avaje.ebean.ExpressionFactory} object.
*/
public abstract ExpressionFactory getExpressionFactory();
/**
* <p>getFirstRow.</p>
*
* @return a int.
*/
public abstract int getFirstRow();
/**
* <p>getGeneratedSql.</p>
*
* @return a {@link java.lang.String} object.
*/
public abstract String getGeneratedSql();
/**
* <p>setFirstRow.</p>
*
* @param firstRow a int.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setFirstRow(int firstRow);
/**
* <p>getMaxRows.</p>
*
* @return a int.
*/
public abstract int getMaxRows();
/**
* <p>setMaxRows.</p>
*
* @param maxRows a int.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setMaxRows(int maxRows);
/**
* <p>setMapKey.</p>
*
* @param mapKey a {@link java.lang.String} object.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setMapKey(String mapKey);
/**
* <p>setUseCache.</p>
*
* @param useBeanCache a boolean.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setUseCache(boolean useBeanCache);
/**
* <p>setUseQueryCache.</p>
*
* @param useQueryCache a boolean.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setUseQueryCache(boolean useQueryCache);
/**
* <p>setReadOnly.</p>
*
* @param readOnly a boolean.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setReadOnly(boolean readOnly);
/**
* <p>setLoadBeanCache.</p>
*
* @param loadBeanCache a boolean.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setLoadBeanCache(boolean loadBeanCache);
/**
* <p>setTimeout.</p>
*
* @param secs a int.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setTimeout(int secs);
/**
* <p>setBufferFetchSizeHint.</p>
*
* @param fetchSize a int.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setBufferFetchSizeHint(int fetchSize);
/**
* <p>setForUpdate.</p>
*
* @param forUpdate a boolean.
* @return a {@link com.avaje.ebean.Query} object.
*/
public abstract Query<T> setForUpdate(boolean forUpdate);
}
|
|
package gov.healthit.chpl.dto;
import gov.healthit.chpl.entity.CertifiedProductEntity;
import java.util.Date;
import org.springframework.util.StringUtils;
public class CertifiedProductDTO {
private Long id;
private String productCode;
private String versionCode;
private String icsCode;
private String additionalSoftwareCode;
private String certifiedDateCode;
private String acbCertificationId;
private Long certificationBodyId;
private Long certificationEditionId;
private String chplProductNumber;
private Date creationDate;
private Boolean deleted;
private Date lastModifiedDate;
private Long lastModifiedUser;
private Long practiceTypeId;
private Long productClassificationTypeId;
private Long productVersionId;
private String reportFileLocation;
private String sedReportFileLocation;
private Long testingLabId;
private Long certificationStatusId;
private String otherAcb;
private Boolean visibleOnChpl;
private String termsOfUse;
private String apiDocumentation;
private String transparencyAttestationUrl;
private Boolean ics;
private Boolean sedTesting;
private Boolean qmsTesting;
private String productAdditionalSoftware;
private Boolean transparencyAttestation = null;
public CertifiedProductDTO(){}
public CertifiedProductDTO(CertifiedProductEntity entity){
this.id = entity.getId();
this.productCode = entity.getProductCode();
this.versionCode = entity.getVersionCode();
this.icsCode = entity.getIcsCode();
this.additionalSoftwareCode = entity.getAdditionalSoftwareCode();
this.certifiedDateCode = entity.getCertifiedDateCode();
this.acbCertificationId = entity.getAcbCertificationId();
this.certificationBodyId = entity.getCertificationBodyId();
this.certificationEditionId = entity.getCertificationEditionId();
this.chplProductNumber = entity.getChplProductNumber();
this.creationDate = entity.getCreationDate();
this.deleted = entity.getDeleted();
this.lastModifiedDate = entity.getLastModifiedDate();
this.lastModifiedUser = entity.getLastModifiedUser();
this.practiceTypeId = entity.getPracticeTypeId();
this.productClassificationTypeId = entity.getProductClassificationTypeId();
this.productVersionId = entity.getProductVersionId();
this.reportFileLocation = entity.getReportFileLocation();
this.sedReportFileLocation = entity.getSedReportFileLocation();
this.transparencyAttestationUrl = entity.getTransparencyAttestationUrl();
this.testingLabId = entity.getTestingLabId();
this.certificationStatusId = entity.getCertificationStatusId();
this.otherAcb = entity.getOtherAcb();
this.setVisibleOnChpl(entity.getVisibleOnChpl());
this.setTermsOfUse(entity.getTermsOfUse());
this.setApiDocumentation(entity.getApiDocumentation());
this.setIcs(entity.getIcs());
this.setSedTesting(entity.getSedTesting());
this.setQmsTesting(entity.getQmsTesting());
this.setProductAdditionalSoftware(entity.getProductAdditionalSoftware());
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getAcbCertificationId() {
return acbCertificationId;
}
public void setAcbCertificationId(String acbCertificationId) {
this.acbCertificationId = acbCertificationId;
}
public Long getCertificationBodyId() {
return certificationBodyId;
}
public void setCertificationBodyId(Long certificationBodyId) {
this.certificationBodyId = certificationBodyId;
}
public Long getCertificationEditionId() {
return certificationEditionId;
}
public void setCertificationEditionId(Long certificationEditionId) {
this.certificationEditionId = certificationEditionId;
}
public String getChplProductNumber() {
return chplProductNumber;
}
public void setChplProductNumber(String chplProductNumber) {
this.chplProductNumber = chplProductNumber;
}
public String getChplProductNumberForActivity() {
if(StringUtils.isEmpty(this.chplProductNumber)) {
return "a certified product";
}
return this.chplProductNumber;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public Boolean getDeleted() {
return deleted;
}
public void setDeleted(Boolean deleted) {
this.deleted = deleted;
}
public Date getLastModifiedDate() {
return lastModifiedDate;
}
public void setLastModifiedDate(Date lastModifiedDate) {
this.lastModifiedDate = lastModifiedDate;
}
public Long getLastModifiedUser() {
return lastModifiedUser;
}
public void setLastModifiedUser(Long lastModifiedUser) {
this.lastModifiedUser = lastModifiedUser;
}
public Long getPracticeTypeId() {
return practiceTypeId;
}
public void setPracticeTypeId(Long practiceTypeId) {
this.practiceTypeId = practiceTypeId;
}
public Long getProductClassificationTypeId() {
return productClassificationTypeId;
}
public void setProductClassificationTypeId(Long productClassificationTypeId) {
this.productClassificationTypeId = productClassificationTypeId;
}
public Long getProductVersionId() {
return productVersionId;
}
public void setProductVersionId(Long productVersionId) {
this.productVersionId = productVersionId;
}
public String getReportFileLocation() {
return reportFileLocation;
}
public void setReportFileLocation(String reportFileLocation) {
this.reportFileLocation = reportFileLocation;
}
public Long getTestingLabId() {
return testingLabId;
}
public void setTestingLabId(Long testingLabId) {
this.testingLabId = testingLabId;
}
public Long getCertificationStatusId() {
return certificationStatusId;
}
public void setCertificationStatusId(Long certificationStatusId) {
this.certificationStatusId = certificationStatusId;
}
public String getOtherAcb() {
return otherAcb;
}
public void setOtherAcb(String otherAcb) {
this.otherAcb = otherAcb;
}
public Boolean getVisibleOnChpl() {
return visibleOnChpl;
}
public void setVisibleOnChpl(Boolean visibleOnChpl) {
this.visibleOnChpl = visibleOnChpl;
}
public String getProductCode() {
return productCode;
}
public void setProductCode(String productCode) {
this.productCode = productCode;
}
public String getVersionCode() {
return versionCode;
}
public void setVersionCode(String versionCode) {
this.versionCode = versionCode;
}
public String getAdditionalSoftwareCode() {
return additionalSoftwareCode;
}
public void setAdditionalSoftwareCode(String additionalSoftwareCode) {
this.additionalSoftwareCode = additionalSoftwareCode;
}
public String getCertifiedDateCode() {
return certifiedDateCode;
}
public void setCertifiedDateCode(String certifiedDateCode) {
this.certifiedDateCode = certifiedDateCode;
}
public String getIcsCode() {
return icsCode;
}
public void setIcsCode(String icsCode) {
this.icsCode = icsCode;
}
public String getTermsOfUse() {
return termsOfUse;
}
public void setTermsOfUse(String termsOfUse) {
this.termsOfUse = termsOfUse;
}
public String getApiDocumentation() {
return apiDocumentation;
}
public void setApiDocumentation(String apiDocumentation) {
this.apiDocumentation = apiDocumentation;
}
public Boolean getTransparencyAttestation() {
return transparencyAttestation;
}
public void setTransparencyAttestation(Boolean transparencyAttestation) {
this.transparencyAttestation = transparencyAttestation;
}
public Boolean getIcs() {
return ics;
}
public void setIcs(Boolean ics) {
this.ics = ics;
}
public Boolean getSedTesting() {
return sedTesting;
}
public void setSedTesting(Boolean sedTesting) {
this.sedTesting = sedTesting;
}
public String getSedReportFileLocation() {
return sedReportFileLocation;
}
public void setSedReportFileLocation(String sedReportFileLocation) {
this.sedReportFileLocation = sedReportFileLocation;
}
public Boolean getQmsTesting() {
return qmsTesting;
}
public void setQmsTesting(Boolean qmsTesting) {
this.qmsTesting = qmsTesting;
}
public String getProductAdditionalSoftware() {
return productAdditionalSoftware;
}
public void setProductAdditionalSoftware(String productAdditionalSoftware) {
this.productAdditionalSoftware = productAdditionalSoftware;
}
public String getTransparencyAttestationUrl() {
return transparencyAttestationUrl;
}
public void setTransparencyAttestationUrl(String transparencyAttestationUrl) {
this.transparencyAttestationUrl = transparencyAttestationUrl;
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
* Filter results of a query to include only those within a specific distance to some
* geo point.
* */
public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQueryBuilder> {
/** Name of the query in the query dsl. */
public static final String NAME = "geo_distance";
/** Default for latitude normalization (as of this writing true).*/
public static final boolean DEFAULT_NORMALIZE_LAT = true;
/** Default for longitude normalization (as of this writing true). */
public static final boolean DEFAULT_NORMALIZE_LON = true;
/** Default for distance unit computation. */
public static final DistanceUnit DEFAULT_DISTANCE_UNIT = DistanceUnit.DEFAULT;
/** Default for geo distance computation. */
public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT;
/** Default for optimising query through pre computed bounding box query. */
public static final String DEFAULT_OPTIMIZE_BBOX = "memory";
private final String fieldName;
/** Distance from center to cover. */
private double distance;
/** Point to use as center. */
private GeoPoint center = new GeoPoint(Double.NaN, Double.NaN);
/** Algorithm to use for distance computation. */
private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE;
/** Whether or not to use a bbox for pre-filtering. TODO change to enum? */
private String optimizeBbox = DEFAULT_OPTIMIZE_BBOX;
/** How strict should geo coordinate validation be? */
private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;
static final GeoDistanceQueryBuilder PROTOTYPE = new GeoDistanceQueryBuilder("_na_");
/**
* Construct new GeoDistanceQueryBuilder.
* @param fieldName name of indexed geo field to operate distance computation on.
* */
public GeoDistanceQueryBuilder(String fieldName) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("fieldName must not be null or empty");
}
this.fieldName = fieldName;
}
/** Name of the field this query is operating on. */
public String fieldName() {
return this.fieldName;
}
/** Sets the center point for the query.
* @param point the center of the query
**/
public GeoDistanceQueryBuilder point(GeoPoint point) {
if (point == null) {
throw new IllegalArgumentException("center point must not be null");
}
this.center = point;
return this;
}
/**
* Sets the center point of the query.
* @param lat latitude of center
* @param lon longitude of center
* */
public GeoDistanceQueryBuilder point(double lat, double lon) {
this.center = new GeoPoint(lat, lon);
return this;
}
/** Returns the center point of the distance query. */
public GeoPoint point() {
return this.center;
}
/** Sets the distance from the center using the default distance unit.*/
public GeoDistanceQueryBuilder distance(String distance) {
return distance(distance, DistanceUnit.DEFAULT);
}
/** Sets the distance from the center for this query. */
public GeoDistanceQueryBuilder distance(String distance, DistanceUnit unit) {
if (Strings.isEmpty(distance)) {
throw new IllegalArgumentException("distance must not be null or empty");
}
if (unit == null) {
throw new IllegalArgumentException("distance unit must not be null");
}
this.distance = DistanceUnit.parse(distance, unit, DistanceUnit.DEFAULT);
return this;
}
/** Sets the distance from the center for this query. */
public GeoDistanceQueryBuilder distance(double distance, DistanceUnit unit) {
return distance(Double.toString(distance), unit);
}
/** Returns the distance configured as radius. */
public double distance() {
return distance;
}
/** Sets the center point for this query. */
public GeoDistanceQueryBuilder geohash(String geohash) {
if (Strings.isEmpty(geohash)) {
throw new IllegalArgumentException("geohash must not be null or empty");
}
this.center.resetFromGeoHash(geohash);
return this;
}
/** Which type of geo distance calculation method to use. */
public GeoDistanceQueryBuilder geoDistance(GeoDistance geoDistance) {
if (geoDistance == null) {
throw new IllegalArgumentException("geoDistance must not be null");
}
this.geoDistance = geoDistance;
return this;
}
/** Returns geo distance calculation type to use. */
public GeoDistance geoDistance() {
return this.geoDistance;
}
/**
* Set this to memory or indexed if before running the distance
* calculation you want to limit the candidates to hits in the
* enclosing bounding box.
**/
public GeoDistanceQueryBuilder optimizeBbox(String optimizeBbox) {
if (optimizeBbox == null) {
throw new IllegalArgumentException("optimizeBox must not be null");
}
switch (optimizeBbox) {
case "none":
case "memory":
case "indexed":
break;
default:
throw new IllegalArgumentException("optimizeBbox must be one of [none, memory, indexed]");
}
this.optimizeBbox = optimizeBbox;
return this;
}
/**
* Returns whether or not to run a BoundingBox query prior to
* distance query for optimization purposes.*/
public String optimizeBbox() {
return this.optimizeBbox;
}
/** Set validation method for geo coordinates. */
public void setValidationMethod(GeoValidationMethod method) {
this.validationMethod = method;
}
/** Returns validation method for geo coordinates. */
public GeoValidationMethod getValidationMethod() {
return this.validationMethod;
}
@Override
protected Query doToQuery(QueryShardContext shardContext) throws IOException {
QueryValidationException exception = checkLatLon(shardContext.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(shardContext, "couldn't validate latitude/ longitude values", exception);
}
if (GeoValidationMethod.isCoerce(validationMethod)) {
GeoUtils.normalizePoint(center, true, true);
}
double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT);
MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
return query;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startArray(fieldName).value(center.lon()).value(center.lat()).endArray();
builder.field("distance", distance);
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
builder.field("optimize_bbox", optimizeBbox);
builder.field("validation_method", validationMethod);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
public int doHashCode() {
return Objects.hash(center, geoDistance, optimizeBbox, distance, validationMethod);
}
@Override
public boolean doEquals(GeoDistanceQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
(distance == other.distance) &&
Objects.equals(validationMethod, other.validationMethod) &&
Objects.equals(center, other.center) &&
Objects.equals(optimizeBbox, other.optimizeBbox) &&
Objects.equals(geoDistance, other.geoDistance);
}
@Override
protected GeoDistanceQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
GeoDistanceQueryBuilder result = new GeoDistanceQueryBuilder(fieldName);
result.distance = in.readDouble();
result.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in);
result.center = in.readGeoPoint();
result.optimizeBbox = in.readString();
result.geoDistance = GeoDistance.readGeoDistanceFrom(in);
return result;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeDouble(distance);
validationMethod.writeTo(out);
out.writeGeoPoint(center);
out.writeString(optimizeBbox);
geoDistance.writeTo(out);
}
private QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) {
// validation was not available prior to 2.x, so for bwc with percolation queries we only validate coordinates on indexes created with 2.x or later
if (GeoValidationMethod.isIgnoreMalformed(validationMethod) || indexCreatedBeforeV2_0) {
return null;
}
QueryValidationException validationException = null;
// For everything post 2.0, validate latitude and longitude unless validation was explicitly turned off
if (GeoUtils.isValidLatitude(center.getLat()) == false) {
validationException = addValidationError("center point latitude is invalid: " + center.getLat(), validationException);
}
if (GeoUtils.isValidLongitude(center.getLon()) == false) {
validationException = addValidationError("center point longitude is invalid: " + center.getLon(), validationException);
}
return validationException;
}
@Override
public String getWriteableName() {
return NAME;
}
}
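// Minimal usage sketch added for illustration (not part of the original Elasticsearch source).
// It only chains setters defined above; "pin.location" is a hypothetical geo_point field name.
class GeoDistanceQueryBuilderUsageExample {
    static GeoDistanceQueryBuilder twoHundredKmAround(double lat, double lon) {
        return new GeoDistanceQueryBuilder("pin.location") // field must be mapped as geo_point
                .point(lat, lon)                           // center of the circle
                .distance("200", DistanceUnit.KILOMETERS)  // radius; parsed via DistanceUnit.parse
                .geoDistance(GeoDistance.ARC);             // exact arc-based distance calculation
    }
}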
|
|
// fork from https://github.com/markeeftb/FileOpener
package org.apache.cordova.openfilenative;
import android.Manifest;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.AssetManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.webkit.URLUtil;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.PermissionHelper;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
public class OpenFileNative extends CordovaPlugin {
private Context context;
private String progressTitle;
private CallbackContext callback;
private final int permissionRequestCode = 865;
@Override
public void initialize(CordovaInterface cordova, CordovaWebView webView) {
super.initialize(cordova, webView);
context = cordova.getActivity();
}
@Override
public boolean execute(String action, final JSONArray args, final CallbackContext callbackContext) throws JSONException {
if (action.equals("openFileNative")) {
callback = callbackContext;
if (!PermissionHelper.hasPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE)) {
PermissionHelper.requestPermission(this, permissionRequestCode, Manifest.permission.READ_EXTERNAL_STORAGE);
return true;
}
final JSONObject params = args.getJSONObject(0);
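// Expected args[0] shape (illustrative): {"file": "<asset path, file://, http(s):// or market:// URI>",
//                                         "progressTitle": "optional dialog title"}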
if (!params.has("file") || params.has("file") && params.getString("file").length() == 0) {
callback.error("File Parameter is missing");
return true;
}
final String fileToOpen = params.getString("file");
progressTitle = (params.has("progressTitle") && params.getString("progressTitle").length() > 0) ? params.getString("progressTitle") : "Open File";
cordova.getActivity().runOnUiThread(new Runnable() {
public void run() {
try {
URI uri = new URI(fileToOpen);
if (uri.isAbsolute() && uri.getScheme().equalsIgnoreCase("market")) {
context.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(fileToOpen)));
callback.success();
} else {
new DownloadAndOpenTask().execute(uri);
}
} catch (URISyntaxException e) {
callback.error("Error while opening file \"" + fileToOpen + "\".");
}
}
});
return true;
} else {
return false;
}
}
private class DownloadAndOpenTask extends AsyncTask<URI, Integer, Boolean> {
// progress dialog shown while the file is being downloaded
private ProgressDialog mProgressDialog;
private File targetFile;
private boolean canceled = false;
@Override
protected void onPreExecute() {
super.onPreExecute();
// create and configure the dialog before the background download starts
mProgressDialog = new ProgressDialog(context);
mProgressDialog.setMessage(progressTitle);
mProgressDialog.setIndeterminate(true);
mProgressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
mProgressDialog.setCancelable(true);
mProgressDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
canceled = true;
}
});
mProgressDialog.show();
}
@Override
protected Boolean doInBackground(URI... fileUris) {
InputStream input = null;
OutputStream output = null;
HttpURLConnection httpConnection = null;
URI uri = fileUris[0];
String targetFileName;
int fileLength = 0;
int sep = uri.toString().lastIndexOf("/");
if (sep > 0) {
targetFileName = uri.toString().substring(sep + 1, uri.toString().length());
} else {
targetFileName = uri.toString();
}
try {
if (!uri.isAbsolute()){
// local file in assets folder
AssetManager am = context.getAssets();
input = am.open("www/" + uri.toString());
} else if (uri.getScheme().equalsIgnoreCase("file")) {
// local file in phone storage
URLConnection urlConnection = uri.toURL().openConnection();
fileLength = urlConnection.getContentLength();
input = urlConnection.getInputStream();
} else {
// Remote file
URL url = uri.toURL();
httpConnection = (HttpURLConnection) url.openConnection();
httpConnection.connect();
// expect HTTP 200 OK, so we don't mistakenly save error report
// instead of the file
if (httpConnection.getResponseCode() != HttpURLConnection.HTTP_OK) {
callback.error("Server returned HTTP " + httpConnection.getResponseCode()
+ " " + httpConnection.getResponseMessage());
return false;
}
// this will be useful to display download percentage
// might be -1: server did not report the length
fileLength = httpConnection.getContentLength();
input = httpConnection.getInputStream();
}
if (input == null) {
callback.error("Could not open file '" + uri.toString() + "'");
return false;
}
// download the file and save it in the external cache dir
// so other apps can access the file
targetFile = new File(context.getExternalCacheDir(), targetFileName);
output = new FileOutputStream(targetFile);
byte[] data = new byte[4096];
long total = 0;
int count;
while ((count = input.read(data)) != -1) {
// allow canceling with back button
if (canceled) {
return false;
}
total += count;
// publishing the progress....
if (fileLength > 0) // only if total length is known
publishProgress((int) (total * 100 / fileLength));
output.write(data, 0, count);
}
} catch (FileNotFoundException e) {
callback.error("File \"" + uri.toString() + "\" does not exists, could not be opened.");
return false;
} catch (Exception e) {
callback.error(e.toString());
return false;
} finally {
try {
if (output != null)
output.close();
if (input != null)
input.close();
} catch (IOException ignored) {
}
if (httpConnection != null)
httpConnection.disconnect();
}
return true;
}
@Override
protected void onProgressUpdate(Integer... progress) {
super.onProgressUpdate(progress);
mProgressDialog.setIndeterminate(false);
mProgressDialog.setMax(100);
mProgressDialog.setProgress(progress[0]);
}
@Override
protected void onPostExecute(Boolean result) {
if (mProgressDialog != null) {
mProgressDialog.dismiss();
}
if (result) {
openFile(targetFile.getAbsolutePath());
}
}
private void openFile(String sUrl) {
File file = new File(sUrl);
Uri uri = Uri.fromFile(file);
String mimeType = URLConnection.guessContentTypeFromName(sUrl);
String guessedFileName = URLUtil.guessFileName(sUrl, null, null);
Intent intent = new Intent(Intent.ACTION_VIEW);
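// The extension checks below override the guessed MIME type for common formats, so the
// ACTION_VIEW intent gets a concrete type even when guessContentTypeFromName() returns null.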
if (guessedFileName.contains(".gif")) {
// GIF file
mimeType = "image/gif";
} else if (guessedFileName.contains(".jpg") || guessedFileName.contains(".jpeg")) {
// JPG file
mimeType = "image/jpeg";
} else if (guessedFileName.contains(".png")) {
// PNG file
mimeType = "image/png";
} else if (guessedFileName.contains(".txt")) {
// Text file
mimeType = "text/plain";
} else if (
guessedFileName.contains(".mpg") ||
guessedFileName.contains(".mpeg") ||
guessedFileName.contains(".mpe") ||
guessedFileName.contains(".mp4") ||
guessedFileName.contains(".avi") ||
guessedFileName.contains(".3gp") ||
guessedFileName.contains(".3gpp") ||
guessedFileName.contains(".3g2")
) {
// Video files
mimeType = "video/*";
} else if (guessedFileName.contains(".doc") || guessedFileName.contains(".docx")) {
// Word document
mimeType = "application/msword";
} else if (guessedFileName.contains(".pdf")) {
// PDF file
mimeType = "application/pdf";
} else if (guessedFileName.contains(".ppt") || guessedFileName.contains(".pptx")) {
// Powerpoint file
mimeType = "application/vnd.ms-powerpoint";
} else if (guessedFileName.contains(".xls") || guessedFileName.contains(".xlsx")) {
// Excel file
mimeType = "application/vnd.ms-excel";
} else if (guessedFileName.contains(".rtf")) {
// RTF file
mimeType = "application/rtf";
} else if (guessedFileName.contains(".apk")) {
mimeType = "application/vnd.android.package-archive";
} else if (guessedFileName.contains(".eml")) {
mimeType = "message/rfc822";
}
intent.setDataAndType(uri, mimeType);
try {
context.startActivity(intent);
callback.success();
} catch (Exception e) {
callback.error("There is no corresponding application installed for opening \"" + mimeType + "\" files.");
}
}
}
}
|
|
package ru.mephi.interpreter;
import org.antlr.v4.runtime.tree.ParseTree;
import ru.mephi.interpreter.generated.LangBaseVisitor;
import ru.mephi.interpreter.generated.LangParser;
import ru.mephi.interpreter.robot.Robot;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
/**
* @author Anton_Chkadua
*/
public class TreeVisitor
extends LangBaseVisitor<String> {
private Scope currentScope = Scope.GLOBAL;
private Robot robot = Robot.getInstance();
@Override
public String visitMain(LangParser.MainContext ctx) {
visitChildren(ctx);
try {
visit(getFunction("main", new ArrayList<>()));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitMoveLeft(LangParser.MoveLeftContext ctx) {
return String.valueOf(robot.left());
}
@Override
public String visitMoveRight(LangParser.MoveRightContext ctx) {
return String.valueOf(robot.right());
}
@Override
public String visitMoveTop(LangParser.MoveTopContext ctx) {
return String.valueOf(robot.top());
}
@Override
public String visitMoveBottom(LangParser.MoveBottomContext ctx) {
return String.valueOf(robot.bottom());
}
@Override
public String visitCreatePortal(LangParser.CreatePortalContext ctx) {
robot.createTeleport();
return null;
}
@Override
public String visitTeleport(LangParser.TeleportContext ctx) {
robot.teleport();
return null;
}
@Override
public String visitCanMoveBottom(LangParser.CanMoveBottomContext ctx) {
if (robot.canMoveBottom()) {
return "1";
}
return "0";
}
@Override
public String visitCanMoveLeft(LangParser.CanMoveLeftContext ctx) {
if (robot.canMoveLeft()) {
return "1";
}
return "0";
}
@Override
public String visitCanMoveRight(LangParser.CanMoveRightContext ctx) {
if (robot.canMoveRight()) {
return "1";
}
return "0";
}
@Override
public String visitCanMoveTop(LangParser.CanMoveTopContext ctx) {
if (robot.canMoveTop()) {
return "1";
}
return "0";
}
@Override
public String visitVisitedBottom(LangParser.VisitedBottomContext ctx) {
if (robot.visitedBottom()) {
return "1";
}
return "0";
}
@Override
public String visitVisitedLeft(LangParser.VisitedLeftContext ctx) {
if (robot.visitedLeft()) {
return "1";
}
return "0";
}
@Override
public String visitVisitedRight(LangParser.VisitedRightContext ctx) {
if (robot.visitedRight()) {
return "1";
}
return "0";
}
@Override
public String visitVisitedTop(LangParser.VisitedTopContext ctx) {
if (robot.visitedTop()) {
return "1";
}
return "0";
}
@Override
public String visitIsAtExit(LangParser.IsAtExitContext ctx) {
if (robot.checkIfAtExit()) {
return "1";
}
return "0";
}
@Override
public String visitNotAtExit(LangParser.NotAtExitContext ctx) {
if (robot.checkIfAtExit()) {
return "0";
}
return "1";
}
@Override
public String visitAddOp(LangParser.AddOpContext ctx) {
try {
if (ctx.op.getText().equals("+")) {
return toS(toI(visit(ctx.getChild(0))).add(toI(visit(ctx.getChild(2)))));
} else if (ctx.op.getText().equals("-")) {
return toS(toI(visit(ctx.getChild(0))).subtract(toI(visit(ctx.getChild(2)))));
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitMultiOp(LangParser.MultiOpContext ctx) {
try {
if (ctx.op.getText().equals("*")) {
return toS(toI(visit(ctx.getChild(0))).multiply(toI(visit(ctx.getChild(2)))));
} else if (ctx.op.getText().equals("/")) {
return toS(toI(visit(ctx.getChild(0))).divide(toI(visit(ctx.getChild(2)))));
} else if (ctx.op.getText().equals("%")) {
return toS(toI(visit(ctx.getChild(0))).remainder(toI(visit(ctx.getChild(2)))));
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitLength(LangParser.LengthContext ctx) {
try {
return toS(getVariable(ctx.getChild(1).getText()).getLength());
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return "1";
}
@Override
public String visitComparing(LangParser.ComparingContext ctx) {
try {
if (ctx.op.getText().equals("==")) {
return toS(toI(visit(ctx.getChild(0))).compareTo(toI(visit(ctx.getChild(2)))) == 0 ? BigInteger.ONE :
BigInteger.ZERO);
} else if (ctx.op.getText().equals("!=")) {
return toS(toI(visit(ctx.getChild(0))).compareTo(toI(visit(ctx.getChild(2)))) != 0 ? BigInteger.ONE :
BigInteger.ZERO);
} else if (ctx.op.getText().equals("<=")) {
return toS(toI(visit(ctx.getChild(0))).compareTo(toI(visit(ctx.getChild(2)))) <= 0 ? BigInteger.ONE :
BigInteger.ZERO);
} else if (ctx.op.getText().equals(">=")) {
return toS(toI(visit(ctx.getChild(0))).compareTo(toI(visit(ctx.getChild(2)))) >= 0 ? BigInteger.ONE :
BigInteger.ZERO);
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitExistingVariable(LangParser.ExistingVariableContext ctx) {
try {
Variable variable = getVariable(visit(ctx.getChild(0)));
if (ctx.getChild(0).getChild(0).getText().contains("[")) {
String[] buf = ctx.getChild(0).getChild(0).getText().split("\\[");
List<BigInteger> indexes = new ArrayList<>();
for (int i = 1; i < buf.length; i++) {
indexes.add(toI(buf[i].replace("]", "")));
}
for (BigInteger index : indexes) {
variable = variable.getElement(index.intValue());
}
variable.setValue(toI(visit(ctx.getChild(2))));
} else if (variable instanceof SimpleVariable) {
variable.setValue(extractResult(variable.getType(), visit(ctx.getChild(2))));
} else if (variable instanceof Pointer) {
if (ctx.getChild(0).getChild(0).getText().contains("*")) {
currentScope.setValueByAddress((Pointer) variable, extractResult(variable.getType(), visit(ctx.getChild(2))));
} else if (ctx.getChild(0).getChild(0).getText().contains("&")) {
variable.setAddress(toI(visit(ctx.getChild(2))));
}
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitJustDeclaredPointer(LangParser.JustDeclaredPointerContext ctx) {
Class type = getVariableClass(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 2).getText());
boolean constAddress = ctx.getChild(0).getChild(0).getText().toLowerCase().equals("const");
boolean constValue =
ctx.getChild(0).getChild(1 + (constAddress ? 1 : 0)).getText().toLowerCase().equals("const");
int childCount = ctx.getChild(0).getChildCount() - 1;
try {
String result = visit(ctx.getChild(2));
BigInteger value = extractResult(type, result);
currentScope.add(new Pointer(ctx.getChild(0).getChild(childCount).getText(), type, constValue, value,
constAddress));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitJustDeclaredVariable(LangParser.JustDeclaredVariableContext ctx) {
boolean isConstant = ctx.getChild(0).getChild(0).getText().toLowerCase().equals("const");
Class type = getVariableClass(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 2).getText());
try {
String result = visit(ctx.getChild(2));
BigInteger value = extractResult(type, result);
currentScope.add(new SimpleVariable(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 1).getText(),
type, value, isConstant));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitJustDeclaredArray(LangParser.JustDeclaredArrayContext ctx) {
boolean constSize = ctx.getChild(0).getChild(0).getText().toLowerCase().equals("const");
Class type = getVariableClass(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 2).getText());
try {
int size = toI(visit(ctx.getChild(2))).intValue();
currentScope.add(new Array(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 1).getText(), type,
size, constSize));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitNamedVariable(LangParser.NamedVariableContext ctx) {
return ctx.getChild(0).getText();
}
@Override
public String visitArrayElementVariable(LangParser.ArrayElementVariableContext ctx) {
return ctx.getChild(0).getChild(0).getText();
}
@Override
public String visitPointerValueVariable(LangParser.PointerValueVariableContext ctx) {
return ctx.getChild(0).getChild(1).getText();
}
@Override
public String visitPointerAddressVariable(LangParser.PointerAddressVariableContext ctx) {
return ctx.getChild(0).getChild(1).getText();
}
@Override
public String visitConstValue(LangParser.ConstValueContext ctx) {
return ctx.getText();
}
@Override
public String visitArrayElementValue(LangParser.ArrayElementValueContext ctx) {
try {
Variable variable = getVariable(ctx.getChild(0).getChild(0).getText())
.getElement(toI(visit(ctx.getChild(0).getChild(1))).intValue());
if (variable.getValue() != null) {
return toS(variable.getValue());
} else {
System.out.println(RuntimeLangException.Type.INVALID_LENGTH);
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return "0";
}
@Override
public String visitNamedVariableValue(LangParser.NamedVariableValueContext ctx) {
try {
return toS(getVariable(ctx.getChild(0).getText()).getValue());
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return "0";
}
@Override
public String visitPointerValueValue(LangParser.PointerValueValueContext ctx) {
try {
Variable variable = getVariable(ctx.getChild(0).getChild(1).getText());
if (!(variable instanceof Pointer)) {
System.out.println(RuntimeLangException.Type.NO_SUCH_VARIABLE);
} else {
return toS(currentScope.getByAddress((Pointer) variable).getValue());
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return "0";
}
@Override
public String visitVariableAddressValue(LangParser.VariableAddressValueContext ctx) {
try {
Variable variable = getVariable(ctx.getChild(0).getChild(1).getText());
return toS(currentScope.getVariableAddress(variable));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return "0";
}
@Override
public String visitVariableDeclaration(LangParser.VariableDeclarationContext ctx) {
boolean isConstant = ctx.getChild(0).getChild(0).getText().toLowerCase().equals("const");
Class type = getVariableClass(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 2).getText());
try {
currentScope.add(new SimpleVariable(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 1).getText(),
type, null, isConstant));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitPointerDeclaration(LangParser.PointerDeclarationContext ctx) {
Class type = getVariableClass(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 2).getText());
boolean constAddress = ctx.getChild(0).getChild(0).getText().toLowerCase().equals("const");
boolean constValue =
ctx.getChild(0).getChild(1 + (constAddress ? 1 : 0)).getText().toLowerCase().equals("const");
int childCount = ctx.getChild(0).getChildCount() - 1;
try {
currentScope.add(new Pointer(ctx.getChild(0).getChild(childCount).getText(), type, constValue, null,
constAddress));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitArrayDeclaration(LangParser.ArrayDeclarationContext ctx) {
boolean constSize = ctx.getChild(0).getChild(0).getText().toLowerCase().equals("const");
Class type = getVariableClass(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 2).getText());
try {
currentScope
.add(new Array(ctx.getChild(0).getChild(ctx.getChild(0).getChildCount() - 1).getText(), type, null,
constSize));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitIndex(LangParser.IndexContext ctx) {
return visit(ctx.getChild(1));
}
@Override
public String visitForEachCycle(LangParser.ForEachCycleContext ctx) {
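// Iterates over the named variable (each element of an array, or the single value otherwise),
// resolves the function named in the loop header, and executes its body once per element in a
// fresh child scope, with the element and any extra arguments bound as the function's parameters.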
Variable variable = null;
try {
variable = getVariable(ctx.getChild(0).getChild(1).getText());
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
List<Variable> args = new ArrayList<>();
List<Class> types = new ArrayList<>();
try {
BigInteger length;
if (variable instanceof Array) {
types.add(variable.getElement(0).type);
length = variable.getLength();
} else if (variable != null) {
types.add(variable.type);
length = variable.getLength();
} else {
throw new RuntimeLangException(RuntimeLangException.Type.NO_SUCH_VARIABLE);
}
for (int j = 2; j < ctx.getChild(0).getChild(2).getChildCount(); j += 2) {
if (!ctx.getChild(0).getChild(2).getChild(j).getText().equals(")")) {
types.add(getVariable(ctx.getChild(0).getChild(2).getChild(j).getText()).type);
}
}
ParseTree functionTree = getFunction(ctx.getChild(0).getChild(2).getChild(0).getText(), types);
Function func = currentScope.getFunction(ctx.getChild(0).getChild(2).getChild(0).getText(), types);
for (int i = 0; i < length.intValue(); i++) {
currentScope = new Scope(currentScope);
args.clear();
if (variable instanceof Array) {
args.add(variable.getElement(i));
} else {
args.add(variable);
}
for (int j = 2; j < ctx.getChild(0).getChild(2).getChildCount(); j += 2) {
if (!ctx.getChild(0).getChild(2).getChild(j).getText().equals(")")) {
args.add(getVariable(ctx.getChild(0).getChild(2).getChild(j).getText()));
}
}
for (int j = 0; j < args.size(); j++) {
if (args.get(j) instanceof SimpleVariable) {
currentScope.add(new SimpleVariable(func.args.get(j).name, func.args.get(j).type,
args.get(j).getValue(), false));
} else {
currentScope.add(new Pointer(func.args.get(j).name, func.args.get(j).type, false,
args.get(j).getValue(), false));
}
}
visit(functionTree);
currentScope = currentScope.getParent();
}
} catch (RuntimeLangException e) {
System.out.println(e.getType());
currentScope = currentScope.getParent();
}
return null;
}
@Override
public String visitFunctionCall(LangParser.FunctionCallContext ctx) {
String name = ctx.getChild(0).getChild(0).getText();
List<Variable> args = new ArrayList<>();
List<Class> types = new ArrayList<>();
String result = "0";
ParseTree functionTree;
Function func;
try {
for (int i = 2; i < ctx.getChild(0).getChildCount(); i += 2) {
if (!ctx.getChild(0).getChild(i).getText().equals(")")) {
Variable variable = getVariable(ctx.getChild(0).getChild(i).getText());
args.add(variable);
types.add(variable.getType());
}
}
functionTree = getFunction(name, types);
func = currentScope.getFunction(name, types);
currentScope = new Scope(currentScope);
for (int i = 0; i < args.size(); i++) {
if (args.get(i) instanceof SimpleVariable) {
currentScope.add(new SimpleVariable(func.args.get(i).name, func.args.get(i).type,
args.get(i).getValue(), false));
} else {
currentScope.add(new Pointer(func.args.get(i).name, func.args.get(i).type, false,
args.get(i).getValue(), false));
}
}
result = visit(functionTree);
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
currentScope = currentScope.getParent();
return result;
}
@Override
public String visitCall(LangParser.CallContext ctx) {
String name = ctx.getChild(0).getChild(0).getText();
List<Variable> args = new ArrayList<>();
List<Class> types = new ArrayList<>();
String result = "0";
ParseTree functionTree;
Function func;
try {
for (int i = 2; i < ctx.getChild(0).getChildCount(); i += 2) {
if (!ctx.getChild(0).getChild(i).getText().equals(")")) {
Variable variable = getVariable(ctx.getChild(0).getChild(i).getText());
args.add(variable);
types.add(variable.getType());
}
}
functionTree = getFunction(name, types);
func = currentScope.getFunction(name, types);
currentScope = new Scope(currentScope);
for (int i = 0; i < args.size(); i++) {
if (args.get(i) instanceof SimpleVariable) {
currentScope.add(new SimpleVariable(func.args.get(i).name, func.args.get(i).type,
args.get(i).getValue(), false));
} else {
currentScope.add(new Pointer(func.args.get(i).name, func.args.get(i).type, false,
args.get(i).getValue(), false));
}
}
result = func.returnType + ":" + visit(functionTree);
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
currentScope = currentScope.getParent();
return result;
}
@Override
public String visitFuncImpl(LangParser.FuncImplContext ctx) {
List<Argument> args = new ArrayList<>();
for (int i = 3; i < ctx.getChild(0).getChildCount() - 1; i += 2) {
args.add(new Argument(ctx.getChild(0).getChild(i).getChild(1).getText(),
getVariableClass(ctx.getChild(0).getChild(i).getChild(0).getText())));
}
try {
currentScope.addFunction(
new Function(ctx.getChild(0).getChild(1).getText(),
getVariableClass(ctx.getChild(0).getChild(0).getText()), args),
ctx.getChild(1));
} catch (RuntimeLangException e) {
System.out.println(e.getType());
}
return null;
}
@Override
public String visitBody(LangParser.BodyContext ctx) {
String result = null;
for (int i = 0; i < ctx.getChildCount(); i++) {
if (ctx.getChild(i).getChild(0) != null && ctx.getChild(i).getChild(0).getText().toLowerCase().equals(
"break")) {
return "break";
} else if (ctx.getChild(i).getText().toLowerCase().startsWith("return")) {
result = visit(ctx.getChild(i));
} else {
visit(ctx.getChild(i));
}
}
return result;
}
@Override
public String visitWhileCycle(LangParser.WhileCycleContext ctx) {
while (visit(ctx.getChild(0).getChild(0).getChild(2)).equals("1")) {
currentScope = new Scope(currentScope);
String visitResult = visit(ctx.getChild(0).getChild(1));
if (visitResult != null && visitResult.equals("break")) {
currentScope = currentScope.getParent();
return null;
}
currentScope = currentScope.getParent();
}
visit(ctx.getChild(0).getChild(3));
return null;
}
@Override
public String visitIfZero(LangParser.IfZeroContext ctx) {
currentScope = new Scope(currentScope);
if (visit(ctx.getChild(0).getChild(0).getChild(2)).equals("0")) {
visit(ctx.getChild(0).getChild(1));
}
currentScope = currentScope.getParent();
return null;
}
@Override
public String visitIfNotZero(LangParser.IfNotZeroContext ctx) {
currentScope = new Scope(currentScope);
if (!visit(ctx.getChild(0).getChild(0).getChild(2)).equals("0")) {
visit(ctx.getChild(0).getChild(1));
}
currentScope = currentScope.getParent();
return null;
}
@Override
public String visitBreaking(LangParser.BreakingContext ctx) {
return "break";
}
@Override
public String visitReturning(LangParser.ReturningContext ctx) {
return visit(ctx.getChild(0).getChild(1));
}
@Override
public String visitPrint(LangParser.PrintContext ctx) {
System.out.println(visit(ctx.getChild(1)));
return null;
}
@Override
public String visitBodyPart(LangParser.BodyPartContext ctx) {
currentScope = new Scope(currentScope);
for (int i = 1; i < ctx.getChild(0).getChildCount() - 1; i++) {
visit(ctx.getChild(0).getChild(i));
}
currentScope = currentScope.getParent();
return null;
}
private Variable getVariable(String name) throws RuntimeLangException {
return currentScope.get(name);
}
private Class getVariableClass(String string) {
switch (string) {
case "int":
case "Integer":
return Integer.class;
case "long":
case "Long":
return Long.class;
case "byte":
case "Byte":
return Byte.class;
}
System.out.println("No such type: " + string);
return Integer.class;
}
private ParseTree getFunction(String name, List<Class> types) throws RuntimeLangException {
return currentScope.getFunctionTree(name, types);
}
private BigInteger toI(String s) throws RuntimeLangException {
return new BigInteger(s);
}
private String toS(BigInteger i) {
return i.toString();
}
private BigInteger extractResult(Class type, String result) throws RuntimeLangException {
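// visitCall encodes a typed result as "<returnType>:<value>"; strip the type prefix, check it
// matches the declared type of the target variable, and parse the numeric value.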
if (result.contains(":")) {
Class resultType = getVariableClass(result.substring(result.lastIndexOf('.') + 1, result.indexOf(':')));
if (!resultType.equals(type)) throw new RuntimeLangException(
RuntimeLangException.Type.ILLEGAL_MODIFICATION);
return toI(result.substring(result.indexOf(":") + 1));
} else {
return toI(result);
}
}
}
|
|
/* Copyright (c) 2013-2014 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Gabriel Roldan (Boundless) - initial implementation
*/
package org.locationtech.geogig.geotools.cli;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.eclipse.jdt.annotation.Nullable;
import org.geotools.data.DataStore;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.feature.NameImpl;
import org.geotools.feature.simple.SimpleFeatureTypeImpl;
import org.locationtech.geogig.api.GeoGIG;
import org.locationtech.geogig.api.NodeRef;
import org.locationtech.geogig.api.ObjectId;
import org.locationtech.geogig.api.RevFeatureType;
import org.locationtech.geogig.api.RevObject;
import org.locationtech.geogig.api.RevObject.TYPE;
import org.locationtech.geogig.api.RevTree;
import org.locationtech.geogig.api.plumbing.FindTreeChild;
import org.locationtech.geogig.api.plumbing.ResolveObjectType;
import org.locationtech.geogig.api.plumbing.ResolveTreeish;
import org.locationtech.geogig.api.plumbing.RevObjectParse;
import org.locationtech.geogig.api.plumbing.RevParse;
import org.locationtech.geogig.cli.AbstractCommand;
import org.locationtech.geogig.cli.CLICommand;
import org.locationtech.geogig.cli.CommandFailedException;
import org.locationtech.geogig.cli.GeogigCLI;
import org.locationtech.geogig.cli.InvalidParameterException;
import org.locationtech.geogig.cli.annotation.ReadOnly;
import org.locationtech.geogig.geotools.plumbing.ExportOp;
import org.locationtech.geogig.geotools.plumbing.GeoToolsOpException;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.filter.Filter;
import com.beust.jcommander.Parameter;
import com.google.common.base.Optional;
/**
* Exports features from a geogig feature type into a {@link DataStore} given by the concrete
* subclass.
*
* @see ExportOp
*/
@ReadOnly
public abstract class DataStoreExport extends AbstractCommand implements CLICommand {
@Parameter(description = "<path> <table>", arity = 2)
public List<String> args;
@Parameter(names = { "--overwrite", "-o" }, description = "Overwrite output table")
public boolean overwrite;
@Parameter(names = { "--defaulttype" }, description = "Export only features with the tree default feature type if several types are found")
public boolean defaultType;
@Parameter(names = { "--alter" }, description = "Export all features if several types are found, altering them to adapt to the output feature type")
public boolean alter;
@Parameter(names = { "--featuretype" }, description = "Export only features with the specified feature type if several types are found")
@Nullable
public String sFeatureTypeId;
protected abstract DataStore getDataStore();
/**
* Executes the export command using the provided options.
*/
@Override
protected final void runInternal(GeogigCLI cli) throws IOException {
if (args.isEmpty()) {
printUsage(cli);
throw new CommandFailedException();
}
String path = args.get(0);
String tableName = args.get(1);
checkParameter(tableName != null && !tableName.isEmpty(), "No table name specified");
DataStore dataStore = getDataStore();
try {
exportInternal(cli, path, tableName, dataStore);
} finally {
dataStore.dispose();
}
}
private void exportInternal(GeogigCLI cli, String path, String tableName, DataStore dataStore)
throws IOException {
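// If the output table does not exist yet, derive its schema either from --featuretype or from
// the feature type of the tree at the given path and create it; if it exists, require -o and
// truncate it. Then stream the features out through ExportOp.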
ObjectId featureTypeId = null;
if (!Arrays.asList(dataStore.getTypeNames()).contains(tableName)) {
SimpleFeatureType outputFeatureType;
if (sFeatureTypeId != null) {
// Check the feature type id string is a correct id
Optional<ObjectId> id = cli.getGeogig().command(RevParse.class)
.setRefSpec(sFeatureTypeId).call();
checkParameter(id.isPresent(), "Invalid feature type reference", sFeatureTypeId);
TYPE type = cli.getGeogig().command(ResolveObjectType.class).setObjectId(id.get())
.call();
checkParameter(type.equals(TYPE.FEATURETYPE),
"Provided reference does not resolve to a feature type: ", sFeatureTypeId);
outputFeatureType = (SimpleFeatureType) cli.getGeogig()
.command(RevObjectParse.class).setObjectId(id.get())
.call(RevFeatureType.class).get().type();
featureTypeId = id.get();
} else {
try {
SimpleFeatureType sft = getFeatureType(path, cli);
outputFeatureType = new SimpleFeatureTypeImpl(new NameImpl(tableName),
sft.getAttributeDescriptors(), sft.getGeometryDescriptor(),
sft.isAbstract(), sft.getRestrictions(), sft.getSuper(),
sft.getDescription());
} catch (GeoToolsOpException e) {
throw new CommandFailedException("No features to export.", e);
}
}
try {
dataStore.createSchema(outputFeatureType);
} catch (IOException e) {
throw new CommandFailedException("Cannot create new table in database", e);
}
} else {
if (!overwrite) {
throw new CommandFailedException(
"The selected table already exists. Use -o to overwrite");
}
}
SimpleFeatureSource featureSource = dataStore.getFeatureSource(tableName);
if (!(featureSource instanceof SimpleFeatureStore)) {
throw new CommandFailedException("Can't write to the selected table");
}
SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
if (overwrite) {
try {
featureStore.removeFeatures(Filter.INCLUDE);
} catch (IOException e) {
throw new CommandFailedException("Error truncating table: " + e.getMessage(), e);
}
}
ExportOp op = cli.getGeogig().command(ExportOp.class).setFeatureStore(featureStore)
.setPath(path).setFilterFeatureTypeId(featureTypeId).setAlter(alter);
if (defaultType) {
op.exportDefaultFeatureType();
}
try {
op.setProgressListener(cli.getProgressListener()).call();
} catch (IllegalArgumentException iae) {
throw new org.locationtech.geogig.cli.InvalidParameterException(iae.getMessage(), iae);
} catch (GeoToolsOpException e) {
switch (e.statusCode) {
case MIXED_FEATURE_TYPES:
throw new CommandFailedException(
"The selected tree contains mixed feature types. Use --defaulttype or --featuretype <feature_type_ref> to export.",
e);
default:
throw new CommandFailedException("Could not export. Error:" + e.statusCode.name(),
e);
}
}
cli.getConsole().println(path + " exported successfully to " + tableName);
}
private SimpleFeatureType getFeatureType(String path, GeogigCLI cli) {
checkParameter(path != null, "No path specified.");
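// Accept either a full "<treeish>:<path>" refspec or a bare path, which is resolved against
// WORK_HEAD.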
String refspec;
if (path.contains(":")) {
refspec = path;
} else {
refspec = "WORK_HEAD:" + path;
}
checkParameter(!refspec.endsWith(":"), "No path specified.");
final GeoGIG geogig = cli.getGeogig();
Optional<ObjectId> rootTreeId = geogig.command(ResolveTreeish.class)
.setTreeish(refspec.split(":")[0]).call();
checkParameter(rootTreeId.isPresent(), "Couldn't resolve '" + refspec
+ "' to a treeish object");
RevTree rootTree = geogig.getRepository().getTree(rootTreeId.get());
Optional<NodeRef> featureTypeTree = geogig.command(FindTreeChild.class)
.setChildPath(refspec.split(":")[1]).setParent(rootTree).call();
checkParameter(featureTypeTree.isPresent(), "pathspec '" + refspec.split(":")[1]
+ "' did not match any valid path");
Optional<RevObject> revObject = cli.getGeogig().command(RevObjectParse.class)
.setObjectId(featureTypeTree.get().getMetadataId()).call();
if (revObject.isPresent() && revObject.get() instanceof RevFeatureType) {
RevFeatureType revFeatureType = (RevFeatureType) revObject.get();
if (revFeatureType.type() instanceof SimpleFeatureType) {
return (SimpleFeatureType) revFeatureType.type();
} else {
throw new InvalidParameterException(
"Cannot find feature type for the specified path");
}
} else {
throw new InvalidParameterException("Cannot find feature type for the specified path");
}
}
}
|
|
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.vanilla.core.engine;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.asakusafw.dag.api.model.PortId;
import com.asakusafw.dag.api.model.basic.BasicEdgeDescriptor.Movement;
import com.asakusafw.dag.api.processor.EdgeIoProcessorContext;
import com.asakusafw.dag.api.processor.EdgeReader;
import com.asakusafw.dag.api.processor.EdgeWriter;
import com.asakusafw.dag.api.processor.ProcessorContext;
import com.asakusafw.dag.api.processor.TaskInfo;
import com.asakusafw.dag.api.processor.TaskProcessor;
import com.asakusafw.dag.api.processor.TaskProcessorContext;
import com.asakusafw.dag.api.processor.TaskSchedule;
import com.asakusafw.dag.api.processor.VertexProcessor;
import com.asakusafw.dag.api.processor.VertexProcessorContext;
import com.asakusafw.dag.api.processor.basic.ForwardEdgeIoProcessorContext;
import com.asakusafw.dag.api.processor.basic.ForwardProcessorContext;
import com.asakusafw.dag.api.processor.extension.ProcessorContextDecorator;
import com.asakusafw.lang.utils.common.Arguments;
import com.asakusafw.lang.utils.common.InterruptibleIo;
import com.asakusafw.lang.utils.common.InterruptibleIo.Closer;
import com.asakusafw.lang.utils.common.Invariants;
import com.asakusafw.lang.utils.common.Lang;
import com.asakusafw.lang.utils.common.Optionals;
import com.asakusafw.vanilla.core.mirror.PortMirror;
import com.asakusafw.vanilla.core.mirror.VertexMirror;
/**
* Executes vertices.
* @since 0.4.0
*/
public class VertexExecutor implements InterruptibleIo.IoRunnable {
static final Logger LOG = LoggerFactory.getLogger(VertexExecutor.class);
private final EdgeIoContext context;
private final VertexMirror vertex;
private final ExecutorService executor;
private final int numberOfThreads;
private final ProcessorContextDecorator decorator;
/**
* Creates a new instance.
* @param context the root context
* @param vertex the target vertex
* @param edges the edge driver
* @param threads the task executor
* @param numberOfThreads the number of available {@code threads}
*/
public VertexExecutor(
ProcessorContext context,
VertexMirror vertex,
EdgeDriver edges,
ExecutorService threads,
int numberOfThreads) {
Arguments.requireNonNull(context);
Arguments.requireNonNull(vertex);
Arguments.requireNonNull(edges);
Arguments.requireNonNull(threads);
Arguments.require(numberOfThreads >= 1);
this.context = new EdgeIoContext(context, vertex, edges);
this.vertex = vertex;
this.executor = threads;
this.numberOfThreads = numberOfThreads;
this.decorator = context.getResource(ProcessorContextDecorator.class)
.orElse(ProcessorContextDecorator.NULL);
}
@Override
public void run() throws IOException, InterruptedException {
long start = System.currentTimeMillis();
String label = "N/A"; //$NON-NLS-1$
try (VertexProcessor processor = vertex.newProcessor(context.getClassLoader())) {
label = processor.toString();
List<TaskProcessorContext> tasks = doInitialize(processor);
doRun(processor, tasks);
} catch (Exception e) {
LOG.error(MessageFormat.format(
"vertex execution failed: {1} ({0})",
vertex.getId().getName(),
label), e);
throw e;
}
doFinalize(label);
if (LOG.isInfoEnabled()) {
LOG.info(MessageFormat.format(
"finish vertex: {2} ({1}) in {0}ms",
System.currentTimeMillis() - start,
vertex.getId().getName(),
label));
}
}
private List<TaskProcessorContext> doInitialize(
VertexProcessor processor) throws IOException, InterruptedException {
VertexProcessorContext vContext = decorator.bless(new VertexContext(context, vertex));
if (LOG.isDebugEnabled()) {
LOG.debug("initialize vertex: processor={}, vertex={}",
processor,
vertex.getId().getName());
}
Optional<? extends TaskSchedule> schedule = processor.initialize(vContext);
// broadcast inputs are only available in VertexProcessor.initialize()
for (PortMirror port : vertex.getInputs()) {
if (port.getMovement() == Movement.BROADCAST) {
context.complete(port.getId());
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("schedule tasks: processor={}, vertex={}",
processor,
vertex.getId().getName());
}
List<TaskProcessorContext> results = new ArrayList<>();
if (schedule.isPresent()) {
List<? extends TaskInfo> tasks = schedule.get().getTasks();
int index = 0;
for (TaskInfo info : tasks) {
results.add(decorator.bless(new TaskContext(context, vertex, index++, tasks.size(), info)));
}
} else {
int taskCount = computeTaskCount(processor);
Invariants.require(taskCount >= 1);
for (int index = 0; index < taskCount; index++) {
results.add(decorator.bless(new TaskContext(context, vertex, index, taskCount, null)));
}
}
return results;
}
private void doRun(
VertexProcessor processor,
List<TaskProcessorContext> tasks) throws IOException, InterruptedException {
int concurrency = computeConcurrency(processor, tasks);
if (LOG.isDebugEnabled()) {
LOG.debug("submit tasks: processor={}, vertex={}, tasks={}, threads={}/{}",
processor,
vertex.getId().getName(),
tasks.size(),
concurrency,
numberOfThreads);
}
BlockingQueue<TaskProcessorContext> queue = new LinkedBlockingQueue<>(tasks);
runTasks(Lang.let(new ArrayList<>(), it -> Lang.repeat(concurrency, () -> {
it.add(new TaskExecutor(vertex, processor, queue));
})));
}
private void doFinalize(String label) throws InterruptedException, IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("finalize vertex: processor={}, vertex={}",
label,
vertex.getId().getName());
}
runTasks(Stream.concat(vertex.getInputs().stream(), vertex.getOutputs().stream())
.<InterruptibleIo.IoRunnable>map(it -> () -> context.complete(it.getId()))
.collect(Collectors.toList()));
}
private void runTasks(List<? extends InterruptibleIo.IoRunnable> tasks) throws InterruptedException, IOException {
Deque<Future<?>> futures = tasks.stream()
.map(it -> executor.submit(() -> {
it.run();
return null;
}))
.collect(Collectors.toCollection(ArrayDeque::new));
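// Poll each future with a short timeout and rotate unfinished ones to the back of the deque,
// so a failure in any submitted task surfaces promptly instead of blocking on the first future.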
while (futures.isEmpty() == false) {
Future<?> first = futures.removeFirst();
try {
first.get(100, TimeUnit.MILLISECONDS);
} catch (TimeoutException e) {
if (LOG.isTraceEnabled()) {
LOG.trace("waiting for task completion: {}", first, e); //$NON-NLS-1$
}
futures.addLast(first);
} catch (ExecutionException e) {
Throwable t = e.getCause();
Lang.rethrow(t, Error.class);
Lang.rethrow(t, RuntimeException.class);
Lang.rethrow(t, IOException.class);
Lang.rethrow(t, InterruptedException.class);
throw new IOException(t);
}
}
}
private int computeTaskCount(VertexProcessor processor) {
if (vertex.getInputs().stream()
.map(PortMirror::getMovement)
.anyMatch(Predicate.isEqual(Movement.SCATTER_GATHER))) {
return context.getNumberOfPartitions();
}
int result = Math.max(numberOfThreads, 1);
if (processor.getMaxConcurrency() >= 1) {
result = Math.min(processor.getMaxConcurrency(), result);
}
return result;
}
private int computeConcurrency(VertexProcessor processor, List<TaskProcessorContext> tasks) {
int result = Math.min(tasks.size(), numberOfThreads);
if (processor.getMaxConcurrency() >= 1) {
result = Math.min(processor.getMaxConcurrency(), result);
}
return result;
}
private static class EdgeIoContext implements EdgeIoProcessorContext, ForwardProcessorContext {
private final ProcessorContext forward;
private final EdgeDriver driver;
private final Map<String, PortId> inputs;
private final Map<String, PortId> outputs;
EdgeIoContext(ProcessorContext forward, VertexMirror vertex, EdgeDriver driver) {
this.forward = forward;
this.driver = driver;
this.inputs = names(vertex.getInputs());
this.outputs = names(vertex.getOutputs());
}
private static Map<String, PortId> names(Collection<? extends PortMirror> ports) {
return ports.stream()
.map(PortMirror::getId)
.collect(Collectors.toMap(PortId::getName, Function.identity()));
}
@Override
public ProcessorContext getForward() {
return forward;
}
@Override
public EdgeReader getInput(String name) throws IOException, InterruptedException {
return getInput(name, 0, 1);
}
EdgeReader getInput(String name, int taskIndex, int taskCount) throws IOException, InterruptedException {
PortId id = Invariants.requireNonNull(inputs.get(name), name);
return driver.acquireInput(id, taskIndex, taskCount);
}
@Override
public EdgeWriter getOutput(String name) throws IOException, InterruptedException {
PortId id = Invariants.requireNonNull(outputs.get(name), name);
return driver.acquireOutput(id);
}
int getNumberOfPartitions() {
return driver.getNumberOfPartitions();
}
void complete(PortId id) throws IOException, InterruptedException {
driver.complete(id);
}
}
private static class VertexContext implements VertexProcessorContext, ForwardEdgeIoProcessorContext {
private final EdgeIoContext forward;
private final String id;
VertexContext(EdgeIoContext forward, VertexMirror vertex) {
this.forward = forward;
this.id = vertex.getId().getName();
}
@Override
public EdgeIoProcessorContext getForward() {
return forward;
}
@Override
public String getVertexId() {
return id;
}
}
private static class TaskContext implements TaskProcessorContext, ForwardEdgeIoProcessorContext {
private final EdgeIoContext forward;
private final String vertexId;
private final String taskId;
private final int taskIndex;
private final int taskCount;
private final TaskInfo info;
TaskContext(EdgeIoContext forward, VertexMirror vertex, int taskIndex, int taskCount, TaskInfo info) {
this.forward = forward;
this.vertexId = vertex.getId().getName();
this.taskId = String.format("%s-%d", vertexId, taskIndex);
this.taskIndex = taskIndex;
this.taskCount = taskCount;
this.info = info;
}
@Override
public EdgeIoProcessorContext getForward() {
return forward;
}
@Override
public EdgeReader getInput(String name) throws IOException, InterruptedException {
return forward.getInput(name, taskIndex, taskCount);
}
@Override
public String getVertexId() {
return vertexId;
}
@Override
public String getTaskId() {
return taskId;
}
@Override
public Optional<TaskInfo> getTaskInfo() {
return Optionals.of(info);
}
}
private static class TaskExecutor implements InterruptibleIo.IoRunnable {
private final VertexMirror vertex;
private final VertexProcessor processor;
private final BlockingQueue<? extends TaskProcessorContext> queue;
TaskExecutor(
VertexMirror vertex,
VertexProcessor processor,
BlockingQueue<? extends TaskProcessorContext> queue) {
Arguments.requireNonNull(vertex);
Arguments.requireNonNull(processor);
Arguments.requireNonNull(queue);
this.vertex = vertex;
this.processor = processor;
this.queue = queue;
}
@Override
public void run() throws IOException, InterruptedException {
try (Closer closer = new Closer()) {
TaskProcessor taskProcessor = null;
while (true) {
TaskProcessorContext next = queue.poll();
if (next == null) {
break;
}
if (taskProcessor == null) {
if (LOG.isTraceEnabled()) {
LOG.trace("initialize task processor on [{}]: {} ({})",
Thread.currentThread().getName(), processor, vertex.getId().getName());
}
taskProcessor = closer.add(processor.createTaskProcessor());
}
LOG.trace("start task: {} ({})", processor, next.getTaskId());
taskProcessor.run(next);
LOG.trace("finish task: {} ({})", processor, next.getTaskId());
}
if (taskProcessor != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("finalize task processor on [{}]: {} ({})",
Thread.currentThread().getName(), processor, vertex.getId().getName());
}
}
}
}
}
}
|
|
package me.anchore.struct;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.stream.Stream;
/**
* @author anchore
* @date 2017/11/21
*/
public class AvlTree<V> {
NodeKey treeRoot = new NodeKey();
Comparator<V> c;
Map<NodeKey, Node> nodes = new HashMap<>();
public AvlTree(Comparator<V> c) {
this.c = c;
}
public static void main(String[] args) {
Random r = new Random();
AvlTree<Integer> avl = new AvlTree<>(Integer::compareTo);
Integer[] integers = Stream.generate(() -> r.nextInt(15)).distinct().limit(10).toArray(Integer[]::new);
Stream.of(integers).forEach(avl::insert);
System.out.println();
}
private static <T> void delete() {
}
private static int max(int a, int b) {
return a > b ? a : b;
}
void insert(V v) {
if (!has(treeRoot)) {
Node n = new Node(treeRoot, v);
put(treeRoot, n);
return;
}
Node n = new Node(new NodeKey(), v);
// register the new node so lookups through its key succeed
put(n.key(), n);
insert(get(treeRoot), n);
}
private void insert(Node r, Node n) {
int cmp = c.compare(r.value(), n.value());
if (cmp > 0) {
if (!r.hasLeft()) {
r.setLeft(n);
r.setHeight(height(r));
return;
}
insert(r.left(), n);
int bf = balanceFactor(r.left());
if (bf == 2) {
r.setLeft(llRotate(r.left()));
} else if (bf == -2) {
r.setLeft(lrRotate(r.left()));
}
} else {
if (!r.hasRight()) {
r.setRight(n);
r.setHeight(height(r));
return;
}
insert(r.right(), n);
int bf = balanceFactor(r.right());
if (bf == 2) {
r.setRight(rlRotate(r.right()));
} else if (bf == -2) {
r.setRight(rrRotate(r.right()));
}
}
r.setHeight(height(r));
int bf = balanceFactor(r);
if (bf == 2) {
int lbf = balanceFactor(r.left());
if (lbf == 1) {
r.setSelf(llRotate(r));
} else if (lbf == -1) {
r.setSelf(lrRotate(r));
}
} else if (bf == -2) {
int rbf = balanceFactor(r.right());
if (rbf == 1) {
r.setSelf(rlRotate(r));
} else if (rbf == -1) {
r.setSelf(rrRotate(r));
}
}
r.setHeight(height(r));
}
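// Rotation naming follows the imbalance being repaired: llRotate fixes a left-left case with a
// single right rotation, rrRotate fixes a right-right case with a single left rotation, and the
// lr/rl variants first rotate the child so one of the single rotations applies. balanceFactor(n)
// is height(left) - height(right), so +2 means left-heavy and -2 means right-heavy.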
private Node llRotate(Node root) {
if (root == null) {
return null;
}
Node newRoot = root.left();
root.setLeft(newRoot.right());
newRoot.setRight(root);
root.setHeight(height(root));
newRoot.setHeight(height(newRoot));
return newRoot;
}
private Node rrRotate(Node root) {
if (root == null) {
return null;
}
Node newRoot = root.right();
root.setRight(newRoot.left());
newRoot.setLeft(root);
root.setHeight(height(root));
newRoot.setHeight(height(newRoot));
return newRoot;
}
private Node lrRotate(Node root) {
if (root == null) {
return null;
}
// left-right case: rotate the left child left first, then rotate the root right
root.setLeft(rrRotate(root.left()));
return llRotate(root);
}
private Node rlRotate(Node root) {
if (root == null) {
return null;
}
// right-left case: rotate the right child right first, then rotate the root left
root.setRight(llRotate(root.right()));
return rrRotate(root);
}
private int height(Node n) {
if (n == null) {
return 0;
}
int lh = n.hasLeft() ? n.left().height() : 0;
int rh = n.hasRight() ? n.right().height() : 0;
return Math.max(lh, rh) + 1;
}
private int balanceFactor(Node n) {
if (n == null) {
return 0;
}
int lh = n.hasLeft() ? n.left().height() : 0;
int rh = n.hasRight() ? n.right().height() : 0;
return lh - rh;
}
boolean has(NodeKey nodeKey) {
return nodes.containsKey(nodeKey);
}
Node get(NodeKey nodeKey) {
return nodes.get(nodeKey);
}
void put(NodeKey nodeKey, Node node) {
nodes.put(nodeKey, node);
}
static class NodeKey {
@Override
public boolean equals(Object obj) {
return super.equals(obj);
}
@Override
public int hashCode() {
return super.hashCode();
}
}
class Node {
V value;
int height = 1;
NodeKey self;
NodeKey left;
NodeKey right;
Node(NodeKey nodeKey, V value) {
this.value = value;
this.self = nodeKey;
left = new NodeKey();
right = new NodeKey();
}
V value() {
return value;
}
NodeKey key() {
return self;
}
void setValue(V value) {
this.value = value;
}
int height() {
return height;
}
void setHeight(int height) {
this.height = height;
}
void setSelf(Node node) {
self = node.key();
}
boolean hasLeft() {
return has(left) && get(left) != null;
}
Node left() {
return get(left);
}
void setLeft(Node node) {
left = node.key();
}
boolean hasRight() {
return has(right) && get(right) != null;
}
Node right() {
return get(right);
}
void setRight(Node node) {
right = node.key();
}
@Override
public String toString() {
return "{ v=" + value +
", h=" + height +
" }";
}
}
}
|
|
/* Matcher.java -- Instance of a regular expression applied to a char sequence.
Copyright (C) 2002, 2004, 2006 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package java.util.regex;
import gnu.java.util.regex.CharIndexed;
import gnu.java.util.regex.RE;
import gnu.java.util.regex.REMatch;
/**
* Instance of a regular expression applied to a char sequence.
*
* @since 1.4
*/
public final class Matcher implements MatchResult
{
private Pattern pattern;
private CharSequence input;
// We use CharIndexed as an input object to the getMatch method in order
// that /\G/ (the end of the previous match) may work. The information
// of the previous match is stored in the CharIndexed object.
private CharIndexed inputCharIndexed;
private int position;
private int appendPosition;
private REMatch match;
Matcher(Pattern pattern, CharSequence input)
{
this.pattern = pattern;
this.input = input;
this.inputCharIndexed = RE.makeCharIndexed(input, 0);
}
/**
* @param sb The target string buffer
* @param replacement The replacement string
*
* @exception IllegalStateException If no match has yet been attempted,
* or if the previous match operation failed
* @exception IndexOutOfBoundsException If the replacement string refers
* to a capturing group that does not exist in the pattern
*/
public Matcher appendReplacement (StringBuffer sb, String replacement)
throws IllegalStateException
{
assertMatchOp();
sb.append(input.subSequence(appendPosition,
match.getStartIndex()).toString());
sb.append(RE.getReplacement(replacement, match,
RE.REG_REPLACE_USE_BACKSLASHESCAPE));
appendPosition = match.getEndIndex();
return this;
}
/**
* @param sb The target string buffer
*/
public StringBuffer appendTail (StringBuffer sb)
{
sb.append(input.subSequence(appendPosition, input.length()).toString());
return sb;
}
/**
* @exception IllegalStateException If no match has yet been attempted,
* or if the previous match operation failed
*/
public int end ()
throws IllegalStateException
{
assertMatchOp();
return match.getEndIndex();
}
/**
* @param group The index of a capturing group in this matcher's pattern
*
* @exception IllegalStateException If no match has yet been attempted,
* or if the previous match operation failed
* @exception IndexOutOfBoundsException If there is no capturing group
* with the given index in the pattern
*/
public int end (int group)
throws IllegalStateException
{
assertMatchOp();
return match.getEndIndex(group);
}
public boolean find ()
{
boolean first = (match == null);
match = pattern.getRE().getMatch(inputCharIndexed, position);
if (match != null)
{
int endIndex = match.getEndIndex();
// Are we stuck at the same position?
if (!first && endIndex == position)
{
match = null;
// Not at the end of the input yet?
if (position < input.length() - 1)
{
position++;
return find(position);
}
else
return false;
}
position = endIndex;
return true;
}
return false;
}
/**
* @param start The index to start the new pattern matching
*
* @exception IndexOutOfBoundsException If start is less than zero or
* greater than the length of the input sequence
*/
public boolean find (int start)
{
match = pattern.getRE().getMatch(inputCharIndexed, start);
if (match != null)
{
position = match.getEndIndex();
return true;
}
return false;
}
/**
* @exception IllegalStateException If no match has yet been attempted,
* or if the previous match operation failed
*/
public String group ()
{
assertMatchOp();
return match.toString();
}
/**
* @param group The index of a capturing group in this matcher's pattern
*
* @exception IllegalStateException If no match has yet been attempted,
* or if the previous match operation failed
* @exception IndexOutOfBoundsException If there is no capturing group
* with the given index in the pattern
*/
public String group (int group)
throws IllegalStateException
{
assertMatchOp();
return match.toString(group);
}
/**
* @param replacement The replacement string
*/
public String replaceFirst (String replacement)
{
reset();
// Semantics might not quite match
return pattern.getRE().substitute(input, replacement, position,
RE.REG_REPLACE_USE_BACKSLASHESCAPE);
}
/**
* @param replacement The replacement string
*/
public String replaceAll (String replacement)
{
reset();
return pattern.getRE().substituteAll(input, replacement, position,
RE.REG_REPLACE_USE_BACKSLASHESCAPE);
}
public int groupCount ()
{
return pattern.getRE().getNumSubs();
}
public boolean lookingAt ()
{
match = pattern.getRE().getMatch(inputCharIndexed, 0);
if (match != null)
{
if (match.getStartIndex() == 0)
{
position = match.getEndIndex();
return true;
}
match = null;
}
return false;
}
/**
* Attempts to match the entire input sequence against the pattern.
*
* If the match succeeds then more information can be obtained via the
* start, end, and group methods.
*
* @see #start()
* @see #end()
* @see #group()
*/
public boolean matches ()
{
match = pattern.getRE().getMatch(inputCharIndexed, 0, RE.REG_TRY_ENTIRE_MATCH);
if (match != null)
{
if (match.getStartIndex() == 0)
{
position = match.getEndIndex();
if (position == input.length())
return true;
}
match = null;
}
return false;
}
/**
* Returns the Pattern that is interpreted by this Matcher
*/
public Pattern pattern ()
{
return pattern;
}
public Matcher reset ()
{
position = 0;
match = null;
return this;
}
/**
* @param input The new input character sequence
*/
public Matcher reset (CharSequence input)
{
this.input = input;
return reset();
}
/**
* @return the index of the first character of the previous match
*
* @exception IllegalStateException If no match has yet been attempted,
* or if the previous match operation failed
*/
public int start ()
throws IllegalStateException
{
assertMatchOp();
return match.getStartIndex();
}
  /**
   * @param group The index of a capturing group in this matcher's pattern
   *
   * @exception IllegalStateException If no match has yet been attempted,
   * or if the previous match operation failed
   * @exception IndexOutOfBoundsException If there is no capturing group in
   * the pattern with the given index
   */
public int start (int group)
throws IllegalStateException
{
assertMatchOp();
return match.getStartIndex(group);
}
private void assertMatchOp()
{
if (match == null) throw new IllegalStateException();
}
}
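A minimal usage sketch for the Matcher wrapper above. It assumes the companion Pattern class exposes a compile(String) factory and a matcher(CharSequence) method, mirroring java.util.regex; those two entry points are assumptions and are not shown in this file.
// Illustrative only: drives the find()/group()/start()/end() loop defined above.
public class MatcherSketch
{
    public static void main(String[] args)
    {
        Pattern p = Pattern.compile("[0-9]+");          // assumed factory method
        Matcher m = p.matcher("a1 b22 c333");           // assumed matcher() method
        while (m.find())
        {
            System.out.println(m.group() + " at [" + m.start() + ", " + m.end() + ")");
        }
        System.out.println(m.reset().replaceAll("#"));  // uses the RE-backed substitution above
    }
}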
|
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.yoko.rmi.impl;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.logging.Logger;
import java.util.logging.Level;
import javax.rmi.CORBA.PortableRemoteObjectDelegate;
import javax.rmi.CORBA.Stub;
import javax.rmi.CORBA.Tie;
import javax.rmi.CORBA.Util;
import org.apache.yoko.rmi.util.ClientUtil;
import org.apache.yoko.rmi.util.GetSystemPropertyAction;
import org.apache.yoko.rmi.util.stub.MethodRef;
import org.apache.yoko.rmi.util.stub.StubClass;
import org.apache.yoko.rmi.util.stub.StubInitializer;
import org.apache.yoko.rmispec.util.UtilLoader;
import org.omg.CORBA.BAD_INV_ORDER;
import org.omg.CORBA.portable.IDLEntity;
import org.omg.CORBA.portable.ObjectImpl;
public class PortableRemoteObjectImpl implements PortableRemoteObjectDelegate {
static final Logger LOGGER = Logger
.getLogger(PortableRemoteObjectImpl.class.getName());
static {
// Initialize the stub handler factory when first loaded to ensure we have
// class loading visibility to the factory.
getRMIStubInitializer();
}
static org.omg.CORBA.ORB getORB() {
return RMIState.current().getORB();
}
static org.omg.PortableServer.POA getPOA() {
return RMIState.current().getPOA();
}
static ClassLoader getClassLoader() {
return RMIState.current().getClassLoader();
}
public void connect(Remote target, Remote source)
throws java.rmi.RemoteException {
if (!(source instanceof javax.rmi.CORBA.Stub))
source = toStub(source);
ObjectImpl obj;
if (target instanceof ObjectImpl) {
obj = (ObjectImpl) target;
} else {
try {
exportObject(target);
} catch (RemoteException ex) {
                // ignore "already exported" failures here
}
try {
obj = (ObjectImpl) toStub(target);
} catch (java.rmi.NoSuchObjectException ex) {
throw (RemoteException)new RemoteException("cannot convert to stub!").initCause(ex);
}
}
try {
((javax.rmi.CORBA.Stub) source).connect(((ObjectImpl) obj)._orb());
} catch (org.omg.CORBA.BAD_OPERATION bad_operation) {
throw (RemoteException)new RemoteException(bad_operation.getMessage())
.initCause(bad_operation);
}
}
private Object narrowRMI(ObjectImpl narrowFrom, Class<?> narrowTo) {
if (LOGGER.isLoggable(Level.FINE))
LOGGER.fine(String.format("RMI narrowing %s => %s", narrowFrom.getClass().getName(), narrowTo.getName()));
ObjectImpl object = narrowFrom;
final String codebase = getCodebase(narrowFrom);
RMIState state = RMIState.current();
javax.rmi.CORBA.Stub stub;
try {
stub = createStub(state, codebase, narrowTo);
} catch (ClassNotFoundException ex) {
throw (ClassCastException)new ClassCastException(narrowTo.getName()).initCause(ex);
}
org.omg.CORBA.portable.Delegate delegate;
try {
// let the stub adopt narrowFrom's identity
delegate = object._get_delegate();
} catch (org.omg.CORBA.BAD_OPERATION ex) {
// ignore
delegate = null;
}
stub._set_delegate(delegate);
return stub;
}
private String getCodebase(ObjectImpl narrowFrom) {
String codebase;
if (narrowFrom instanceof org.omg.CORBA_2_3.portable.ObjectImpl) {
org.omg.CORBA_2_3.portable.ObjectImpl object_2_3 = (org.omg.CORBA_2_3.portable.ObjectImpl) narrowFrom;
try {
codebase = object_2_3._get_codebase();
} catch (org.omg.CORBA.BAD_OPERATION ex) {
codebase = null;
}
} else {
codebase = null;
}
return codebase;
}
private Object narrowIDL(ObjectImpl narrowFrom, Class<?> narrowTo) {
if (LOGGER.isLoggable(Level.FINE))
LOGGER.fine(String.format("IDL narrowing %s => %s", narrowFrom.getClass().getName(), narrowTo.getName()));
final ClassLoader idlClassLoader = UtilImpl.getClassLoader(narrowTo);
final String codebase = getCodebase(narrowFrom);
final String helperClassName = narrowTo.getName() + "Helper";
try {
final Class<?> helperClass = Util.loadClass(helperClassName, codebase, idlClassLoader);
final Method helperNarrow = AccessController.doPrivileged(new PrivilegedExceptionAction<Method>() {
@Override
public Method run() throws Exception {
return helperClass.getMethod("narrow", org.omg.CORBA.Object.class);
}
});
return helperNarrow.invoke(null, narrowFrom);
} catch (PrivilegedActionException e) {
throw (ClassCastException)new ClassCastException(narrowTo.getName()).initCause(e.getCause());
} catch (Exception e) {
throw (ClassCastException)new ClassCastException(narrowTo.getName()).initCause(e);
}
}
public Object narrow(Object narrowFrom, @SuppressWarnings("rawtypes") Class narrowTo)
throws ClassCastException {
if (narrowFrom == null)
return null;
if (narrowTo.isInstance(narrowFrom))
return narrowFrom;
final String fromClassName = narrowFrom.getClass().getName();
final String toClassName = narrowTo.getName();
if (LOGGER.isLoggable(Level.FINE))
LOGGER.finer(String.format("narrow %s => %s", fromClassName, toClassName));
if (!(narrowFrom instanceof org.omg.CORBA.portable.ObjectImpl))
throw new ClassCastException(String.format(
"object to narrow (runtime type %s) is not an instance of %s",
fromClassName, ObjectImpl.class.getName()));
if (!narrowTo.isInterface())
throw new ClassCastException(String.format("%s is not an interface", toClassName));
final boolean isRemote = Remote.class.isAssignableFrom(narrowTo);
final boolean isIDLEntity = IDLEntity.class.isAssignableFrom(narrowTo);
if (isRemote && isIDLEntity)
throw new ClassCastException(String.format(
"%s invalidly extends both %s and %s",
toClassName, Remote.class.getName(), IDLEntity.class.getName()));
if (isRemote)
return narrowRMI((ObjectImpl) narrowFrom, narrowTo);
if (isIDLEntity)
return narrowIDL((ObjectImpl) narrowFrom, narrowTo);
throw new ClassCastException(String.format(
"%s extends neither %s nor %s",
toClassName, Remote.class.getName(), IDLEntity.class.getName()));
}
static java.rmi.Remote narrow1(RMIState state, ObjectImpl object, Class<?> narrowTo) throws ClassCastException {
Stub stub;
try {
stub = createStub(state, null, narrowTo);
} catch (ClassNotFoundException ex) {
throw (ClassCastException)new ClassCastException(narrowTo.getName()).initCause(ex);
}
org.omg.CORBA.portable.Delegate delegate;
try {
// let the stub adopt narrowFrom's identity
delegate = object._get_delegate();
} catch (org.omg.CORBA.BAD_OPERATION ex) {
// ignore
delegate = null;
}
stub._set_delegate(delegate);
return (java.rmi.Remote) stub;
}
static private Stub createStub(RMIState state, String codebase, Class<?> type) throws ClassNotFoundException {
if (Remote.class == type) {
return new RMIRemoteStub();
}
if (ClientUtil.isRunningAsClientContainer()) {
Stub stub = state.getStaticStub(codebase, type);
if (stub != null) {
return stub;
}
}
return createRMIStub(state, type);
}
static Object[] NO_ARG = new Object[0];
static Stub createRMIStub(RMIState state, Class<?> type) throws ClassNotFoundException {
if (!type.isInterface()) {
throw new RuntimeException("non-interfaces not supported");
}
LOGGER.fine("Creating RMI stub for class " + type.getName());
Constructor<? extends Stub> cons = getRMIStubClassConstructor(state, type);
try {
Stub result = cons.newInstance(NO_ARG);
return result;
} catch (InstantiationException ex) {
throw new RuntimeException(
"internal problem: cannot instantiate stub", ex);
} catch (InvocationTargetException ex) {
throw new RuntimeException(
"internal problem: cannot instantiate stub", ex.getCause());
} catch (IllegalAccessException ex) {
throw new RuntimeException(
"internal problem: cannot instantiate stub", ex);
}
}
private static java.lang.reflect.Method stub_write_replace;
static {
try {
stub_write_replace = RMIStub.class.getDeclaredMethod(
"writeReplace", new Class[0]);
} catch (Throwable ex) {
LOGGER.log(Level.WARNING, "cannot initialize: \n" + ex.getMessage(), ex);
throw new Error("cannot initialize", ex);
}
}
static synchronized Class<?> getRMIStubClass(RMIState state, Class<?> type) throws ClassNotFoundException {
return getRMIStubClassConstructor(state, type).getDeclaringClass();
}
static Constructor<? extends Stub> getRMIStubClassConstructor(RMIState state, Class<?> type) throws ClassNotFoundException {
LOGGER.fine("Requesting stub constructor of class " + type.getName());
@SuppressWarnings("unchecked")
Constructor<? extends Stub> cons = (Constructor<? extends Stub>) state.stub_map.get(type);
if (cons != null) {
LOGGER.fine("Returning cached constructor of class " + cons.getDeclaringClass().getName());
return cons;
}
TypeRepository repository = state.repo;
RemoteDescriptor desc = (RemoteDescriptor) repository.getRemoteInterface(type);
MethodDescriptor[] mdesc = desc.getMethods();
MethodDescriptor[] descriptors = new MethodDescriptor[mdesc.length + 1];
for (int i = 0; i < mdesc.length; i++) {
descriptors[i] = mdesc[i];
}
LOGGER.finer("TYPE ----> " + type);
LOGGER.finer("LOADER --> " + UtilImpl.getClassLoader(type));
LOGGER.finer("CONTEXT -> " + getClassLoader());
MethodRef[] methods = new MethodRef[descriptors.length];
for (int i = 0; i < mdesc.length; i++) {
Method m = descriptors[i].getReflectedMethod();
LOGGER.finer("Method ----> " + m);
methods[i] = new MethodRef(m);
}
methods[mdesc.length] = new MethodRef(stub_write_replace);
Class<?> clazz = null;
try {
/* Construct class! */
clazz = StubClass.make(/* the class loader to use */
UtilImpl.getClassLoader(type),
/* the bean developer's bean class */
RMIStub.class,
/* interfaces */
new Class[] { type },
/* the methods */
methods,
/* contains only ejbCreate */
null,
/* our data objects */
descriptors,
/* the handler method */
getPOAStubInvokeMethod(),
/* package name (use superclass') */
getPackageName(type),
/* provider of handlers */
getRMIStubInitializer());
} catch (java.lang.NoClassDefFoundError ex) {
/* Construct class! */
clazz = StubClass.make(/* the class loader to use */
getClassLoader(),
/* the bean developer's bean class */
RMIStub.class,
/* interfaces */
new Class[] { type },
/* the methods */
methods,
/* contains only ejbCreate */
null,
/* our data objects */
descriptors,
/* the handler method */
getPOAStubInvokeMethod(),
/* package name (use superclass') */
getPackageName(type),
/* provider of handlers */
getRMIStubInitializer());
}
if (clazz != null) {
try {
cons = (Constructor<? extends Stub>) clazz.getConstructor();
state.stub_map.put(type, cons);
} catch (NoSuchMethodException e) {
LOGGER.log(Level.FINER, "constructed stub has no default constructor", e);
}
}
return cons;
}
static String getPackageName(Class clazz) {
String class_name = clazz.getName();
int idx = class_name.lastIndexOf('.');
if (idx == -1) {
return null;
} else {
return class_name.substring(0, idx);
}
}
private static Method poa_stub_invoke_method;
static Method getPOAStubInvokeMethod() {
if (poa_stub_invoke_method == null) {
// NYI: PrivilegedAction
try {
// get the interface method used to invoke the stub handler
poa_stub_invoke_method = (StubHandler.class)
.getDeclaredMethod("invoke", new Class[] {
RMIStub.class, MethodDescriptor.class,
Object[].class });
} catch (NoSuchMethodException ex) {
throw new Error("cannot find RMI Stub handler invoke method", ex);
}
}
return poa_stub_invoke_method;
}
public java.rmi.Remote toStub(java.rmi.Remote value)
throws java.rmi.NoSuchObjectException {
if (value instanceof javax.rmi.CORBA.Stub)
return value;
javax.rmi.CORBA.Tie tie = javax.rmi.CORBA.Util.getTie(value);
if (tie == null) {
throw new java.rmi.NoSuchObjectException("object not exported");
}
RMIServant servant = (RMIServant) tie;
try {
org.omg.PortableServer.POA poa = servant.getRMIState().getPOA();
org.omg.CORBA.Object ref = poa.servant_to_reference(servant);
return (java.rmi.Remote) narrow(ref, servant.getJavaClass());
} catch (org.omg.PortableServer.POAPackage.ServantNotActive ex) {
throw new RuntimeException("internal error: " + ex.getMessage(), ex);
} catch (org.omg.PortableServer.POAPackage.WrongPolicy ex) {
throw new RuntimeException("internal error: " + ex.getMessage(), ex);
}
}
public void exportObject(Remote obj) throws java.rmi.RemoteException {
RMIState state = RMIState.current();
try {
state.checkShutDown();
} catch (BAD_INV_ORDER ex) {
throw new RemoteException("RMIState is deactivated", ex);
}
Tie tie = javax.rmi.CORBA.Util.getTie(obj);
if (tie != null)
throw new java.rmi.RemoteException("object already exported");
RMIServant servant = new RMIServant(state);
javax.rmi.CORBA.Util.registerTarget(servant, obj);
LOGGER.finer("exporting instance of " + obj.getClass().getName()
+ " in " + state.getName());
try {
servant._id = state.getPOA().activate_object(servant);
} catch (org.omg.PortableServer.POAPackage.ServantAlreadyActive ex) {
throw new java.rmi.RemoteException("internal error: " + ex.getMessage(), ex);
} catch (org.omg.PortableServer.POAPackage.WrongPolicy ex) {
throw new java.rmi.RemoteException("internal error: " + ex.getMessage(), ex);
}
}
public void unexportObject(Remote obj)
throws java.rmi.NoSuchObjectException {
javax.rmi.CORBA.Util.unexportObject(obj);
}
// the factory object used for creating stub initializers
static private StubInitializer initializer = null;
// the default stub handler, which is ours without overrides.
private static final String defaultInitializer = "org.apache.yoko.rmi.impl.RMIStubInitializer";
/**
* Get the RMI stub handler initializer to use for RMI invocation
* stubs. The Class in question must implement the StubInitializer method.
*
* @return The class used to create StubHandler instances.
*/
private static StubInitializer getRMIStubInitializer() {
if (initializer == null) {
String factory = (String)AccessController.doPrivileged(new GetSystemPropertyAction("org.apache.yoko.rmi.RMIStubInitializerClass", defaultInitializer));
try {
initializer = (StubInitializer)(UtilLoader.loadServiceClass(factory, "org.apache.yoko.rmi.RMIStubInitializerClass").newInstance());
} catch (Exception e) {
throw (org.omg.CORBA.INITIALIZE)new org.omg.CORBA.INITIALIZE(
"Can not create RMIStubInitializer: " + factory).initCause(e);
}
}
return initializer;
}
}
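The delegate above is normally reached through javax.rmi.PortableRemoteObject rather than used directly. A hedged sketch of the client-side call path follows; the Greeter interface and the JNDI name are illustrative assumptions, and the lookup presumes a configured CosNaming/JNDI provider.
import java.rmi.Remote;
import java.rmi.RemoteException;
import javax.naming.InitialContext;
import javax.rmi.PortableRemoteObject;

// Hypothetical remote interface, used only for illustration.
interface Greeter extends Remote {
    String greet(String name) throws RemoteException;
}

public class NarrowSketch {
    public static void main(String[] args) throws Exception {
        InitialContext ctx = new InitialContext();      // assumes a configured naming provider
        Object ref = ctx.lookup("GreeterService");      // the name is an illustrative assumption
        // PortableRemoteObject.narrow delegates to PortableRemoteObjectImpl.narrow above,
        // which builds an RMI stub for Greeter and lets it adopt the reference's delegate.
        Greeter greeter = (Greeter) PortableRemoteObject.narrow(ref, Greeter.class);
        System.out.println(greeter.greet("world"));
    }
}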
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.security;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.TextOutputCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.sasl.RealmCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.provider.DigestSaslClientAuthenticationProvider;
import org.apache.hadoop.hbase.security.provider.DigestSaslClientAuthenticationProvider.DigestSaslClientCallbackHandler;
import org.apache.hadoop.hbase.security.provider.GssSaslClientAuthenticationProvider;
import org.apache.hadoop.hbase.security.provider.SimpleSaslClientAuthenticationProvider;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Strings;
@Category({SecurityTests.class, SmallTests.class})
public class TestHBaseSaslRpcClient {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHBaseSaslRpcClient.class);
static {
System.setProperty("java.security.krb5.realm", "DOMAIN.COM");
System.setProperty("java.security.krb5.kdc", "DOMAIN.COM");
}
static final String DEFAULT_USER_NAME = "principal";
static final String DEFAULT_USER_PASSWORD = "password";
private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);
@Rule
public ExpectedException exception = ExpectedException.none();
@Test
public void testSaslClientUsesGivenRpcProtection() throws Exception {
Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,
DEFAULT_USER_PASSWORD);
DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider();
for (SaslUtil.QualityOfProtection qop : SaslUtil.QualityOfProtection.values()) {
String negotiatedQop = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider, token,
Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false, qop.name(),
false) {
public String getQop() {
return saslProps.get(Sasl.QOP);
}
}.getQop();
      assertEquals(qop.getSaslQop(), negotiatedQop);
}
}
@Test
public void testDigestSaslClientCallbackHandler() throws UnsupportedCallbackException {
final Token<? extends TokenIdentifier> token = createTokenMock();
when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
final NameCallback nameCallback = mock(NameCallback.class);
final PasswordCallback passwordCallback = mock(PasswordCallback.class);
final RealmCallback realmCallback = mock(RealmCallback.class);
// We can provide a realmCallback, but HBase presently does nothing with it.
Callback[] callbackArray = {nameCallback, passwordCallback, realmCallback};
final DigestSaslClientCallbackHandler saslClCallbackHandler =
new DigestSaslClientCallbackHandler(token);
saslClCallbackHandler.handle(callbackArray);
verify(nameCallback).setName(anyString());
verify(passwordCallback).setPassword(any());
}
@Test
public void testDigestSaslClientCallbackHandlerWithException() {
final Token<? extends TokenIdentifier> token = createTokenMock();
when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
final DigestSaslClientCallbackHandler saslClCallbackHandler =
new DigestSaslClientCallbackHandler(token);
try {
saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });
} catch (UnsupportedCallbackException expEx) {
//expected
} catch (Exception ex) {
fail("testDigestSaslClientCallbackHandlerWithException error : " + ex.getMessage());
}
}
@Test
public void testHBaseSaslRpcClientCreation() throws Exception {
//creation kerberos principal check section
assertFalse(assertSuccessCreationKerberosPrincipal(null));
assertFalse(assertSuccessCreationKerberosPrincipal("DOMAIN.COM"));
assertFalse(assertSuccessCreationKerberosPrincipal("principal/DOMAIN.COM"));
if (!assertSuccessCreationKerberosPrincipal("principal/[email protected]")) {
// XXX: This can fail if kerberos support in the OS is not sane, see HBASE-10107.
// For now, don't assert, just warn
LOG.warn("Could not create a SASL client with valid Kerberos credential");
}
//creation digest principal check section
assertFalse(assertSuccessCreationDigestPrincipal(null, null));
assertFalse(assertSuccessCreationDigestPrincipal("", ""));
assertFalse(assertSuccessCreationDigestPrincipal("", null));
assertFalse(assertSuccessCreationDigestPrincipal(null, ""));
assertTrue(assertSuccessCreationDigestPrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
//creation simple principal check section
assertFalse(assertSuccessCreationSimplePrincipal("", ""));
assertFalse(assertSuccessCreationSimplePrincipal(null, null));
assertFalse(assertSuccessCreationSimplePrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
//exceptions check section
assertTrue(assertIOExceptionThenSaslClientIsNull(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
assertTrue(assertIOExceptionWhenGetStreamsBeforeConnectCall(
DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
}
@Test
public void testAuthMethodReadWrite() throws IOException {
DataInputBuffer in = new DataInputBuffer();
DataOutputBuffer out = new DataOutputBuffer();
assertAuthMethodRead(in, AuthMethod.SIMPLE);
assertAuthMethodRead(in, AuthMethod.KERBEROS);
assertAuthMethodRead(in, AuthMethod.DIGEST);
assertAuthMethodWrite(out, AuthMethod.SIMPLE);
assertAuthMethodWrite(out, AuthMethod.KERBEROS);
assertAuthMethodWrite(out, AuthMethod.DIGEST);
}
private void assertAuthMethodRead(DataInputBuffer in, AuthMethod authMethod)
throws IOException {
in.reset(new byte[] {authMethod.code}, 1);
assertEquals(authMethod, AuthMethod.read(in));
}
private void assertAuthMethodWrite(DataOutputBuffer out, AuthMethod authMethod)
throws IOException {
authMethod.write(out);
assertEquals(authMethod.code, out.getData()[0]);
out.reset();
}
private boolean assertIOExceptionWhenGetStreamsBeforeConnectCall(String principal,
String password) throws IOException {
boolean inState = false;
boolean outState = false;
DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider() {
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddress,
SecurityInfo securityInfo, Token<? extends TokenIdentifier> token,
boolean fallbackAllowed, Map<String, String> saslProps) {
return Mockito.mock(SaslClient.class);
}
};
HBaseSaslRpcClient rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,
createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),
Mockito.mock(SecurityInfo.class), false);
try {
rpcClient.getInputStream();
} catch(IOException ex) {
//Sasl authentication exchange hasn't completed yet
inState = true;
}
try {
rpcClient.getOutputStream();
} catch(IOException ex) {
//Sasl authentication exchange hasn't completed yet
outState = true;
}
return inState && outState;
}
private boolean assertIOExceptionThenSaslClientIsNull(String principal, String password) {
try {
DigestSaslClientAuthenticationProvider provider =
new DigestSaslClientAuthenticationProvider() {
@Override
public SaslClient createClient(Configuration conf, InetAddress serverAddress,
SecurityInfo securityInfo,
Token<? extends TokenIdentifier> token, boolean fallbackAllowed,
Map<String, String> saslProps) {
return null;
}
};
new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,
createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),
Mockito.mock(SecurityInfo.class), false);
return false;
} catch (IOException ex) {
return true;
}
}
private boolean assertSuccessCreationKerberosPrincipal(String principal) {
HBaseSaslRpcClient rpcClient = null;
try {
rpcClient = createSaslRpcClientForKerberos(principal);
} catch(Exception ex) {
LOG.error(ex.getMessage(), ex);
}
return rpcClient != null;
}
private boolean assertSuccessCreationDigestPrincipal(String principal, String password) {
HBaseSaslRpcClient rpcClient = null;
try {
rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(),
new DigestSaslClientAuthenticationProvider(),
createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),
Mockito.mock(SecurityInfo.class), false);
} catch(Exception ex) {
LOG.error(ex.getMessage(), ex);
}
return rpcClient != null;
}
private boolean assertSuccessCreationSimplePrincipal(String principal, String password) {
HBaseSaslRpcClient rpcClient = null;
try {
rpcClient = createSaslRpcClientSimple(principal, password);
} catch(Exception ex) {
LOG.error(ex.getMessage(), ex);
}
return rpcClient != null;
}
private HBaseSaslRpcClient createSaslRpcClientForKerberos(String principal)
throws IOException {
return new HBaseSaslRpcClient(HBaseConfiguration.create(),
new GssSaslClientAuthenticationProvider(), createTokenMock(),
Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);
}
private Token<? extends TokenIdentifier> createTokenMockWithCredentials(
String principal, String password)
throws IOException {
Token<? extends TokenIdentifier> token = createTokenMock();
if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(password)) {
when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
}
return token;
}
private HBaseSaslRpcClient createSaslRpcClientSimple(String principal, String password)
throws IOException {
return new HBaseSaslRpcClient(HBaseConfiguration.create(),
new SimpleSaslClientAuthenticationProvider(), createTokenMock(),
Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);
}
@SuppressWarnings("unchecked")
private Token<? extends TokenIdentifier> createTokenMock() {
return mock(Token.class);
}
@Test(expected = IOException.class)
public void testFailedEvaluateResponse() throws IOException {
    // prepare mocks for the SaslClient
SimpleSaslClientAuthenticationProvider mockProvider =
Mockito.mock(SimpleSaslClientAuthenticationProvider.class);
SaslClient mockClient = Mockito.mock(SaslClient.class);
Assert.assertNotNull(mockProvider);
Assert.assertNotNull(mockClient);
Mockito.when(mockProvider.createClient(Mockito.any(), Mockito.any(), Mockito.any(),
Mockito.any(), Mockito.anyBoolean(), Mockito.any())).thenReturn(mockClient);
HBaseSaslRpcClient rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(),
mockProvider, createTokenMock(),
Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);
//simulate getting an error from a failed saslServer.evaluateResponse
DataOutputBuffer errorBuffer = new DataOutputBuffer();
errorBuffer.writeInt(SaslStatus.ERROR.state);
WritableUtils.writeString(errorBuffer, IOException.class.getName());
WritableUtils.writeString(errorBuffer, "Invalid Token");
DataInputBuffer in = new DataInputBuffer();
in.reset(errorBuffer.getData(), 0, errorBuffer.getLength());
DataOutputBuffer out = new DataOutputBuffer();
//simulate that authentication exchange has completed quickly after sending the token
Mockito.when(mockClient.isComplete()).thenReturn(true);
rpcClient.saslConnect(in, out);
}
}
|
|
package com.deftwun.zombiecopter.box2dJson;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.Fixture;
import com.badlogic.gdx.physics.box2d.Joint;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Logger;
import com.badlogic.gdx.utils.ObjectMap;
/*
 Manages multiple bodies, joints, and fixtures. The scene can also be serialized
 and rebuilt from JSON data (using PhysicsSceneModel, BodyModel, FixtureModel, etc.).
 */
public class PhysicsScene{
private Logger logger = new Logger("PhysicsScene",Logger.INFO);
private static long uniqueId = 0;
private Body primaryBody = null;
private World world = null;
private ObjectMap<String, Joint> joints = new ObjectMap<String,Joint>();
private ObjectMap<String, Body> bodies = new ObjectMap<String,Body>();
private ObjectMap<String, Fixture> fixtures = new ObjectMap<String,Fixture>();
private PhysicsSceneListener listener = null;
public interface PhysicsSceneListener{
void fixtureAdded(Fixture f);
void bodyAdded(Body b);
void jointAdded(Joint j);
void fixtureRemoved(Fixture f);
void bodyRemoved(Body b);
void jointRemoved(Joint j);
}
public PhysicsScene(World w){
logger.debug("Creating scene using world");
world = w;
}
public PhysicsScene(){
logger.debug("Creating scene using new default world");
world = new World(new Vector2(),true);
}
public void setPhysicsSceneListener(PhysicsSceneListener l){
listener = l;
}
//Create everything from a PhysicsSceneModel
public void createFromModel(PhysicsSceneModel model){
logger.debug("create from model");
this.destroy();
for (BodyModel bm : model.bodyModels){
createBody(bm);
}
for (JointModel jm : model.jointModels){
createJoint(jm);
}
primaryBody = bodies.get(model.primaryBody,null);
if (primaryBody == null && bodies.size > 0)
primaryBody = bodies.values().toArray().get(0);
}
public void setPrimaryBody(String name){
logger.debug("Set primary body: " + name);
primaryBody = bodies.get(name,null);
}
public Body getPrimaryBody(){
return primaryBody;
}
//Create Body
public Body createBody(BodyModel bm){
logger.debug("create Body from model: " + bm.name);
String name = uniqueBodyName(bm.name);
Body body = bm.toBody(world);
if (primaryBody == null){
primaryBody = body;
}
bodies.put(name,body);
if (listener != null) listener.bodyAdded(body);
for (FixtureModel fm : bm.fixtures){
Fixture f = fm.toFixture(body);
fm.name = uniqueFixtureName(fm.name);
fixtures.put(fm.name,f);
if (listener != null) listener.fixtureAdded(f);
}
return body;
}
//Add Body
public String addBody(Body b){
String name = uniqueBodyName("body");
this.addBody(name, b);
return name;
}
//Add body
public String addBody(String name,Body b){
logger.debug("Add body: " + name);
if (primaryBody == null){
primaryBody = b;
}
bodies.put(name,b);
if (listener != null) listener.bodyAdded(b);
for (Fixture f : b.getFixtureList()){
String fixName = uniqueFixtureName("fixture");
logger.debug("add fixture: " + fixName);
fixtures.put(fixName,f);
if (listener != null) listener.fixtureAdded(f);
}
return name;
}
//Create Joint
public Joint createJoint(JointModel jm){
String name = uniqueJointName(jm.name);
logger.debug("Creating Joint from model: " + name + " = " + jm.bodyA + "+" + jm.bodyB);
Body bodyA = bodies.get(jm.bodyA),
bodyB = bodies.get(jm.bodyB);
if (bodyA == null) logger.error("Can't create Joint. Body: " + jm.bodyA + " not found in scene.");
        if (bodyB == null) logger.error("Can't create Joint. Body: " + jm.bodyB + " not found in scene.");
Joint joint = jm.toJoint(world,bodyA,bodyB);
joints.put(name,joint);
logger.debug("Joint count = " + joints.size);
if (listener != null) listener.jointAdded(joint);
return joint;
}
//Add joint
public String addJoint(Joint j){
String name = uniqueJointName("joint");
return this.addJoint(name,j);
}
//Add joint
public String addJoint(String name, Joint j){
logger.debug("Joint added: " + name);
joints.put(name,j);
if (listener != null) listener.jointAdded(j);
return name;
}
//Check if body exists in the scene
public boolean hasBody(String name){
return bodies.containsKey(name);
}
public boolean hasBody(Body b){
return bodies.containsValue(b,true);
}
//Check if fixture exists in the scene
public boolean hasFixture(String name){
return fixtures.containsKey(name);
}
public boolean hasFixture(Fixture f){
return fixtures.containsValue(f,true);
}
//Check if joint exists in the scene
public boolean hasJoint(String name){
return joints.containsKey(name);
}
public boolean hasJoint(Joint j){
return joints.containsValue(j,true);
}
//Get name of body
public String getName(Body b){
for (ObjectMap.Entry<String,Body> entry : bodies){
if (entry.value == b) return entry.key;
}
return "";
}
//Get name of fixture
public String getName(Fixture f){
for (ObjectMap.Entry<String,Fixture> entry : fixtures){
if (entry.value == f) return entry.key;
}
return "";
}
//Get name of joint
public String getName(Joint j){
for (ObjectMap.Entry<String,Joint> entry : joints){
if (entry.value == j) return entry.key;
}
return "";
}
//Get body
public Body getBody(String name){
return bodies.get(name);
}
//Get fixture
public Fixture getFixture(String name){
return fixtures.get(name);
}
//Get joint
public Joint getJoint(String name){
return joints.get(name);
}
//Get all fixtures
public Array<Fixture> getFixtures(){
return fixtures.values().toArray();
}
//Get all bodies
public Array<Body> getBodies(){
return bodies.values().toArray();
}
//Get all joints
public Array<Joint> getJoints(){
return joints.values().toArray();
}
//Destroy everything
public void destroy(){
logger.debug(String.format("Destroying: %d fixtures, %d bodies, & %d joints...",
fixtures.size,bodies.size,joints.size));
for (Joint j : joints.values()){
world.destroyJoint(j);
if (listener != null) listener.jointRemoved(j);
logger.debug("Joint Destroyed");
}
for (Body b : bodies.values()){
for (Fixture f : b.getFixtureList()){
b.destroyFixture(f);
if (listener != null) listener.fixtureRemoved(f);
logger.debug("Fixture Destroyed");
}
world.destroyBody(b);
if (listener != null) listener.bodyRemoved(b);
logger.debug("Body Destroyed");
}
fixtures.clear();
bodies.clear();
joints.clear();
uniqueId = 0;
primaryBody = null;
logger.debug("---Destroyed");
}
//Destroy body
public void destroyBody(String name){
logger.debug("Destroy body: " + name);
Body b = bodies.get(name,null);
if (b != null){
//Check if its the primary body. If so, then automatically set a new primary
if (b == primaryBody){
if (bodies.size > 0)
primaryBody = bodies.values().toArray().get(0);
else primaryBody = null;
}
world.destroyBody(b);
bodies.remove(name);
if (listener != null) listener.bodyRemoved(b);
}
else logger.debug("Could not destroy body: " + name + " not found");
}
//Destroy Fixture
public void destroyFixture(String name){
logger.debug("Destroy fixture: " + name);
Fixture f = fixtures.get(name);
if (f != null){
f.getBody().destroyFixture(f);
fixtures.remove(name);
if (listener != null) listener.fixtureRemoved(f);
}
else logger.debug("Could not destroy fixture: " + name + " not found");
}
//Destroy joint
public void destroyJoint(String name){
logger.debug("Destroy Joint: " + name);
Joint j = joints.get(name);
if (j != null){
world.destroyJoint(j);
joints.remove(name);
if (listener != null) listener.jointRemoved(j);
}
else logger.debug("Could not destroy joint: " + name + " not found");
}
//Create Scene model
public PhysicsSceneModel toSceneModel(){
logger.debug("Creating Scene Model");
PhysicsSceneModel physicsModel = new PhysicsSceneModel();
//Bodies
for (ObjectMap.Entry<String,Body> bodyEntry : bodies.entries()){
BodyModel bodyModel = new BodyModel(bodyEntry.key,bodyEntry.value);
//clear un-named fixtures. We'll manually add them with names included
bodyModel.fixtures.clear();
//Fixtures
for (ObjectMap.Entry<String,Fixture> fixtureEntry : fixtures.entries()){
if (fixtureEntry.value.getBody() == bodyEntry.value){
FixtureModel fixModel = new FixtureModel(fixtureEntry.key,fixtureEntry.value);
bodyModel.fixtures.add(fixModel);
}
}
physicsModel.bodyModels.add(bodyModel);
}
//Joints
for (ObjectMap.Entry<String,Joint> jointEntry : joints.entries()){
Joint j = jointEntry.value;
JointModel jointModel = new JointModel(jointEntry.key,jointEntry.value,getName(j.getBodyA()), getName(j.getBodyB()));
physicsModel.jointModels.add(jointModel);
}
return physicsModel;
}
//generates a unique body name using the given prefix
private String uniqueBodyName(String prefix){
String s = prefix;
        while (bodies.containsKey(s))
s = prefix + uniqueId++;
return s;
}
//generates a unique fixture name using the given prefix
private String uniqueFixtureName(String prefix){
String s = prefix;
while (fixtures.containsKey(s)) s = prefix + uniqueId++;
return s;
}
//generates a unique joint name using the given prefix
private String uniqueJointName(String prefix){
String s = prefix;
        while (joints.containsKey(s)) s = prefix + uniqueId++;
return s;
}
}
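A minimal usage sketch for the scene above, assumed to live in the same package as PhysicsScene. The Box2D world/body setup is standard libGDX; the body name and gravity values are illustrative assumptions, and the snapshot relies on the PhysicsSceneModel class referenced in this file.
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.CircleShape;
import com.badlogic.gdx.physics.box2d.World;

// Illustrative only: builds a one-body scene, registers it, and snapshots it to a model.
// Assumes the Box2D natives have already been loaded (e.g. via Box2D.init()).
public class PhysicsSceneSketch {
    public static void main(String[] args) {
        World world = new World(new Vector2(0f, -9.8f), true);
        PhysicsScene scene = new PhysicsScene(world);

        BodyDef def = new BodyDef();
        def.type = BodyDef.BodyType.DynamicBody;
        Body body = world.createBody(def);
        CircleShape shape = new CircleShape();
        shape.setRadius(0.5f);
        body.createFixture(shape, 1f);
        shape.dispose();

        String name = scene.addBody("player", body);       // registers the body and its fixtures
        scene.setPrimaryBody(name);
        PhysicsSceneModel snapshot = scene.toSceneModel();  // serializable view of the scene
        scene.destroy();                                    // tears everything down again
    }
}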
|
|
/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.test.api.task;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.ActivitiIllegalArgumentException;
import org.activiti.engine.ActivitiObjectNotFoundException;
import org.activiti.engine.ActivitiOptimisticLockingException;
import org.activiti.engine.ActivitiTaskAlreadyClaimedException;
import org.activiti.engine.history.HistoricActivityInstance;
import org.activiti.engine.history.HistoricDetail;
import org.activiti.engine.history.HistoricTaskInstance;
import org.activiti.engine.history.HistoricVariableUpdate;
import org.activiti.engine.impl.TaskServiceImpl;
import org.activiti.engine.impl.history.HistoryLevel;
import org.activiti.engine.impl.identity.Authentication;
import org.activiti.engine.impl.persistence.entity.CommentEntity;
import org.activiti.engine.impl.persistence.entity.HistoricDetailVariableInstanceUpdateEntity;
import org.activiti.engine.impl.test.PluggableActivitiTestCase;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Attachment;
import org.activiti.engine.task.Comment;
import org.activiti.engine.task.DelegationState;
import org.activiti.engine.task.Event;
import org.activiti.engine.task.IdentityLink;
import org.activiti.engine.task.IdentityLinkType;
import org.activiti.engine.task.Task;
import org.activiti.engine.test.Deployment;
public class TaskServiceTest extends PluggableActivitiTestCase {
public void testSaveTaskUpdate() throws Exception {
SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss");
Task task = taskService.newTask();
task.setDescription("description");
task.setName("taskname");
task.setPriority(0);
task.setAssignee("taskassignee");
task.setOwner("taskowner");
Date dueDate = sdf.parse("01/02/2003 04:05:06");
task.setDueDate(dueDate);
taskService.saveTask(task);
// Fetch the task again and update
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getDescription()).isEqualTo("description");
assertThat(task.getName()).isEqualTo("taskname");
assertThat(task.getAssignee()).isEqualTo("taskassignee");
assertThat(task.getOwner()).isEqualTo("taskowner");
assertThat(task.getDueDate()).isEqualTo(dueDate);
assertThat(task.getPriority()).isEqualTo(0);
task.setName("updatedtaskname");
task.setDescription("updateddescription");
task.setPriority(1);
task.setAssignee("updatedassignee");
task.setOwner("updatedowner");
dueDate = sdf.parse("01/02/2003 04:05:06");
task.setDueDate(dueDate);
taskService.saveTask(task);
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getName()).isEqualTo("updatedtaskname");
assertThat(task.getDescription()).isEqualTo("updateddescription");
assertThat(task.getAssignee()).isEqualTo("updatedassignee");
assertThat(task.getOwner()).isEqualTo("updatedowner");
assertThat(task.getDueDate()).isEqualTo(dueDate);
assertThat(task.getPriority()).isEqualTo(1);
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
HistoricTaskInstance historicTaskInstance = historyService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult();
assertThat(historicTaskInstance.getName()).isEqualTo("updatedtaskname");
assertThat(historicTaskInstance.getDescription()).isEqualTo("updateddescription");
assertThat(historicTaskInstance.getAssignee()).isEqualTo("updatedassignee");
assertThat(historicTaskInstance.getOwner()).isEqualTo("updatedowner");
assertThat(historicTaskInstance.getDueDate()).isEqualTo(dueDate);
assertThat(historicTaskInstance.getPriority()).isEqualTo(1);
}
// Finally, delete task
taskService.deleteTask(task.getId(), true);
}
public void testTaskOwner() {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
// Fetch the task again and update
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
task.setOwner("joesmoe");
taskService.saveTask(task);
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getOwner()).isEqualTo("joesmoe");
// Finally, delete task
taskService.deleteTask(task.getId(), true);
}
public void testTaskComments() {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
String taskId = task.getId();
Authentication.setAuthenticatedUserId("johndoe");
// Fetch the task again and update
taskService
.addComment(
taskId,
null,
"look at this \n isn't this great? slkdjf sldkfjs ldkfjs ldkfjs ldkfj sldkfj sldkfj sldkjg laksfg sdfgsd;flgkj ksajdhf skjdfh ksjdhf skjdhf kalskjgh lskh dfialurhg kajsh dfuieqpgkja rzvkfnjviuqerhogiuvysbegkjz lkhf ais liasduh flaisduh ajiasudh vaisudhv nsfd");
Comment comment = taskService.getTaskComments(taskId).get(0);
assertThat(comment.getUserId()).isEqualTo("johndoe");
assertThat(comment.getTaskId()).isEqualTo(taskId);
assertThat(comment.getProcessInstanceId()).isNull();
assertThat(((Event) comment).getMessage())
.isEqualTo("look at this isn't this great? slkdjf sldkfjs ldkfjs ldkfjs ldkfj sldkfj sldkfj sldkjg laksfg sdfgsd;flgkj ksajdhf skjdfh ksjdhf skjdhf kalskjgh lskh dfialurhg ...");
assertThat(comment.getFullMessage())
.isEqualTo("look at this \n isn't this great? slkdjf sldkfjs ldkfjs ldkfjs ldkfj sldkfj sldkfj sldkjg laksfg sdfgsd;flgkj ksajdhf skjdfh ksjdhf skjdhf kalskjgh lskh dfialurhg kajsh dfuieqpgkja rzvkfnjviuqerhogiuvysbegkjz lkhf ais liasduh flaisduh ajiasudh vaisudhv nsfd");
assertThat(comment.getTime()).isNotNull();
// Finally, delete task
taskService.deleteTask(taskId, true);
}
}
public void testCustomTaskComments() {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
String taskId = task.getId();
Authentication.setAuthenticatedUserId("johndoe");
String customType1 = "Type1";
String customType2 = "Type2";
Comment comment = taskService.addComment(taskId,
null,
"This is a regular comment");
Comment customComment1 = taskService.addComment(taskId,
null,
customType1,
"This is a custom comment of type Type1");
Comment customComment2 = taskService.addComment(taskId,
null,
customType1,
"This is another Type1 comment");
Comment customComment3 = taskService.addComment(taskId,
null,
customType2,
"This is another custom comment. Type2 this time!");
assertThat(comment.getType()).isEqualTo(CommentEntity.TYPE_COMMENT);
assertThat(customComment1.getType()).isEqualTo(customType1);
assertThat(customComment3.getType()).isEqualTo(customType2);
assertThat(taskService.getComment(comment.getId())).isNotNull();
assertThat(taskService.getComment(customComment1.getId())).isNotNull();
List<Comment> regularComments = taskService.getTaskComments(taskId);
assertThat(regularComments).hasSize(1);
assertThat(regularComments.get(0).getFullMessage()).isEqualTo("This is a regular comment");
List<Event> allComments = taskService.getTaskEvents(taskId);
assertThat(allComments).hasSize(4);
List<Comment> type2Comments = taskService.getCommentsByType(customType2);
assertThat(type2Comments).hasSize(1);
assertThat(type2Comments.get(0).getFullMessage()).isEqualTo("This is another custom comment. Type2 this time!");
List<Comment> taskTypeComments = taskService.getTaskComments(taskId, customType1);
assertThat(taskTypeComments).hasSize(2);
// Clean up
taskService.deleteTask(taskId, true);
}
}
public void testTaskAttachments() {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
String taskId = task.getId();
Authentication.setAuthenticatedUserId("johndoe");
// Fetch the task again and update
taskService.createAttachment("web page",
taskId,
null,
"weatherforcast",
"temperatures and more",
"http://weather.com");
Attachment attachment = taskService.getTaskAttachments(taskId).get(0);
assertThat(attachment.getName()).isEqualTo("weatherforcast");
assertThat(attachment.getDescription()).isEqualTo("temperatures and more");
assertThat(attachment.getType()).isEqualTo("web page");
assertThat(attachment.getTaskId()).isEqualTo(taskId);
assertThat(attachment.getProcessInstanceId()).isNull();
assertThat(attachment.getUrl()).isEqualTo("http://weather.com");
assertThat(taskService.getAttachmentContent(attachment.getId())).isNull();
// Finally, clean up
taskService.deleteTask(taskId);
assertThat(taskService.getTaskComments(taskId)).hasSize(0);
assertThat(historyService.createHistoricTaskInstanceQuery().taskId(taskId).list()).hasSize(1);
taskService.deleteTask(taskId,
true);
}
}
public void testSaveTaskAttachment() {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
String taskId = task.getId();
Authentication.setAuthenticatedUserId("johndoe");
// Fetch attachment and update its name
taskService.createAttachment("web page",
taskId,
null,
"weatherforcast",
"temperatures and more",
"http://weather.com");
Attachment attachment = taskService.getTaskAttachments(taskId).get(0);
attachment.setName("UpdatedName");
taskService.saveAttachment(attachment);
// Refetch and verify
attachment = taskService.getTaskAttachments(taskId).get(0);
assertThat(attachment.getName()).isEqualTo("UpdatedName");
// Finally, clean up
taskService.deleteTask(taskId);
assertThat(taskService.getTaskComments(taskId)).hasSize(0);
assertThat(historyService.createHistoricTaskInstanceQuery().taskId(taskId).list()).hasSize(1);
taskService.deleteTask(taskId,
true);
}
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testTaskAttachmentWithProcessInstanceId() {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
String processInstanceId = processInstance.getId();
taskService.createAttachment("web page",
null,
processInstanceId,
"weatherforcast",
"temperatures and more",
"http://weather.com");
Attachment attachment = taskService.getProcessInstanceAttachments(processInstanceId).get(0);
assertThat(attachment.getName()).isEqualTo("weatherforcast");
assertThat(attachment.getDescription()).isEqualTo("temperatures and more");
assertThat(attachment.getType()).isEqualTo("web page");
assertThat(attachment.getProcessInstanceId()).isEqualTo(processInstanceId);
assertThat(attachment.getTaskId()).isNull();
assertThat(attachment.getUrl()).isEqualTo("http://weather.com");
assertThat(taskService.getAttachmentContent(attachment.getId())).isNull();
// Finally, clean up
taskService.deleteAttachment(attachment.getId());
// TODO: Bad API design. Need to fix attachment/comment properly
((TaskServiceImpl) taskService).deleteComments(null,
processInstanceId);
}
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testMultipleProcessesStarted() {
// Start a few process instances
for (int i = 0; i < 20; i++) {
processEngine.getRuntimeService().startProcessInstanceByKey("oneTaskProcess");
}
// See if there are tasks for kermit
List<Task> tasks = processEngine.getTaskService().createTaskQuery().list();
assertThat(tasks).hasSize(20);
}
public void testTaskDelegation() {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
taskService.delegateTask(task.getId(), "joesmoe");
String taskId = task.getId();
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
assertThat(task.getAssignee()).isEqualTo("joesmoe");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.PENDING);
// try to complete (should fail)
Task exceptionTask = task;
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.complete(exceptionTask.getId()));
taskService.resolveTask(taskId);
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
assertThat(task.getAssignee()).isEqualTo("johndoe");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.RESOLVED);
task.setAssignee(null);
task.setDelegationState(null);
taskService.saveTask(task);
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
assertThat(task.getAssignee()).isNull();
assertThat(task.getDelegationState()).isNull();
task.setAssignee("jackblack");
task.setDelegationState(DelegationState.RESOLVED);
taskService.saveTask(task);
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
assertThat(task.getAssignee()).isEqualTo("jackblack");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.RESOLVED);
// Finally, delete task
taskService.deleteTask(taskId,
true);
}
public void testTaskDelegationThroughServiceCall() {
Task task = taskService.newTask();
task.setOwner("johndoe");
taskService.saveTask(task);
String taskId = task.getId();
// Fetch the task again and update
task = taskService.createTaskQuery().taskId(taskId).singleResult();
taskService.delegateTask(task.getId(),
"joesmoe");
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
assertThat(task.getAssignee()).isEqualTo("joesmoe");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.PENDING);
taskService.resolveTask(taskId);
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("johndoe");
assertThat(task.getAssignee()).isEqualTo("johndoe");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.RESOLVED);
// Finally, delete task
taskService.deleteTask(taskId,
true);
}
public void testTaskAssignee() {
Task task = taskService.newTask();
task.setAssignee("johndoe");
taskService.saveTask(task);
// Fetch the task again and update
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getAssignee()).isEqualTo("johndoe");
task.setAssignee("joesmoe");
taskService.saveTask(task);
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getAssignee()).isEqualTo("joesmoe");
// Finally, delete task
taskService.deleteTask(task.getId(),
true);
}
public void testSaveTaskNullTask() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.saveTask(null))
.withMessageContaining("task is null");
}
public void testDeleteTaskNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.deleteTask(null));
}
public void testDeleteTaskUnexistingTaskId() {
// Deleting unexisting task should be silently ignored
taskService.deleteTask("unexistingtaskid");
}
public void testDeleteTasksNullTaskIds() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.deleteTasks(null));
}
public void testDeleteTasksTaskIdsUnexistingTaskId() {
Task existingTask = taskService.newTask();
taskService.saveTask(existingTask);
// The unexisting taskId's should be silently ignored. Existing task should have been deleted.
taskService.deleteTasks(asList("unexistingtaskid1", existingTask.getId()), true);
existingTask = taskService.createTaskQuery().taskId(existingTask.getId()).singleResult();
assertThat(existingTask).isNull();
}
public void testDeleteTaskIdentityLink() {
Task task = null;
try {
task = taskService.newTask();
task.setName("test");
taskService.saveTask(task);
taskService.addCandidateGroup(task.getId(),
"sales");
taskService.addCandidateUser(task.getId(),
"kermit");
assertThat(taskService.createTaskQuery().taskCandidateGroup("sales").singleResult()).isNotNull();
assertThat(taskService.createTaskQuery().taskCandidateUser("kermit").singleResult()).isNotNull();
// Delete identity link for group
taskService.deleteGroupIdentityLink(task.getId(),
"sales",
"candidate");
// Link should be removed
assertThat(taskService.createTaskQuery().taskCandidateGroup("sales").singleResult()).isNull();
// User link should remain unaffected
assertThat(taskService.createTaskQuery().taskCandidateUser("kermit").singleResult()).isNotNull();
} finally {
// Adhoc task not part of deployment, cleanup
if (task != null && task.getId() != null) {
taskService.deleteTask(task.getId());
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
historyService.deleteHistoricTaskInstance(task.getId());
}
}
}
}
public void testClaimNullArguments() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.claim(null, "userid"))
.withMessageContaining("taskId is null");
}
public void testClaimUnexistingTaskId() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.claim("unexistingtaskid", "user"))
.withMessageContaining("Cannot find task with id unexistingtaskid")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testClaimAlreadyClaimedTaskByOtherUser() {
Task task = taskService.newTask();
taskService.saveTask(task);
// Claim task the first time
taskService.claim(task.getId(), "firstuser");
assertThatExceptionOfType(ActivitiTaskAlreadyClaimedException.class)
.isThrownBy(() -> taskService.claim(task.getId(), "seconduser"))
.withMessageContaining("Task '" + task.getId() + "' is already claimed by someone else.");
taskService.deleteTask(task.getId(), true);
}
public void testClaimAlreadyClaimedTaskBySameUser() {
Task task = taskService.newTask();
taskService.saveTask(task);
// Claim task the first time
taskService.claim(task.getId(), "user");
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
// Claim the task again with the same user. No exception should be thrown
taskService.claim(task.getId(), "user");
taskService.deleteTask(task.getId(), true);
}
public void testUnClaimTask() {
Task task = taskService.newTask();
taskService.saveTask(task);
// Claim task the first time
taskService.claim(task.getId(), "user");
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getAssignee()).isEqualTo("user");
// Unclaim the task
taskService.unclaim(task.getId());
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getAssignee()).isNull();
taskService.deleteTask(task.getId(), true);
}
public void testCompleteTaskNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.complete(null))
.withMessageContaining("taskId is null");
}
public void testCompleteTaskUnexistingTaskId() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.complete("unexistingtask"))
.withMessageContaining("Cannot find task with id unexistingtask")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testCompleteTaskWithParametersNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.complete(null))
.withMessageContaining("taskId is null");
}
public void testCompleteTaskWithParametersUnexistingTaskId() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.complete("unexistingtask"))
.withMessageContaining("Cannot find task with id unexistingtask")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testCompleteTaskWithParametersNullParameters() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.complete(taskId, null);
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
historyService.deleteHistoricTaskInstance(taskId);
}
// Fetch the task again
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task).isNull();
}
@SuppressWarnings("unchecked")
public void testCompleteTaskWithParametersEmptyParameters() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.complete(taskId,
Collections.EMPTY_MAP);
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
historyService.deleteHistoricTaskInstance(taskId);
}
// Fetch the task again
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task).isNull();
}
@Deployment(resources = {"org/activiti/engine/test/api/twoTasksProcess.bpmn20.xml"})
public void testCompleteWithParametersTask() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksProcess");
// Fetch first task
Task task = taskService.createTaskQuery().singleResult();
assertThat(task.getName()).isEqualTo("First task");
// Complete first task
Map<String, Object> taskParams = new HashMap<String, Object>();
taskParams.put("myParam",
"myValue");
taskService.complete(task.getId(),
taskParams);
// Fetch second task
task = taskService.createTaskQuery().singleResult();
assertThat(task.getName()).isEqualTo("Second task");
// Verify task parameters set on execution
Map<String, Object> variables = runtimeService.getVariables(processInstance.getId());
assertThat(variables).hasSize(1);
assertThat(variables.get("myParam")).isEqualTo("myValue");
}
@Deployment(resources = {"org/activiti/engine/test/api/twoTasksProcess.bpmn20.xml"})
public void testCompleteWithParametersTask2() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksProcess");
// Fetch first task
Task task = taskService.createTaskQuery().singleResult();
assertThat(task.getName()).isEqualTo("First task");
// Complete first task
Map<String, Object> taskParams = new HashMap<String, Object>();
taskParams.put("myParam", "myValue");
taskService.complete(task.getId(), taskParams, false); // only difference with the previous test
// Fetch second task
task = taskService.createTaskQuery().singleResult();
assertThat(task.getName()).isEqualTo("Second task");
// Verify task parameters set on execution
Map<String, Object> variables = runtimeService.getVariables(processInstance.getId());
assertThat(variables).hasSize(1);
assertThat(variables.get("myParam")).isEqualTo("myValue");
}
@Deployment
public void testCompleteWithTaskLocalParameters() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("testTaskLocalVars");
// Fetch first task
Task task = taskService.createTaskQuery().singleResult();
// Complete first task
Map<String, Object> taskParams = new HashMap<String, Object>();
taskParams.put("a", 1);
taskParams.put("b", 1);
taskService.complete(task.getId(), taskParams, true);
// Verify vars are not stored process instance wide
assertThat(runtimeService.getVariable(processInstance.getId(), "a")).isNull();
assertThat(runtimeService.getVariable(processInstance.getId(), "b")).isNull();
// verify script listener has done its job
assertThat(runtimeService.getVariable(processInstance.getId(), "sum")).isEqualTo(Integer.valueOf(2));
// Fetch second task
taskService.createTaskQuery().singleResult();
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskWithFormKeyProcess.bpmn20.xml"})
public void testCompleteTaskWithFormKey() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskWithFormProcess");
// Fetch task
Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(task.getName()).isEqualTo("my task");
assertThat(task.getFormKey()).isEqualTo("myFormKey");
assertThat(task.getAssignee()).isEqualTo("myAssignee");
assertThat(task.getOwner()).isEqualTo("myOwner");
assertThat(task.getCategory()).isEqualTo("myCategory");
assertThat(task.getPriority()).isEqualTo(60);
assertThat(task.getDueDate()).isNotNull();
// Complete task
taskService.complete(task.getId());
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
HistoricTaskInstance historicTask = historyService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult();
assertThat(historicTask.getName()).isEqualTo("my task");
assertThat(historicTask.getFormKey()).isEqualTo("myFormKey");
assertThat(historicTask.getAssignee()).isEqualTo("myAssignee");
assertThat(historicTask.getOwner()).isEqualTo("myOwner");
assertThat(historicTask.getCategory()).isEqualTo("myCategory");
assertThat(historicTask.getPriority()).isEqualTo(60);
assertThat(historicTask.getDueDate()).isNotNull();
}
}
public void testSetAssignee() {
Task task = taskService.newTask();
assertThat(task.getAssignee()).isNull();
taskService.saveTask(task);
// Set assignee
taskService.setAssignee(task.getId(), "user");
// Fetch task again
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getAssignee()).isEqualTo("user");
// Set assignee to null
taskService.setAssignee(task.getId(), null);
taskService.deleteTask(task.getId(), true);
}
public void testSetAssigneeNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.setAssignee(null, "userId"))
.withMessageContaining("taskId is null");
}
public void testSetAssigneeUnexistingTask() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.setAssignee("unexistingTaskId", "user"))
.withMessageContaining("Cannot find task with id unexistingTaskId")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testAddCandidateUserDuplicate() {
// Check behavior when adding the same user twice as candidate
Task task = taskService.newTask();
taskService.saveTask(task);
taskService.addCandidateUser(task.getId(), "user");
// Add as candidate the second time
taskService.addCandidateUser(task.getId(), "user");
taskService.deleteTask(task.getId(), true);
}
public void testAddCandidateUserNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addCandidateUser(null, "userId"))
.withMessageContaining("taskId is null");
}
public void testAddCandidateUserNullUserId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addCandidateUser("taskId", null))
.withMessageContaining("identityId is null");
}
public void testAddCandidateUserUnexistingTask() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.addCandidateUser("unexistingTaskId", "user"))
.withMessageContaining("Cannot find task with id unexistingTaskId")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testAddCandidateGroupNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addCandidateGroup(null, "groupId"))
.withMessageContaining("taskId is null");
}
public void testAddCandidateGroupNullGroupId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addCandidateGroup("taskId", null))
.withMessageContaining("identityId is null");
}
public void testAddCandidateGroupUnexistingTask() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.addCandidateGroup("unexistingTaskId", "group"))
.withMessageContaining("Cannot find task with id unexistingTaskId")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testAddGroupIdentityLinkNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addGroupIdentityLink(null, "groupId", IdentityLinkType.CANDIDATE))
.withMessageContaining("taskId is null");
}
public void testAddGroupIdentityLinkNullUserId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addGroupIdentityLink("taskId", null, IdentityLinkType.CANDIDATE))
.withMessageContaining("identityId is null");
}
public void testAddGroupIdentityLinkUnexistingTask() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.addGroupIdentityLink("unexistingTaskId", "user", IdentityLinkType.CANDIDATE))
.withMessageContaining("Cannot find task with id unexistingTaskId")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testAddUserIdentityLinkNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addUserIdentityLink(null, "userId", IdentityLinkType.CANDIDATE))
.withMessageContaining("taskId is null");
}
public void testAddUserIdentityLinkNullUserId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.addUserIdentityLink("taskId", null, IdentityLinkType.CANDIDATE))
.withMessageContaining("identityId is null");
}
public void testAddUserIdentityLinkUnexistingTask() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.addUserIdentityLink("unexistingTaskId", "user", IdentityLinkType.CANDIDATE))
.withMessageContaining("Cannot find task with id unexistingTaskId")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testGetIdentityLinksWithCandidateUser() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.addCandidateUser(taskId, "kermit");
List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(taskId);
assertThat(identityLinks).hasSize(1);
assertThat(identityLinks.get(0).getUserId()).isEqualTo("kermit");
assertThat(identityLinks.get(0).getGroupId()).isNull();
assertThat(identityLinks.get(0).getType()).isEqualTo(IdentityLinkType.CANDIDATE);
// cleanup
taskService.deleteTask(taskId, true);
}
public void testGetIdentityLinksWithCandidateGroup() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.addCandidateGroup(taskId,
"muppets");
List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(taskId);
assertThat(identityLinks).hasSize(1);
assertThat(identityLinks.get(0).getGroupId()).isEqualTo("muppets");
assertThat(identityLinks.get(0).getUserId()).isNull();
assertThat(identityLinks.get(0).getType()).isEqualTo(IdentityLinkType.CANDIDATE);
// cleanup
taskService.deleteTask(taskId,
true);
}
public void testGetIdentityLinksWithAssignee() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.claim(taskId,
"kermit");
List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(taskId);
assertThat(identityLinks).hasSize(1);
assertThat(identityLinks.get(0).getUserId()).isEqualTo("kermit");
assertThat(identityLinks.get(0).getGroupId()).isNull();
assertThat(identityLinks.get(0).getType()).isEqualTo(IdentityLinkType.ASSIGNEE);
// cleanup
taskService.deleteTask(taskId,
true);
}
public void testGetIdentityLinksWithNonExistingAssignee() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.claim(taskId,
"nonExistingAssignee");
List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(taskId);
assertThat(identityLinks).hasSize(1);
assertThat(identityLinks.get(0).getUserId()).isEqualTo("nonExistingAssignee");
assertThat(identityLinks.get(0).getGroupId()).isNull();
assertThat(identityLinks.get(0).getType()).isEqualTo(IdentityLinkType.ASSIGNEE);
// cleanup
taskService.deleteTask(taskId,
true);
}
public void testGetIdentityLinksWithOwner() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.claim(taskId,
"kermit");
taskService.delegateTask(taskId,
"fozzie");
List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(taskId);
assertThat(identityLinks).hasSize(2);
IdentityLink assignee = identityLinks.get(0);
assertThat(assignee.getUserId()).isEqualTo("fozzie");
assertThat(assignee.getGroupId()).isNull();
assertThat(assignee.getType()).isEqualTo(IdentityLinkType.ASSIGNEE);
IdentityLink owner = identityLinks.get(1);
assertThat(owner.getUserId()).isEqualTo("kermit");
assertThat(owner.getGroupId()).isNull();
assertThat(owner.getType()).isEqualTo(IdentityLinkType.OWNER);
// cleanup
taskService.deleteTask(taskId,
true);
}
public void testGetIdentityLinksWithNonExistingOwner() {
Task task = taskService.newTask();
taskService.saveTask(task);
String taskId = task.getId();
taskService.claim(taskId,
"nonExistingOwner");
taskService.delegateTask(taskId,
"nonExistingAssignee");
List<IdentityLink> identityLinks = taskService.getIdentityLinksForTask(taskId);
assertThat(identityLinks).hasSize(2);
IdentityLink assignee = identityLinks.get(0);
assertThat(assignee.getUserId()).isEqualTo("nonExistingAssignee");
assertThat(assignee.getGroupId()).isNull();
assertThat(assignee.getType()).isEqualTo(IdentityLinkType.ASSIGNEE);
IdentityLink owner = identityLinks.get(1);
assertThat(owner.getUserId()).isEqualTo("nonExistingOwner");
assertThat(owner.getGroupId()).isNull();
assertThat(owner.getType()).isEqualTo(IdentityLinkType.OWNER);
// cleanup
taskService.deleteTask(taskId,
true);
}
public void testSetPriority() {
Task task = taskService.newTask();
taskService.saveTask(task);
taskService.setPriority(task.getId(),
12345);
// Fetch task again to check if the priority is set
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getPriority()).isEqualTo(12345);
taskService.deleteTask(task.getId(),
true);
}
public void testSetPriorityUnexistingTaskId() {
assertThatExceptionOfType(ActivitiObjectNotFoundException.class)
.isThrownBy(() -> taskService.setPriority("unexistingtask", 12345))
.withMessageContaining("Cannot find task with id unexistingtask")
.satisfies(ae -> assertThat(ae.getObjectClass()).isEqualTo(Task.class));
}
public void testSetPriorityNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.setPriority(null, 12345))
.withMessageContaining("taskId is null");
}
public void testSetDueDate() {
Task task = taskService.newTask();
taskService.saveTask(task);
// Set the due date to a non-null value
Date now = new Date();
taskService.setDueDate(task.getId(),
now);
// Fetch task to check if the due date was persisted
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getDueDate()).isNotNull();
// Set the due date to null
taskService.setDueDate(task.getId(),
null);
// Re-fetch the task to make sure the due date was set to null
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getDueDate()).isNull();
taskService.deleteTask(task.getId(),
true);
}
public void testSetDueDateUnexistingTaskId() {
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.setDueDate("unexistingtask", new Date()))
.withMessageContaining("Cannot find task with id unexistingtask");
}
public void testSetDueDateNullTaskId() {
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.setDueDate(null, new Date()))
.withMessageContaining("taskId is null");
}
/**
* @see <a href="https://activiti.atlassian.net/browse/ACT-1059">https://activiti.atlassian.net/browse/ACT-1059</a>
*/
public void testSetDelegationState() {
Task task = taskService.newTask();
task.setOwner("wuzh");
taskService.saveTask(task);
taskService.delegateTask(task.getId(),
"other");
String taskId = task.getId();
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("wuzh");
assertThat(task.getAssignee()).isEqualTo("other");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.PENDING);
task.setDelegationState(DelegationState.RESOLVED);
taskService.saveTask(task);
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getOwner()).isEqualTo("wuzh");
assertThat(task.getAssignee()).isEqualTo("other");
assertThat(task.getDelegationState()).isEqualTo(DelegationState.RESOLVED);
taskService.deleteTask(taskId,
true);
}
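// Helper: with FULL history enabled, asserts that exactly one historic variable update with a null value
// (i.e. the recorded deletion of the variable) was written for the given process instance.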
private void checkHistoricVariableUpdateEntity(String variableName,
String processInstanceId) {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.FULL)) {
boolean deletedVariableUpdateFound = false;
List<HistoricDetail> resultSet = historyService.createHistoricDetailQuery().processInstanceId(processInstanceId).list();
for (HistoricDetail currentHistoricDetail : resultSet) {
assertThat(currentHistoricDetail).isInstanceOf(HistoricDetailVariableInstanceUpdateEntity.class);
HistoricDetailVariableInstanceUpdateEntity historicVariableUpdate = (HistoricDetailVariableInstanceUpdateEntity) currentHistoricDetail;
if (historicVariableUpdate.getName().equals(variableName)) {
if (historicVariableUpdate.getValue() == null) {
if (deletedVariableUpdateFound) {
fail("Mismatch: A HistoricVariableUpdateEntity with a null value already found");
} else {
deletedVariableUpdateFound = true;
}
}
}
}
assertThat(deletedVariableUpdateFound).isTrue();
}
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testRemoveVariable() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
taskService.setVariable(currentTask.getId(), "variable1", "value1");
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isEqualTo("value1");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isNull();
taskService.removeVariable(currentTask.getId(), "variable1");
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isNull();
checkHistoricVariableUpdateEntity("variable1", processInstance.getId());
}
public void testRemoveVariableNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.removeVariable(null, "variable"))
.withMessageContaining("taskId is null");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableByHistoricActivityInstance() {
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.FULL)) {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
assertThat(processInstance).isNotNull();
Task task = taskService.createTaskQuery().singleResult();
taskService.setVariable(task.getId(),
"variable1",
"value1");
taskService.setVariable(task.getId(),
"variable1",
"value2");
HistoricActivityInstance historicActivitiInstance = historyService.createHistoricActivityInstanceQuery().processInstanceId(processInstance.getId())
.activityId("theTask").singleResult();
assertThat(historicActivitiInstance).isNotNull();
List<HistoricDetail> resultSet = historyService.createHistoricDetailQuery().variableUpdates().activityInstanceId(historicActivitiInstance.getId())
.list();
assertThat(resultSet).hasSize(2);
assertThat(resultSet)
.extracting(h -> ((HistoricVariableUpdate) h).getValue())
.containsExactlyInAnyOrder("value1", "value2");
assertThat(resultSet)
.extracting(h -> ((HistoricVariableUpdate) h).getVariableName())
.containsOnly("variable1");
}
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testRemoveVariables() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
Map<String, Object> varsToDelete = new HashMap<String, Object>();
varsToDelete.put("variable1", "value1");
varsToDelete.put("variable2", "value2");
taskService.setVariables(currentTask.getId(), varsToDelete);
taskService.setVariable(currentTask.getId(), "variable3", "value3");
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isEqualTo("value1");
assertThat(taskService.getVariable(currentTask.getId(), "variable2")).isEqualTo("value2");
assertThat(taskService.getVariable(currentTask.getId(), "variable3")).isEqualTo("value3");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable2")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable3")).isNull();
taskService.removeVariables(currentTask.getId(), varsToDelete.keySet());
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isNull();
assertThat(taskService.getVariable(currentTask.getId(), "variable2")).isNull();
assertThat(taskService.getVariable(currentTask.getId(), "variable3")).isEqualTo("value3");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable2")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable3")).isNull();
checkHistoricVariableUpdateEntity("variable1", processInstance.getId());
checkHistoricVariableUpdateEntity("variable2", processInstance.getId());
}
public void testRemoveVariablesNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.removeVariables(null, emptyList()))
.withMessageContaining("taskId is null");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testRemoveVariableLocal() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
taskService.setVariableLocal(currentTask.getId(), "variable1", "value1");
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isEqualTo("value1");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isEqualTo("value1");
taskService.removeVariableLocal(currentTask.getId(), "variable1");
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isNull();
checkHistoricVariableUpdateEntity("variable1", processInstance.getId());
}
public void testRemoveVariableLocalNullTaskId() {
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.removeVariableLocal(null, "variable"))
.withMessageContaining("taskId is null");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testRemoveVariablesLocal() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
Map<String, Object> varsToDelete = new HashMap<String, Object>();
varsToDelete.put("variable1", "value1");
varsToDelete.put("variable2", "value2");
taskService.setVariablesLocal(currentTask.getId(), varsToDelete);
taskService.setVariableLocal(currentTask.getId(), "variable3", "value3");
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isEqualTo("value1");
assertThat(taskService.getVariable(currentTask.getId(), "variable2")).isEqualTo("value2");
assertThat(taskService.getVariable(currentTask.getId(), "variable3")).isEqualTo("value3");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isEqualTo("value1");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable2")).isEqualTo("value2");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable3")).isEqualTo("value3");
taskService.removeVariables(currentTask.getId(), varsToDelete.keySet());
assertThat(taskService.getVariable(currentTask.getId(), "variable1")).isNull();
assertThat(taskService.getVariable(currentTask.getId(), "variable2")).isNull();
assertThat(taskService.getVariable(currentTask.getId(), "variable3")).isEqualTo("value3");
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable1")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable2")).isNull();
assertThat(taskService.getVariableLocal(currentTask.getId(), "variable3")).isEqualTo("value3");
checkHistoricVariableUpdateEntity("variable1", processInstance.getId());
checkHistoricVariableUpdateEntity("variable2", processInstance.getId());
}
public void testRemoveVariablesLocalNullTaskId() {
assertThatExceptionOfType(ActivitiIllegalArgumentException.class)
.isThrownBy(() -> taskService.removeVariablesLocal(null, emptyList()))
.withMessageContaining("taskId is null");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testUserTaskOptimisticLocking() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task task1 = taskService.createTaskQuery().singleResult();
Task task2 = taskService.createTaskQuery().singleResult();
task1.setDescription("test description one");
taskService.saveTask(task1);
assertThatExceptionOfType(ActivitiOptimisticLockingException.class)
.isThrownBy(() -> {
task2.setDescription("test description two");
taskService.saveTask(task2);
});
}
public void testDeleteTaskWithDeleteReason() {
// ACT-900: deleteReason can be manually specified - can only be
// validated when historyLevel > ACTIVITY
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
Task task = taskService.newTask();
task.setName("test task");
taskService.saveTask(task);
assertThat(task.getId()).isNotNull();
taskService.deleteTask(task.getId(),
"deleted for testing purposes");
HistoricTaskInstance historicTaskInstance = historyService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult();
assertThat(historicTaskInstance).isNotNull();
assertThat(historicTaskInstance.getDeleteReason()).isEqualTo("deleted for testing purposes");
// Delete the historic task that is left behind; it will not be cleaned up
// automatically because this task is not part of a process
taskService.deleteTask(task.getId(),
true);
}
}
public void testResolveTaskNullTaskId() {
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.resolveTask(null))
.withMessageContaining("taskId is null");
}
public void testResolveTaskUnexistingTaskId() {
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.resolveTask("blergh"))
.withMessageContaining("Cannot find task with id");
}
public void testResolveTaskWithParametersNullParameters() {
Task task = taskService.newTask();
task.setDelegationState(DelegationState.PENDING);
taskService.saveTask(task);
String taskId = task.getId();
taskService.resolveTask(taskId,
null);
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
historyService.deleteHistoricTaskInstance(taskId);
}
// Fetch the task again
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getDelegationState()).isEqualTo(DelegationState.RESOLVED);
taskService.deleteTask(taskId,
true);
}
public void testResolveTaskWithParametersEmptyParameters() {
Task task = taskService.newTask();
task.setDelegationState(DelegationState.PENDING);
taskService.saveTask(task);
String taskId = task.getId();
taskService.resolveTask(taskId, emptyMap());
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
historyService.deleteHistoricTaskInstance(taskId);
}
// Fetch the task again
task = taskService.createTaskQuery().taskId(taskId).singleResult();
assertThat(task.getDelegationState()).isEqualTo(DelegationState.RESOLVED);
taskService.deleteTask(taskId, true);
}
@Deployment(resources = {"org/activiti/engine/test/api/twoTasksProcess.bpmn20.xml"})
public void testResolveWithParametersTask() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("twoTasksProcess");
// Fetch first task
Task task = taskService.createTaskQuery().singleResult();
assertThat(task.getName()).isEqualTo("First task");
taskService.delegateTask(task.getId(), "johndoe");
// Resolve first task
Map<String, Object> taskParams = new HashMap<String, Object>();
taskParams.put("myParam", "myValue");
taskService.resolveTask(task.getId(), taskParams);
// Verify that task is resolved
task = taskService.createTaskQuery().taskDelegationState(DelegationState.RESOLVED).singleResult();
assertThat(task.getName()).isEqualTo("First task");
// Verify task parameters set on execution
Map<String, Object> variables = runtimeService.getVariables(processInstance.getId());
assertThat(variables).hasSize(1);
assertThat(variables.get("myParam")).isEqualTo("myValue");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testDeleteTaskPartOfProcess() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task task = taskService.createTaskQuery().singleResult();
assertThat(task).isNotNull();
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.deleteTask(task.getId()))
.withMessage("The task cannot be deleted because is part of a running process");
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.deleteTask(task.getId(), true))
.withMessage("The task cannot be deleted because is part of a running process");
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.deleteTask(task.getId(), "test"))
.withMessage("The task cannot be deleted because is part of a running process");
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.deleteTasks(singletonList(task.getId())))
.withMessage("The task cannot be deleted because is part of a running process");
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.deleteTasks(singletonList(task.getId()), true))
.withMessage("The task cannot be deleted because is part of a running process");
assertThatExceptionOfType(ActivitiException.class)
.isThrownBy(() -> taskService.deleteTasks(singletonList(task.getId()), "test"))
.withMessage("The task cannot be deleted because is part of a running process");
}
@Deployment
public void testFormKeyExpression() {
runtimeService.startProcessInstanceByKey("testFormExpression",
singletonMap("var", "abc"));
Task task = taskService.createTaskQuery().singleResult();
assertThat(task.getFormKey()).isEqualTo("first-form.json");
taskService.complete(task.getId());
task = taskService.createTaskQuery().singleResult();
assertThat(task.getFormKey()).isEqualTo("form-abc.json");
task.setFormKey("form-changed.json");
taskService.saveTask(task);
task = taskService.createTaskQuery().singleResult();
assertThat(task.getFormKey()).isEqualTo("form-changed.json");
if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.AUDIT)) {
HistoricTaskInstance historicTaskInstance = historyService.createHistoricTaskInstanceQuery().taskId(task.getId()).singleResult();
assertThat(historicTaskInstance.getFormKey()).isEqualTo("form-changed.json");
}
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableLocalWithCast() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
taskService.setVariableLocal(currentTask.getId(), "variable1", "value1");
String variable = taskService.getVariableLocal(currentTask.getId(), "variable1", String.class);
assertThat(variable).isEqualTo("value1");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableLocalNotExistingWithCast() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
String variable = taskService.getVariableLocal(currentTask.getId(), "variable1", String.class);
assertThat(variable).isNull();
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableLocalWithInvalidCast() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
taskService.setVariableLocal(currentTask.getId(),
"variable1",
"value1");
assertThatExceptionOfType(ClassCastException.class)
.isThrownBy(() -> taskService.getVariableLocal(currentTask.getId(), "variable1", Boolean.class));
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableWithCast() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
taskService.setVariable(currentTask.getId(), "variable1", "value1");
String variable = taskService.getVariable(currentTask.getId(), "variable1", String.class);
assertThat(variable).isEqualTo("value1");
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableNotExistingWithCast() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
String variable = taskService.getVariable(currentTask.getId(), "variable1", String.class);
assertThat(variable).isNull();
}
@Deployment(resources = {"org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml"})
public void testGetVariableWithInvalidCast() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Task currentTask = taskService.createTaskQuery().singleResult();
taskService.setVariable(currentTask.getId(), "variable1", "value1");
assertThatExceptionOfType(ClassCastException.class)
.isThrownBy(() -> taskService.getVariable(currentTask.getId(), "variable1", Boolean.class));
}
public void testClaimTime() {
Task task = taskService.newTask();
taskService.saveTask(task);
assertThat(task.getClaimTime()).isNull();
// Claim task
taskService.claim(task.getId(), "user");
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getClaimTime()).isNotNull();
// Unclaim task
taskService.unclaim(task.getId());
task = taskService.createTaskQuery().taskId(task.getId()).singleResult();
assertThat(task.getClaimTime()).isNull();
taskService.deleteTask(task.getId(), true);
}
}
|
|
// The MIT License (MIT)
//
// Copyright (c) 2019 Timothy D. Jones
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package io.github.jonestimd.swing.component;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.text.Format;
import java.text.ParseException;
import javax.swing.ComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.plaf.basic.BasicComboBoxEditor;
import io.github.jonestimd.swing.validation.ValidatedTextField;
import io.github.jonestimd.swing.validation.Validator;
/**
* Provides the text field ({@link ValidatedTextField}) for an editable {@link BeanListComboBox}.
* Uses {@link Format#parseObject(String)} to create an item from the input text.
* @param <T> {@link BeanListComboBox} list item class
*/
public class BeanListComboBoxEditor<T> extends BasicComboBoxEditor {
private ComboBoxModel<T> model;
private Format format;
private PrefixSelector<T> prefixSelector;
private boolean autoSelecting = false;
private DocumentListener documentHandler = new DocumentListener() {
public void changedUpdate(DocumentEvent e) {
documentChange();
}
public void insertUpdate(DocumentEvent e) {
documentChange();
}
public void removeUpdate(DocumentEvent e) {
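// no-op: only insertions and changes trigger auto-completion (see insertUpdate/changedUpdate)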
}
};
/**
* Create a combo box editor with the default {@link PrefixSelector} (first match alphabetically).
*/
public BeanListComboBoxEditor(JComboBox<T> comboBox, Format format, Validator<String> validator) {
this(comboBox, format, validator, new FormatPrefixSelector<>(format));
}
public BeanListComboBoxEditor(JComboBox<T> comboBox, Format format, Validator<String> validator, PrefixSelector<T> prefixSelector) {
this.model = comboBox.getModel();
this.format = format;
this.prefixSelector = prefixSelector;
editor = new BorderlessTextField("", 9, validator);
editor.getDocument().addDocumentListener(documentHandler);
editor.addFocusListener(new FocusAdapter() {
@Override
public void focusLost(FocusEvent e) {
if (e.getOppositeComponent() != comboBox && getItem() != null) {
editor.setText(itemToString(getItem()));
}
}
});
}
public boolean isAutoSelecting() {
return autoSelecting;
}
@Override
public ValidatedTextField getEditorComponent() {
return (ValidatedTextField) super.getEditorComponent();
}
@Override
public void setItem(Object anObject) {
super.setItem(anObject == null ? null : format.format(anObject));
}
@Override
public T getItem() {
return getItem((String) super.getItem());
}
public boolean isNew(Object item) {
return item != null && indexOf(format.format(item)) < 0;
}
protected T getItem(String displayText) {
if (displayText != null && displayText.length() > 0) {
int index = indexOf(displayText);
return (index >= 0 ? model.getElementAt(index) : parseInput(displayText));
}
return null;
}
@SuppressWarnings("unchecked")
protected T parseInput(String displayText) {
try {
return (T) format.parseObject(displayText);
} catch (ParseException ex) {
return null;
}
}
private int indexOf(String displayText) {
for (int i = 0; i < model.getSize(); i++) {
if (displayText.equalsIgnoreCase(format.format(model.getElementAt(i)))) {
return i;
}
}
return -1;
}
protected String itemToString(Object item) {
return item == null ? null : format.format(item);
}
protected String getFirstMatch(String displayText) {
Object item = prefixSelector.selectMatch(model, displayText);
if (item != null) {
autoSelecting = true;
model.setSelectedItem(item);
autoSelecting = false;
return format.format(item);
}
return null;
}
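// Schedules auto-completion on the EDT whenever the typed text is non-empty and does not already
// match the currently selected item (case-insensitively).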
private void documentChange() {
String text = editor.getText();
String selected = itemToString(model.getSelectedItem());
if (text.length() > 0 && (selected == null || !selected.equalsIgnoreCase(text))) {
SwingUtilities.invokeLater(this::autoComplete);
}
}
private void autoComplete() {
if (editor.getSelectedText() == null) {
autoCompleteFirstMatch(editor.getText());
}
}
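// Completes the field with the remainder of the first matching item and selects the appended suffix,
// temporarily detaching the document listener so the programmatic edit does not re-trigger auto-completion.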
private void autoCompleteFirstMatch(String text) {
if (text.length() > 0) {
int position = editor.getCaretPosition();
String match = getFirstMatch(text);
if (match != null) {
editor.getDocument().removeDocumentListener(documentHandler);
editor.setText(text + match.substring(text.length()));
editor.setCaretPosition(text.length());
editor.setSelectionStart(Math.min(position, text.length()));
editor.setSelectionEnd(match.length());
editor.getDocument().addDocumentListener(documentHandler);
}
}
}
private class BorderlessTextField extends ValidatedTextField {
public BorderlessTextField(String value, int columns, Validator<String> validator) {
super(validator);
setText(value == null ? "" : value);
setColumns(columns);
}
// workaround for 4530952
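// (skips the super call when the text is unchanged, avoiding redundant document events)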
@Override
public void setText(String s) {
if (!getText().equals(s)) {
super.setText(s);
}
}
@Override
public void validateValue() {
super.validateValue();
if (getToolTipText() == null) {
if (getValidationMessages() == null) {
ToolTipManager.sharedInstance().unregisterComponent(this);
}
else {
ToolTipManager.sharedInstance().registerComponent(this);
}
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.streaming.async;
import java.io.IOError;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelPipeline;
import io.netty.util.AttributeKey;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import org.apache.cassandra.concurrent.DebuggableThreadPoolExecutor;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.util.DataOutputBufferFixed;
import org.apache.cassandra.io.util.DataOutputStreamPlus;
import org.apache.cassandra.net.async.ByteBufDataOutputStreamPlus;
import org.apache.cassandra.net.async.NettyFactory;
import org.apache.cassandra.net.async.OutboundConnectionIdentifier;
import org.apache.cassandra.streaming.StreamConnectionFactory;
import org.apache.cassandra.streaming.StreamSession;
import org.apache.cassandra.streaming.StreamingMessageSender;
import org.apache.cassandra.streaming.messages.IncomingStreamMessage;
import org.apache.cassandra.streaming.messages.KeepAliveMessage;
import org.apache.cassandra.streaming.messages.OutgoingStreamMessage;
import org.apache.cassandra.streaming.messages.StreamInitMessage;
import org.apache.cassandra.streaming.messages.StreamMessage;
import org.apache.cassandra.utils.FBUtilities;
/**
* Responsible for sending {@link StreamMessage}s to a given peer. We manage a collection of netty {@link Channel}s
* for sending {@link OutgoingStreamMessage} instances; all other {@link StreamMessage} types are sent via
* a special control channel. The reason for this is to treat those messages carefully and not let them get stuck
* behind a stream transfer.
*
* One of the challenges when sending streams is we might need to delay shipping the stream if:
*
* - we've exceeded our network I/O use due to rate limiting (at the cassandra level)
* - the receiver isn't keeping up, which causes the local TCP socket buffer to not empty, which causes epoll writes to not
* move any bytes to the socket, which causes buffers to stick around in user-land (a/k/a cassandra) memory.
*
* When those conditions occur, it's easy enough to reschedule processing the stream once the resources pick up
* (we acquire the permits from the rate limiter, or the socket drains). However, we need to ensure that
* no other messages are submitted to the same channel while the current stream is still being processed.
*/
public class NettyStreamingMessageSender implements StreamingMessageSender
{
private static final Logger logger = LoggerFactory.getLogger(NettyStreamingMessageSender.class);
private static final int DEFAULT_MAX_PARALLEL_TRANSFERS = FBUtilities.getAvailableProcessors();
private static final int MAX_PARALLEL_TRANSFERS = Integer.parseInt(System.getProperty(Config.PROPERTY_PREFIX + "streaming.session.parallelTransfers", Integer.toString(DEFAULT_MAX_PARALLEL_TRANSFERS)));
// a simple mechanism for allowing a degree of fairness across multiple sessions
private static final Semaphore fileTransferSemaphore = new Semaphore(DEFAULT_MAX_PARALLEL_TRANSFERS, true);
private final StreamSession session;
private final boolean isPreview;
private final int protocolVersion;
private final OutboundConnectionIdentifier connectionId;
private final StreamConnectionFactory factory;
private volatile boolean closed;
/**
* A special {@link Channel} for sending non-stream streaming messages, basically anything that isn't an
* {@link OutgoingStreamMessage} (or an {@link IncomingStreamMessage}, but a node doesn't send that, it's only received).
*/
private Channel controlMessageChannel;
// note: this really doesn't need to be a LBQ, just something that's thread safe
private final Collection<ScheduledFuture<?>> channelKeepAlives = new LinkedBlockingQueue<>();
private final ThreadPoolExecutor fileTransferExecutor;
/**
* A mapping of each {@link #fileTransferExecutor} thread to a channel that can be written to (on that thread).
*/
private final ConcurrentMap<Thread, Channel> threadToChannelMap = new ConcurrentHashMap<>();
/**
* A netty channel attribute used to indicate if a channel is currently transferring a stream. This is primarily used
* to indicate to the {@link KeepAliveTask} if it is safe to send a {@link KeepAliveMessage}, as sending the
* (application level) keep-alive in the middle of a stream would be bad news.
*/
@VisibleForTesting
static final AttributeKey<Boolean> TRANSFERRING_FILE_ATTR = AttributeKey.valueOf("transferringFile");
public NettyStreamingMessageSender(StreamSession session, OutboundConnectionIdentifier connectionId, StreamConnectionFactory factory, int protocolVersion, boolean isPreview)
{
this.session = session;
this.protocolVersion = protocolVersion;
this.connectionId = connectionId;
this.factory = factory;
this.isPreview = isPreview;
String name = session.peer.toString().replace(':', '.');
fileTransferExecutor = new DebuggableThreadPoolExecutor(1, MAX_PARALLEL_TRANSFERS, 1L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(),
new NamedThreadFactory("NettyStreaming-Outbound-" + name));
fileTransferExecutor.allowCoreThreadTimeOut(true);
}
@Override
public void initialize()
{
StreamInitMessage message = new StreamInitMessage(FBUtilities.getBroadcastAddressAndPort(),
session.sessionIndex(),
session.planId(),
session.streamOperation(),
session.getPendingRepair(),
session.getPreviewKind());
sendMessage(message);
}
public boolean hasControlChannel()
{
return controlMessageChannel != null;
}
public void injectControlMessageChannel(Channel channel)
{
this.controlMessageChannel = channel;
channel.attr(TRANSFERRING_FILE_ATTR).set(Boolean.FALSE);
scheduleKeepAliveTask(channel);
}
private void setupControlMessageChannel() throws IOException
{
if (controlMessageChannel == null)
{
controlMessageChannel = createChannel();
scheduleKeepAliveTask(controlMessageChannel);
}
}
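// The keep-alive task runs on the channel's event loop at the configured period; the returned future is
// tracked in channelKeepAlives so that close() can cancel it.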
private void scheduleKeepAliveTask(Channel channel)
{
int keepAlivePeriod = DatabaseDescriptor.getStreamingKeepAlivePeriod();
if (logger.isDebugEnabled())
logger.debug("{} Scheduling keep-alive task with {}s period.", createLogTag(session, channel), keepAlivePeriod);
KeepAliveTask task = new KeepAliveTask(channel, session);
ScheduledFuture<?> scheduledFuture = channel.eventLoop().scheduleAtFixedRate(task, 0, keepAlivePeriod, TimeUnit.SECONDS);
channelKeepAlives.add(scheduledFuture);
task.future = scheduledFuture;
}
private Channel createChannel() throws IOException
{
Channel channel = factory.createConnection(connectionId, protocolVersion);
ChannelPipeline pipeline = channel.pipeline();
pipeline.addLast(NettyFactory.instance.streamingGroup, NettyFactory.INBOUND_STREAM_HANDLER_NAME, new StreamingInboundHandler(connectionId.remote(), protocolVersion, session));
channel.attr(TRANSFERRING_FILE_ATTR).set(Boolean.FALSE);
logger.debug("Creating channel id {} local {} remote {}", channel.id(), channel.localAddress(), channel.remoteAddress());
return channel;
}
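// Builds a log prefix like "[Stream #<planId> channel: <channelId>]", omitting the parts whose argument is null.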
static String createLogTag(StreamSession session, Channel channel)
{
StringBuilder sb = new StringBuilder(64);
sb.append("[Stream");
if (session != null)
sb.append(" #").append(session.planId());
if (channel != null)
sb.append(" channel: ").append(channel.id());
sb.append(']');
return sb.toString();
}
@Override
public void sendMessage(StreamMessage message)
{
if (closed)
throw new RuntimeException("stream has been closed, cannot send " + message);
if (message instanceof OutgoingStreamMessage)
{
if (isPreview)
throw new RuntimeException("Cannot send stream data messages for preview streaming sessions");
if (logger.isDebugEnabled())
logger.debug("{} Sending {}", createLogTag(session, null), message);
fileTransferExecutor.submit(new FileStreamTask((OutgoingStreamMessage)message));
return;
}
try
{
setupControlMessageChannel();
sendControlMessage(controlMessageChannel, message, future -> onControlMessageComplete(future, message));
}
catch (Exception e)
{
close();
session.onError(e);
}
}
private void sendControlMessage(Channel channel, StreamMessage message, GenericFutureListener listener) throws IOException
{
if (logger.isDebugEnabled())
logger.debug("{} Sending {}", createLogTag(session, channel), message);
// we anticipate that control messages are rather small, so allocating a ByteBuf here shouldn't blow out memory.
long messageSize = StreamMessage.serializedSize(message, protocolVersion);
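// sanity check: 1 << 30 bytes (1 GiB) is far larger than any control message should ever be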
if (messageSize > 1 << 30)
{
throw new IllegalStateException(String.format("%s something is seriously wrong with the calculated stream control message's size: %d bytes, type is %s",
createLogTag(session, channel), messageSize, message.type));
}
// as control messages are (expected to be) small, we can simply allocate a ByteBuf here, wrap it, and send via the channel
ByteBuf buf = channel.alloc().directBuffer((int) messageSize, (int) messageSize);
ByteBuffer nioBuf = buf.nioBuffer(0, (int) messageSize);
@SuppressWarnings("resource")
DataOutputBufferFixed out = new DataOutputBufferFixed(nioBuf);
StreamMessage.serialize(message, out, protocolVersion, session);
assert nioBuf.position() == nioBuf.limit();
buf.writerIndex(nioBuf.position());
ChannelFuture channelFuture = channel.writeAndFlush(buf);
channelFuture.addListener(future -> listener.operationComplete(future));
}
/**
* Decides what to do after a {@link StreamMessage} is processed.
*
* Note: this is called from the netty event loop.
*
* @return null if the message was processed successfully; else, a {@link java.util.concurrent.Future} to indicate
* the status of aborting any remaining tasks in the session.
*/
java.util.concurrent.Future onControlMessageComplete(Future<?> future, StreamMessage msg)
{
ChannelFuture channelFuture = (ChannelFuture)future;
Throwable cause = future.cause();
if (cause == null)
return null;
Channel channel = channelFuture.channel();
logger.error("{} failed to send a stream message/data to peer {}: msg = {}",
createLogTag(session, channel), connectionId, msg, future.cause());
// StreamSession will invoke close(), but we have to mark this sender as closed so the session doesn't try
// to send any failure messages
return session.onError(cause);
}
class FileStreamTask implements Runnable
{
/**
* Time interval, in minutes, to wait between logging a message indicating that we're waiting on a semaphore
* permit to become available.
*/
private static final int SEMAPHORE_UNAVAILABLE_LOG_INTERVAL = 3;
/**
* Even though we expect only an {@link OutgoingStreamMessage} at runtime, the type here is {@link StreamMessage}
* to facilitate simpler testing.
*/
private final StreamMessage msg;
FileStreamTask(OutgoingStreamMessage ofm)
{
this.msg = ofm;
}
/**
* For testing purposes
*/
FileStreamTask(StreamMessage msg)
{
this.msg = msg;
}
@Override
public void run()
{
if (!acquirePermit(SEMAPHORE_UNAVAILABLE_LOG_INTERVAL))
return;
try
{
Channel channel = getOrCreateChannel();
if (!channel.attr(TRANSFERRING_FILE_ATTR).compareAndSet(false, true))
throw new IllegalStateException("channel's transferring state is currently set to true. refusing to start new stream");
// close the DataOutputStreamPlus as we're done with it - but don't close the channel
try (DataOutputStreamPlus outPlus = ByteBufDataOutputStreamPlus.create(session, channel, 1 << 20))
{
StreamMessage.serialize(msg, outPlus, protocolVersion, session);
channel.flush();
}
finally
{
channel.attr(TRANSFERRING_FILE_ATTR).set(Boolean.FALSE);
}
}
catch (Exception e)
{
session.onError(e);
}
finally
{
fileTransferSemaphore.release();
}
}
boolean acquirePermit(int logInterval)
{
long logIntervalNanos = TimeUnit.MINUTES.toNanos(logInterval);
long timeOfLastLogging = System.nanoTime();
while (true)
{
if (closed)
return false;
try
{
if (fileTransferSemaphore.tryAcquire(1, TimeUnit.SECONDS))
return true;
// log a helpful message to operators in case they are wondering why a given session might not be making progress.
long now = System.nanoTime();
if (now - timeOfLastLogging > logIntervalNanos)
{
timeOfLastLogging = now;
OutgoingStreamMessage ofm = (OutgoingStreamMessage)msg;
if (logger.isInfoEnabled())
logger.info("{} waiting to acquire a permit to begin streaming {}. This message logs every {} minutes",
createLogTag(session, null), ofm.getName(), logInterval);
}
}
catch (InterruptedException ie)
{
//ignore
}
}
}
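// Each file-transfer thread lazily creates and caches its own outbound channel in threadToChannelMap,
// so a given channel is only ever written to by one thread.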
private Channel getOrCreateChannel()
{
Thread currentThread = Thread.currentThread();
try
{
Channel channel = threadToChannelMap.get(currentThread);
if (channel != null)
return channel;
channel = createChannel();
threadToChannelMap.put(currentThread, channel);
return channel;
}
catch (Exception e)
{
throw new IOError(e);
}
}
/**
* For testing purposes
*/
void injectChannel(Channel channel)
{
Thread currentThread = Thread.currentThread();
if (threadToChannelMap.get(currentThread) != null)
throw new IllegalStateException("previous channel already set");
threadToChannelMap.put(currentThread, channel);
}
/**
* For testing purposes
*/
void unsetChannel()
{
threadToChannelMap.remove(Thread.currentThread());
}
}
/**
* Periodically sends the {@link KeepAliveMessage}.
*
* NOTE: this task and the callback function {@link #keepAliveListener(Future)} are executed in the netty event loop.
*/
class KeepAliveTask implements Runnable
{
private final Channel channel;
private final StreamSession session;
/**
* A reference to the scheduled task for this instance so that it may be cancelled.
*/
ScheduledFuture<?> future;
KeepAliveTask(Channel channel, StreamSession session)
{
this.channel = channel;
this.session = session;
}
public void run()
{
// if the channel has been closed, cancel the scheduled task and return
if (!channel.isOpen() || closed)
{
future.cancel(false);
return;
}
// if the channel is currently processing streaming, skip this execution. As this task executes
// on the event loop, even if there is a race with a FileStreamTask which changes the channel attribute
// after we check it, the FileStreamTask cannot send out any bytes as this KeepAliveTask is executing
// on the event loop (and FileStreamTask publishes its buffer to the channel, consumed after we're done here).
if (channel.attr(TRANSFERRING_FILE_ATTR).get())
return;
try
{
if (logger.isTraceEnabled())
logger.trace("{} Sending keep-alive to {}.", createLogTag(session, channel), session.peer);
sendControlMessage(channel, new KeepAliveMessage(), this::keepAliveListener);
}
catch (IOException ioe)
{
future.cancel(false);
}
}
private void keepAliveListener(Future<? super Void> future)
{
if (future.isSuccess() || future.isCancelled())
return;
if (logger.isDebugEnabled())
logger.debug("{} Could not send keep-alive message (perhaps stream session is finished?).",
createLogTag(session, channel), future.cause());
}
}
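/*
 * Illustrative scheduling sketch (keepAlivePeriodSeconds is a hypothetical value): per the javadoc
 * above, the task runs on the channel's netty event loop, its ScheduledFuture is stored back on the
 * task so run() can cancel itself, and the same future is tracked in channelKeepAlives so close()
 * can cancel it:
 *
 *   KeepAliveTask task = new KeepAliveTask(channel, session);
 *   ScheduledFuture<?> scheduled = channel.eventLoop().scheduleAtFixedRate(
 *           task, 0, keepAlivePeriodSeconds, TimeUnit.SECONDS);
 *   task.future = scheduled;
 *   channelKeepAlives.add(scheduled);
 */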
/**
* For testing purposes only.
*/
void setClosed()
{
closed = true;
}
void setControlMessageChannel(Channel channel)
{
controlMessageChannel = channel;
}
int semaphoreAvailablePermits()
{
return fileTransferSemaphore.availablePermits();
}
@Override
public boolean connected()
{
return !closed;
}
@Override
public void close()
{
closed = true;
if (logger.isDebugEnabled())
logger.debug("{} Closing stream connection channels on {}", createLogTag(session, null), connectionId);
for (ScheduledFuture<?> future : channelKeepAlives)
future.cancel(false);
channelKeepAlives.clear();
List<Future<Void>> futures = new ArrayList<>(threadToChannelMap.size());
for (Channel channel : threadToChannelMap.values())
futures.add(channel.close());
FBUtilities.waitOnFutures(futures, 10, TimeUnit.SECONDS);
threadToChannelMap.clear();
fileTransferExecutor.shutdownNow();
if (controlMessageChannel != null)
controlMessageChannel.close();
}
@Override
public OutboundConnectionIdentifier getConnectionId()
{
return connectionId;
}
}
|
|
package org.nuxeo.intellij.ui;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.swing.*;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.nuxeo.intellij.NuxeoSDK;
import org.nuxeo.intellij.NuxeoSDKManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonShortcuts;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.InputValidator;
import com.intellij.openapi.ui.MasterDetailsComponent;
import com.intellij.openapi.ui.MasterDetailsStateService;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.IconUtil;
/**
* Panel for Nuxeo SDKs entry in Preferences/Nuxeo.
*/
public class NuxeoSDKsPanel extends MasterDetailsComponent implements
SearchableConfigurable {
private final Project project;
private final NuxeoSDKManager nuxeoSDKManager;
@NotNull
private final AtomicBoolean initialized = new AtomicBoolean(false);
private final List<ApplyListener> applyListeners = new ArrayList<ApplyListener>();
public NuxeoSDKsPanel(Project project) {
this.project = project;
this.nuxeoSDKManager = NuxeoSDKManager.getInstance(project);
initTree();
}
@Override
protected MasterDetailsStateService getStateService() {
return MasterDetailsStateService.getInstance(project);
}
@Override
protected String getComponentStateKey() {
return "Nuxeo.UI";
}
protected void processRemovedItems() {
Map<String, NuxeoSDK> nuxeoSDKs = getAllNuxeoSDKs();
final List<NuxeoSDK> deleted = new ArrayList<NuxeoSDK>();
for (NuxeoSDK nuxeoSDK : nuxeoSDKManager.getNuxeoSDKs()) {
if (!nuxeoSDKs.containsValue(nuxeoSDK)) {
deleted.add(nuxeoSDK);
}
}
for (NuxeoSDK nuxeoSDK : deleted) {
nuxeoSDKManager.removeNuxeoSDK(nuxeoSDK);
}
}
protected boolean wasObjectStored(Object o) {
return nuxeoSDKManager.getNuxeoSDKs().contains(o);
}
@Nls
public String getDisplayName() {
return "Nuxeo SDKs";
}
@NonNls
public String getHelpTopic() {
return "nuxeo.sdks";
}
public void apply() throws ConfigurationException {
final Set<String> names = new HashSet<String>();
for (int i = 0; i < myRoot.getChildCount(); i++) {
MyNode node = (MyNode) myRoot.getChildAt(i);
final String name = ((NuxeoSDKConfigurable) node.getConfigurable()).getEditableObject().getName();
if (names.contains(name)) {
selectNodeInTree(name);
throw new ConfigurationException("Duplicate Nuxeo SDK name: \'"
+ name + "\'");
}
names.add(name);
}
super.apply();
for (ApplyListener listener : applyListeners) {
listener.onApply();
}
}
public Map<String, NuxeoSDK> getAllNuxeoSDKs() {
final Map<String, NuxeoSDK> nuxeoSDKs = new HashMap<String, NuxeoSDK>();
if (!initialized.get()) {
for (NuxeoSDK nuxeoSDK : nuxeoSDKManager.getNuxeoSDKs()) {
nuxeoSDKs.put(nuxeoSDK.getName(), nuxeoSDK);
}
} else {
for (int i = 0; i < myRoot.getChildCount(); i++) {
MyNode node = (MyNode) myRoot.getChildAt(i);
final NuxeoSDK nuxeoSDK = ((NuxeoSDKConfigurable) node.getConfigurable()).getEditableObject();
nuxeoSDKs.put(nuxeoSDK.getName(), nuxeoSDK);
}
}
return nuxeoSDKs;
}
@Override
public void disposeUIResources() {
super.disposeUIResources();
initialized.set(false);
}
@Override
@Nullable
protected ArrayList<AnAction> createActions(boolean fromPopup) {
ArrayList<AnAction> result = new ArrayList<AnAction>();
result.add(new AnAction("Add", "Add", IconUtil.getAddIcon()) {
{
registerCustomShortcutSet(CommonShortcuts.INSERT, myTree);
}
public void actionPerformed(AnActionEvent event) {
final VirtualFile sdk = NuxeoSDKChooser.chooseNuxeoSDK(project);
if (sdk == null)
return;
final String name = askForNuxeoSDKName("Register Nuxeo SDK", "");
if (name == null)
return;
final NuxeoSDK nuxeoSDK = new NuxeoSDK(name, sdk.getPath());
addNuxeoSDKNode(nuxeoSDK);
}
});
result.add(new MyDeleteAction(forAll(Conditions.alwaysTrue())));
return result;
}
@Nullable
private String askForNuxeoSDKName(String title, String initialName) {
return Messages.showInputDialog("New Nuxeo SDK name:", title,
Messages.getQuestionIcon(), initialName, new InputValidator() {
public boolean checkInput(String s) {
return !getAllNuxeoSDKs().containsKey(s)
&& s.length() > 0;
}
public boolean canClose(String s) {
return checkInput(s);
}
});
}
private void addNuxeoSDKNode(NuxeoSDK nuxeoSDK) {
final NuxeoSDKConfigurable nuxeoSDKConfigurable = new NuxeoSDKConfigurable(
project, nuxeoSDK, TREE_UPDATER);
nuxeoSDKConfigurable.setModified(true);
final MyNode node = new MyNode(nuxeoSDKConfigurable);
addNode(node, myRoot);
selectNodeInTree(node);
}
private void reloadTree() {
myRoot.removeAllChildren();
Collection<NuxeoSDK> nuxeoSDKs = nuxeoSDKManager.getNuxeoSDKs();
for (NuxeoSDK nuxeoSDK : nuxeoSDKs) {
NuxeoSDK clone = new NuxeoSDK(nuxeoSDK);
addNode(new MyNode(new NuxeoSDKConfigurable(project, clone,
TREE_UPDATER)), myRoot);
}
initialized.set(true);
}
public void reset() {
reloadTree();
super.reset();
}
@Override
protected String getEmptySelectionString() {
return "Select a Nuxeo SDK to view or edit its details here";
}
public void addItemsChangeListener(final Runnable runnable) {
addItemsChangeListener(new ItemsChangeListener() {
public void itemChanged(@Nullable Object deletedItem) {
SwingUtilities.invokeLater(runnable);
}
public void itemsExternallyChanged() {
SwingUtilities.invokeLater(runnable);
}
});
}
@NotNull
public String getId() {
return getHelpTopic();
}
public Runnable enableSearch(String option) {
return null;
}
public void registerApplyListener(ApplyListener listener) {
applyListeners.add(listener);
}
public static interface ApplyListener {
public void onApply();
}
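/*
 * Illustrative sketch (the panel reference and refreshAction callback are hypothetical): components
 * that need to react when SDK changes are persisted can register a listener, which apply() invokes
 * after the duplicate-name check and super.apply() have succeeded:
 *
 *   panel.registerApplyListener(new ApplyListener() {
 *       public void onApply() {
 *           refreshAction.run();
 *       }
 *   });
 */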
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFTableFeaturePropWriteSetfieldVer14 implements OFTableFeaturePropWriteSetfield {
private static final Logger logger = LoggerFactory.getLogger(OFTableFeaturePropWriteSetfieldVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;
final static int MINIMUM_LENGTH = 4;
private final static List<U32> DEFAULT_OXM_IDS = ImmutableList.<U32>of();
// OF message fields
private final List<U32> oxmIds;
//
// Immutable default instance
final static OFTableFeaturePropWriteSetfieldVer14 DEFAULT = new OFTableFeaturePropWriteSetfieldVer14(
DEFAULT_OXM_IDS
);
// package private constructor - used by readers, builders, and factory
OFTableFeaturePropWriteSetfieldVer14(List<U32> oxmIds) {
if(oxmIds == null) {
throw new NullPointerException("OFTableFeaturePropWriteSetfieldVer14: property oxmIds cannot be null");
}
this.oxmIds = oxmIds;
}
// Accessors for OF message fields
@Override
public int getType() {
return 0xc;
}
@Override
public List<U32> getOxmIds() {
return oxmIds;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
public OFTableFeaturePropWriteSetfield.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFTableFeaturePropWriteSetfield.Builder {
final OFTableFeaturePropWriteSetfieldVer14 parentMessage;
// OF message fields
private boolean oxmIdsSet;
private List<U32> oxmIds;
BuilderWithParent(OFTableFeaturePropWriteSetfieldVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public int getType() {
return 0xc;
}
@Override
public List<U32> getOxmIds() {
return oxmIds;
}
@Override
public OFTableFeaturePropWriteSetfield.Builder setOxmIds(List<U32> oxmIds) {
this.oxmIds = oxmIds;
this.oxmIdsSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFTableFeaturePropWriteSetfield build() {
List<U32> oxmIds = this.oxmIdsSet ? this.oxmIds : parentMessage.oxmIds;
if(oxmIds == null)
throw new NullPointerException("Property oxmIds must not be null");
//
return new OFTableFeaturePropWriteSetfieldVer14(
oxmIds
);
}
}
static class Builder implements OFTableFeaturePropWriteSetfield.Builder {
// OF message fields
private boolean oxmIdsSet;
private List<U32> oxmIds;
@Override
public int getType() {
return 0xc;
}
@Override
public List<U32> getOxmIds() {
return oxmIds;
}
@Override
public OFTableFeaturePropWriteSetfield.Builder setOxmIds(List<U32> oxmIds) {
this.oxmIds = oxmIds;
this.oxmIdsSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
//
@Override
public OFTableFeaturePropWriteSetfield build() {
List<U32> oxmIds = this.oxmIdsSet ? this.oxmIds : DEFAULT_OXM_IDS;
if(oxmIds == null)
throw new NullPointerException("Property oxmIds must not be null");
return new OFTableFeaturePropWriteSetfieldVer14(
oxmIds
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFTableFeaturePropWriteSetfield> {
@Override
public OFTableFeaturePropWriteSetfield readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property type == 0xc
short type = bb.readShort();
if(type != (short) 0xc)
throw new OFParseError("Wrong type: Expected=0xc(0xc), got="+type);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
List<U32> oxmIds = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), U32.READER);
// align message to 8 bytes (length does not contain alignment)
bb.skipBytes(((length + 7)/8 * 8 ) - length );
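// illustrative arithmetic: e.g. length == 12 -> ((12 + 7) / 8) * 8 - 12 == 4 pad bytes skipped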
OFTableFeaturePropWriteSetfieldVer14 tableFeaturePropWriteSetfieldVer14 = new OFTableFeaturePropWriteSetfieldVer14(
oxmIds
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", tableFeaturePropWriteSetfieldVer14);
return tableFeaturePropWriteSetfieldVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFTableFeaturePropWriteSetfieldVer14Funnel FUNNEL = new OFTableFeaturePropWriteSetfieldVer14Funnel();
static class OFTableFeaturePropWriteSetfieldVer14Funnel implements Funnel<OFTableFeaturePropWriteSetfieldVer14> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFTableFeaturePropWriteSetfieldVer14 message, PrimitiveSink sink) {
// fixed value property type = 0xc
sink.putShort((short) 0xc);
// FIXME: skip funnel of length
FunnelUtils.putList(message.oxmIds, sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFTableFeaturePropWriteSetfieldVer14> {
@Override
public void write(ByteBuf bb, OFTableFeaturePropWriteSetfieldVer14 message) {
int startIndex = bb.writerIndex();
// fixed value property type = 0xc
bb.writeShort((short) 0xc);
// length is length of variable message, will be updated at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
ChannelUtils.writeList(bb, message.oxmIds);
// update length field
int length = bb.writerIndex() - startIndex;
int alignedLength = ((length + 7)/8 * 8);
bb.setShort(lengthIndex, length);
// align message to 8 bytes
bb.writeZero(alignedLength - length);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFTableFeaturePropWriteSetfieldVer14(");
b.append("oxmIds=").append(oxmIds);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFTableFeaturePropWriteSetfieldVer14 other = (OFTableFeaturePropWriteSetfieldVer14) obj;
if (oxmIds == null) {
if (other.oxmIds != null)
return false;
} else if (!oxmIds.equals(other.oxmIds))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((oxmIds == null) ? 0 : oxmIds.hashCode());
return result;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.*;
import javax.servlet.jsp.tagext.FunctionInfo;
import org.apache.jasper.Constants;
import org.apache.jasper.JasperException;
import org.apache.tomcat.util.security.PrivilegedGetTccl;
/**
* This class generates function mappers for the EL expressions in the page.
* Instead of a global mapper, a mapper is used for each call to the EL
* evaluator, thus avoiding prefix overlapping and redefinition
* issues.
*
* @author Kin-man Chung
*/
public class ELFunctionMapper {
private int currFunc = 0;
StringBuffer ds; // Contains code to initialize the function mappers.
StringBuffer ss; // Contains declarations of the function mappers.
/**
* Creates the functions mappers for all EL expressions in the JSP page.
*
* @param compiler Current compiler, mainly for accessing error dispatcher.
* @param page The current compilation unit.
*/
public static void map(Compiler compiler, Node.Nodes page)
throws JasperException {
ELFunctionMapper map = new ELFunctionMapper();
map.ds = new StringBuffer();
map.ss = new StringBuffer();
page.visit(map.new ELFunctionVisitor());
// Append the declarations to the root node
String ds = map.ds.toString();
if (ds.length() > 0) {
Node root = page.getRoot();
new Node.Declaration(map.ss.toString(), null, root);
new Node.Declaration("static {\n" + ds + "}\n", null, root);
}
}
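/*
 * Illustrative shape of the generated output, assuming a page whose first EL expression uses the
 * single JSTL function fn:length: doMap() below appends a field declaration to ss and its
 * initializer to ds, so the declarations attached to the root node look roughly like:
 *
 *   static private org.apache.jasper.runtime.ProtectedFunctionMapper _jspx_fnmap_0;
 *   static {
 *       _jspx_fnmap_0 = org.apache.jasper.runtime.ProtectedFunctionMapper.getMapForFunction(
 *               "fn:length", org.apache.taglibs.standard.functions.Functions.class,
 *               "length", new Class[] { java.lang.Object.class });
 *   }
 */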
/**
* A visitor for the page. The places where EL is allowed are scanned
* for functions, and if any are found, function mappers are created.
*/
class ELFunctionVisitor extends Node.Visitor {
/**
* Use a global name map to facilitate reuse of function maps.
* The key used is prefix:function:uri.
*/
private HashMap<String, String> gMap = new HashMap<String, String>();
public void visit(Node.ParamAction n) throws JasperException {
doMap(n.getValue());
visitBody(n);
}
public void visit(Node.IncludeAction n) throws JasperException {
doMap(n.getPage());
visitBody(n);
}
public void visit(Node.ForwardAction n) throws JasperException {
doMap(n.getPage());
visitBody(n);
}
public void visit(Node.SetProperty n) throws JasperException {
doMap(n.getValue());
visitBody(n);
}
public void visit(Node.UseBean n) throws JasperException {
doMap(n.getBeanName());
visitBody(n);
}
public void visit(Node.PlugIn n) throws JasperException {
doMap(n.getHeight());
doMap(n.getWidth());
visitBody(n);
}
public void visit(Node.JspElement n) throws JasperException {
Node.JspAttribute[] attrs = n.getJspAttributes();
for (int i = 0; attrs != null && i < attrs.length; i++) {
doMap(attrs[i]);
}
doMap(n.getNameAttribute());
visitBody(n);
}
public void visit(Node.UninterpretedTag n) throws JasperException {
Node.JspAttribute[] attrs = n.getJspAttributes();
for (int i = 0; attrs != null && i < attrs.length; i++) {
doMap(attrs[i]);
}
visitBody(n);
}
public void visit(Node.CustomTag n) throws JasperException {
Node.JspAttribute[] attrs = n.getJspAttributes();
for (int i = 0; attrs != null && i < attrs.length; i++) {
doMap(attrs[i]);
}
visitBody(n);
}
public void visit(Node.ELExpression n) throws JasperException {
doMap(n.getEL());
}
private void doMap(Node.JspAttribute attr)
throws JasperException {
if (attr != null) {
doMap(attr.getEL());
}
}
/**
* Creates function mappers, if needed, from ELNodes
*/
private void doMap(ELNode.Nodes el)
throws JasperException {
// Only care about functions in ELNode's
class Fvisitor extends ELNode.Visitor {
ArrayList<ELNode.Function> funcs =
new ArrayList<ELNode.Function>();
HashMap<String, String> keyMap = new HashMap<String, String>();
public void visit(ELNode.Function n) throws JasperException {
String key = n.getPrefix() + ":" + n.getName();
if (! keyMap.containsKey(key)) {
keyMap.put(key,"");
funcs.add(n);
}
}
}
if (el == null) {
return;
}
// First locate all unique functions in this EL
Fvisitor fv = new Fvisitor();
el.visit(fv);
ArrayList functions = fv.funcs;
if (functions.size() == 0) {
return;
}
// Reuse a previous map if possible
String decName = matchMap(functions);
if (decName != null) {
el.setMapName(decName);
return;
}
// Generate declaration for the map statically
decName = getMapName();
ss.append("static private org.apache.jasper.runtime.ProtectedFunctionMapper " + decName + ";\n");
ds.append(" " + decName + "= ");
ds.append("org.apache.jasper.runtime.ProtectedFunctionMapper");
// Special case if there is only one function in the map
String funcMethod = null;
if (functions.size() == 1) {
funcMethod = ".getMapForFunction";
} else {
ds.append(".getInstance();\n");
funcMethod = " " + decName + ".mapFunction";
}
// Setup arguments for either getMapForFunction or mapFunction
for (int i = 0; i < functions.size(); i++) {
ELNode.Function f = (ELNode.Function)functions.get(i);
FunctionInfo funcInfo = f.getFunctionInfo();
String key = f.getPrefix()+ ":" + f.getName();
ds.append(funcMethod + "(\"" + key + "\", " +
getCanonicalName(funcInfo.getFunctionClass()) +
".class, " + '\"' + f.getMethodName() + "\", " +
"new Class[] {");
String params[] = f.getParameters();
for (int k = 0; k < params.length; k++) {
if (k != 0) {
ds.append(", ");
}
int iArray = params[k].indexOf('[');
if (iArray < 0) {
ds.append(params[k] + ".class");
}
else {
String baseType = params[k].substring(0, iArray);
ds.append("java.lang.reflect.Array.newInstance(");
ds.append(baseType);
ds.append(".class,");
// Count the number of array dimension
int aCount = 0;
for (int jj = iArray; jj < params[k].length(); jj++ ) {
if (params[k].charAt(jj) == '[') {
aCount++;
}
}
if (aCount == 1) {
ds.append("0).getClass()");
} else {
ds.append("new int[" + aCount + "]).getClass()");
}
}
}
ds.append("});\n");
// Put the current name in the global function map
gMap.put(f.getPrefix() + ':' + f.getName() + ':' + f.getUri(),
decName);
}
el.setMapName(decName);
}
/**
* Find the name of the function mapper for an EL. Reuse a
* previously generated one if possible.
* @param functions An ArrayList of ELNode.Function instances that
* represent the functions in an EL
* @return A previously generated function mapper name that can be used
* by this EL; null if none found.
*/
private String matchMap(ArrayList functions) {
String mapName = null;
for (int i = 0; i < functions.size(); i++) {
ELNode.Function f = (ELNode.Function)functions.get(i);
String temName = (String) gMap.get(f.getPrefix() + ':' +
f.getName() + ':' + f.getUri());
if (temName == null) {
return null;
}
if (mapName == null) {
mapName = temName;
} else if (!temName.equals(mapName)) {
// If not all in the previous match, then no match.
return null;
}
}
return mapName;
}
/*
* @return A unique name for a function mapper.
*/
private String getMapName() {
return "_jspx_fnmap_" + currFunc++;
}
/**
* Convert a binary class name into a canonical one that can be used
* when generating Java source code.
*
* @param className Binary class name
* @return Canonical equivalent
*/
private String getCanonicalName(String className) throws JasperException {
Class<?> clazz;
ClassLoader tccl;
if (Constants.IS_SECURITY_ENABLED) {
PrivilegedAction<ClassLoader> pa = new PrivilegedGetTccl();
tccl = AccessController.doPrivileged(pa);
} else {
tccl = Thread.currentThread().getContextClassLoader();
}
try {
clazz = Class.forName(className, false, tccl);
} catch (ClassNotFoundException e) {
throw new JasperException(e);
}
return clazz.getCanonicalName();
}
}
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBarrierRequestVer15 implements OFBarrierRequest {
private static final Logger logger = LoggerFactory.getLogger(OFBarrierRequestVer15.class);
// version: 1.5
final static byte WIRE_VERSION = 6;
final static int LENGTH = 8;
private final static long DEFAULT_XID = 0x0L;
// OF message fields
private final long xid;
//
// Immutable default instance
final static OFBarrierRequestVer15 DEFAULT = new OFBarrierRequestVer15(
DEFAULT_XID
);
// package private constructor - used by readers, builders, and factory
OFBarrierRequestVer15(long xid) {
this.xid = xid;
}
// Accessors for OF message fields
@Override
public OFVersion getVersion() {
return OFVersion.OF_15;
}
@Override
public OFType getType() {
return OFType.BARRIER_REQUEST;
}
@Override
public long getXid() {
return xid;
}
public OFBarrierRequest.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFBarrierRequest.Builder {
final OFBarrierRequestVer15 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
BuilderWithParent(OFBarrierRequestVer15 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_15;
}
@Override
public OFType getType() {
return OFType.BARRIER_REQUEST;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBarrierRequest.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFBarrierRequest build() {
long xid = this.xidSet ? this.xid : parentMessage.xid;
//
return new OFBarrierRequestVer15(
xid
);
}
}
static class Builder implements OFBarrierRequest.Builder {
// OF message fields
private boolean xidSet;
private long xid;
@Override
public OFVersion getVersion() {
return OFVersion.OF_15;
}
@Override
public OFType getType() {
return OFType.BARRIER_REQUEST;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBarrierRequest.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
//
@Override
public OFBarrierRequest build() {
long xid = this.xidSet ? this.xid : DEFAULT_XID;
return new OFBarrierRequestVer15(
xid
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFBarrierRequest> {
@Override
public OFBarrierRequest readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property version == 6
byte version = bb.readByte();
if(version != (byte) 0x6)
throw new OFParseError("Wrong version: Expected=OFVersion.OF_15(6), got="+version);
// fixed value property type == 20
byte type = bb.readByte();
if(type != (byte) 0x14)
throw new OFParseError("Wrong type: Expected=OFType.BARRIER_REQUEST(20), got="+type);
int length = U16.f(bb.readShort());
if(length != 8)
throw new OFParseError("Wrong length: Expected=8(8), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long xid = U32.f(bb.readInt());
OFBarrierRequestVer15 barrierRequestVer15 = new OFBarrierRequestVer15(
xid
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", barrierRequestVer15);
return barrierRequestVer15;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFBarrierRequestVer15Funnel FUNNEL = new OFBarrierRequestVer15Funnel();
static class OFBarrierRequestVer15Funnel implements Funnel<OFBarrierRequestVer15> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFBarrierRequestVer15 message, PrimitiveSink sink) {
// fixed value property version = 6
sink.putByte((byte) 0x6);
// fixed value property type = 20
sink.putByte((byte) 0x14);
// fixed value property length = 8
sink.putShort((short) 0x8);
sink.putLong(message.xid);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFBarrierRequestVer15> {
@Override
public void write(ByteBuf bb, OFBarrierRequestVer15 message) {
// fixed value property version = 6
bb.writeByte((byte) 0x6);
// fixed value property type = 20
bb.writeByte((byte) 0x14);
// fixed value property length = 8
bb.writeShort((short) 0x8);
bb.writeInt(U32.t(message.xid));
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFBarrierRequestVer15(");
b.append("xid=").append(xid);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFBarrierRequestVer15 other = (OFBarrierRequestVer15) obj;
if( xid != other.xid)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (xid ^ (xid >>> 32));
return result;
}
}
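/*
 * Illustrative usage sketch, not part of the generated class: callers normally obtain instances
 * through the version-specific factory rather than constructing this class directly (the factory
 * lookup below is an assumption about the surrounding openflowj API):
 *
 *   OFFactory factory = OFFactories.getFactory(OFVersion.OF_15);
 *   OFBarrierRequest barrier = factory.buildBarrierRequest().setXid(0x2a).build();
 */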
|
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.orchestrator.model;
import com.yahoo.vespa.applicationmodel.ClusterId;
import com.yahoo.vespa.applicationmodel.HostName;
import com.yahoo.vespa.applicationmodel.ServiceCluster;
import com.yahoo.vespa.applicationmodel.ServiceInstance;
import com.yahoo.vespa.applicationmodel.ServiceStatus;
import com.yahoo.vespa.applicationmodel.ServiceType;
import com.yahoo.vespa.orchestrator.controller.ClusterControllerClientFactory;
import com.yahoo.vespa.orchestrator.policy.ClusterParams;
import com.yahoo.vespa.orchestrator.policy.HostStateChangeDeniedException;
import com.yahoo.vespa.orchestrator.policy.HostedVespaPolicy;
import com.yahoo.vespa.orchestrator.policy.SuspensionReasons;
import com.yahoo.vespa.orchestrator.status.HostInfos;
import com.yahoo.vespa.orchestrator.status.HostStatus;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author hakonhall
*/
class ClusterApiImpl implements ClusterApi {
static final Duration downMoratorium = Duration.ofSeconds(30);
private final ApplicationApi applicationApi;
private final ServiceCluster serviceCluster;
private final NodeGroup nodeGroup;
private final HostInfos hostInfos;
private final ClusterControllerClientFactory clusterControllerClientFactory;
private final Clock clock;
private final Set<ServiceInstance> servicesInGroup;
private final Set<ServiceInstance> servicesNotInGroup;
/** Lazily initialized in servicesDownAndNotInGroup(), do not access directly. */
private Set<ServiceInstance> servicesDownAndNotInGroup = null;
/*
* There are two sources for the number of config servers in a cluster. The config server config and the node
* repository.
*
* The actual number of config servers in the zone-config-servers application/cluster may be less than
* the configured number.
*
* For example: If only 2/3 have been provisioned so far, or 1 is being reprovisioned. In these cases it is
* important for the Orchestrator to count that third config server as down.
*/
private final int missingServices;
private final String descriptionOfMissingServices;
public ClusterApiImpl(ApplicationApi applicationApi,
ServiceCluster serviceCluster,
NodeGroup nodeGroup,
HostInfos hostInfos,
ClusterControllerClientFactory clusterControllerClientFactory,
ClusterParams clusterParams,
Clock clock) {
this.applicationApi = applicationApi;
this.serviceCluster = serviceCluster;
this.nodeGroup = nodeGroup;
this.hostInfos = hostInfos;
this.clusterControllerClientFactory = clusterControllerClientFactory;
this.clock = clock;
Map<Boolean, Set<ServiceInstance>> serviceInstancesByLocality =
serviceCluster.serviceInstances().stream()
.collect(
Collectors.groupingBy(
instance -> nodeGroup.contains(instance.hostName()),
Collectors.toSet()));
servicesInGroup = serviceInstancesByLocality.getOrDefault(true, Collections.emptySet());
servicesNotInGroup = serviceInstancesByLocality.getOrDefault(false, Collections.emptySet());
int serviceInstances = serviceCluster.serviceInstances().size();
if (clusterParams.size().isPresent() && serviceInstances < clusterParams.size().getAsInt()) {
missingServices = clusterParams.size().getAsInt() - serviceInstances;
descriptionOfMissingServices = missingServices + " missing " + serviceCluster.nodeDescription(missingServices > 1);
} else {
missingServices = 0;
descriptionOfMissingServices = "NA";
}
}
@Override
public NodeGroup getNodeGroup() {
return nodeGroup;
}
@Override
public ClusterId clusterId() {
return serviceCluster.clusterId();
}
@Override
public ServiceType serviceType() {
return serviceCluster.serviceType();
}
@Override
public String serviceDescription(boolean plural) {
return serviceCluster.serviceDescription(plural);
}
@Override
public boolean isStorageCluster() {
return VespaModelUtil.isStorage(serviceCluster);
}
@Override
public ApplicationApi getApplication() {
return applicationApi;
}
@Override
public boolean isConfigServerLike() {
return serviceCluster.isConfigServerLike();
}
@Override
public Optional<SuspensionReasons> allServicesDown() {
SuspensionReasons reasons = new SuspensionReasons();
for (ServiceInstance service : servicesInGroup) {
if (hostStatus(service.hostName()).isSuspended()) {
reasons.mergeWith(SuspensionReasons.nothingNoteworthy());
continue;
}
if (service.serviceStatus() == ServiceStatus.DOWN) {
Optional<Instant> since = service.serviceStatusInfo().since();
if (since.isEmpty()) {
reasons.mergeWith(SuspensionReasons.isDown(service));
continue;
}
// Make sure services truly are down for some period of time before we allow suspension.
// On the other hand, a service coming down and up repeatedly should probably
// also be allowed... difficult without keeping track of history in a better way.
final Duration downDuration = Duration.between(since.get(), clock.instant());
if (downDuration.compareTo(downMoratorium) > 0) {
reasons.mergeWith(SuspensionReasons.downSince(service, since.get(), downDuration));
continue;
}
}
return Optional.empty();
}
return Optional.of(reasons);
}
int missingServices() { return missingServices; }
@Override
public boolean noServicesOutsideGroupIsDown() throws HostStateChangeDeniedException {
return servicesDownAndNotInGroup().size() + missingServices == 0;
}
@Override
public int percentageOfServicesDownOutsideGroup() {
int numberOfServicesDown = servicesDownAndNotInGroup().size() + missingServices;
return numberOfServicesDown * 100 / (serviceCluster.serviceInstances().size() + missingServices);
}
@Override
public int percentageOfServicesDownIfGroupIsAllowedToBeDown() {
int numberOfServicesDown = servicesDownAndNotInGroup().size() + missingServices + servicesInGroup.size();
return numberOfServicesDown * 100 / (serviceCluster.serviceInstances().size() + missingServices);
}
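/*
 * Illustrative arithmetic with assumed figures: a cluster configured for 10 services where only
 * 9 are present (missingServices == 1), with 2 services down outside the group and 3 services in
 * the group, gives percentageOfServicesDownOutsideGroup() == (2 + 1) * 100 / (9 + 1) == 30 and
 * percentageOfServicesDownIfGroupIsAllowedToBeDown() == (2 + 1 + 3) * 100 / (9 + 1) == 60.
 */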
/**
* Returns a description of the hosts outside the group that are allowed to be down (suspended),
* followed by a description of the services outside the group, and not on those hosts, that
* are down.
*/
@Override
public String downDescription() {
StringBuilder description = new StringBuilder();
Set<HostName> suspended = servicesNotInGroup.stream()
.map(ServiceInstance::hostName)
.filter(hostName -> hostStatus(hostName).isSuspended())
.collect(Collectors.toSet());
if (suspended.size() > 0) {
description.append(" ");
final int nodeLimit = 3;
description.append(suspended.stream().sorted().distinct().limit(nodeLimit).collect(Collectors.toList()).toString());
if (suspended.size() > nodeLimit) {
description.append(" and " + (suspended.size() - nodeLimit) + " others");
}
description.append(" are suspended.");
}
Set<ServiceInstance> downElsewhere = servicesDownAndNotInGroup().stream()
.filter(serviceInstance -> !suspended.contains(serviceInstance.hostName()))
.collect(Collectors.toSet());
final int downElsewhereTotal = downElsewhere.size() + missingServices;
if (downElsewhereTotal > 0) {
description.append(" ");
final int serviceLimit = 2; // services info is verbose
description.append(Stream.concat(
downElsewhere.stream().map(ServiceInstance::toString).sorted(),
missingServices > 0 ? Stream.of(descriptionOfMissingServices) : Stream.of())
.limit(serviceLimit)
.collect(Collectors.toList())
.toString());
if (downElsewhereTotal > serviceLimit) {
description.append(" and " + (downElsewhereTotal - serviceLimit) + " others");
}
description.append(" are down.");
}
return description.toString();
}
private Optional<StorageNode> storageNodeInGroup(Predicate<ServiceInstance> storageServicePredicate) {
if (!VespaModelUtil.isStorage(serviceCluster)) {
return Optional.empty();
}
Set<StorageNode> storageNodes = new HashSet<>();
// Track hosts separately to detect a second storage service instance on the same host.
Set<HostName> hostsWithStorageNode = new HashSet<>();
for (ServiceInstance serviceInstance : servicesInGroup) {
if (!storageServicePredicate.test(serviceInstance)) {
continue;
}
HostName hostName = serviceInstance.hostName();
if (nodeGroup.contains(hostName)) {
if (!hostsWithStorageNode.add(hostName)) {
throw new IllegalStateException("Found more than 1 storage service instance on " + hostName
+ ": last service instance is " + serviceInstance.configId()
+ " in storage cluster " + clusterInfo());
}
StorageNode storageNode = new StorageNodeImpl(
nodeGroup.getApplication(),
clusterId(),
serviceInstance,
clusterControllerClientFactory);
storageNodes.add(storageNode);
}
}
if (storageNodes.size() > 1) {
throw new IllegalStateException("Found more than 1 storage node (" + storageNodes
+ ") in the same cluster (" + clusterInfo() + ") in the same node group ("
+ getNodeGroup().toCommaSeparatedString() + "): E.g. suspension of such a setup is not supported "
+ " by the Cluster Controller and is dangerous w.r.t. data redundancy.");
}
return storageNodes.stream().findFirst();
}
@Override
public Optional<StorageNode> storageNodeInGroup() {
return storageNodeInGroup(serviceInstance -> true);
}
@Override
public Optional<StorageNode> upStorageNodeInGroup() {
return storageNodeInGroup(serviceInstance -> !serviceEffectivelyDown(serviceInstance));
}
@Override
public String clusterInfo() {
return "{ clusterId=" + clusterId() + ", serviceType=" + serviceType() + " }";
}
private Set<ServiceInstance> servicesDownAndNotInGroup() {
if (servicesDownAndNotInGroup == null) {
servicesDownAndNotInGroup = servicesNotInGroup.stream().filter(this::serviceEffectivelyDown).collect(Collectors.toSet());
}
return servicesDownAndNotInGroup;
}
private HostStatus hostStatus(HostName hostName) {
return hostInfos.getOrNoRemarks(hostName).status();
}
private boolean serviceEffectivelyDown(ServiceInstance service) throws HostStateChangeDeniedException {
if (hostStatus(service.hostName()).isSuspended()) {
return true;
}
switch (service.serviceStatus()) {
case DOWN: return true;
case UNKNOWN:
throw new HostStateChangeDeniedException(
nodeGroup,
HostedVespaPolicy.UNKNOWN_SERVICE_STATUS,
"Service status of " + service.descriptiveName() + " is not yet known");
default:
return false;
}
}
}
|
|
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.persistence;
import com.thoughtworks.go.database.Database;
import com.thoughtworks.go.database.QueryExtensions;
import com.thoughtworks.go.domain.PipelineTimelineEntry;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.domain.PipelineTimeline;
import com.thoughtworks.go.server.domain.user.PipelineSelections;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.HibernateException;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
import org.springframework.stereotype.Component;
import java.math.BigInteger;
import java.util.*;
/**
* @understands how to store and retrieve pipelines from the database
*/
@Component
public class PipelineRepository extends HibernateDaoSupport {
private static final Logger LOGGER = LoggerFactory.getLogger(PipelineRepository.class);
private final QueryExtensions queryExtensions;
private GoCache goCache;
@Autowired
public PipelineRepository(SessionFactory sessionFactory, GoCache goCache, Database databaseStrategy) {
this.goCache = goCache;
this.queryExtensions = databaseStrategy.getQueryExtensions();
setSessionFactory(sessionFactory);
}
public static int updateNaturalOrderForPipeline(Session session, Long pipelineId, double naturalOrder) {
String sql = "UPDATE pipelines SET naturalOrder = :naturalOrder WHERE id = :pipelineId";
SQLQuery query = session.createSQLQuery(sql);
query.setLong("pipelineId", pipelineId);
query.setDouble("naturalOrder", naturalOrder);
return query.executeUpdate();
}
public void updatePipelineTimeline(final PipelineTimeline pipelineTimeline, final List<PipelineTimelineEntry> tempEntriesForRollback) {
getHibernateTemplate().execute(new HibernateCallback() {
private static final int PIPELINE_NAME = 0;
private static final int ID = 1;
private static final int COUNTER = 2;
private static final int MODIFIED_TIME = 3;
private static final int FINGERPRINT = 4;
private static final int NATURAL_ORDER = 5;
private static final int REVISION = 6;
private static final int FOLDER = 7;
private static final int MOD_ID = 8;
private static final int PMR_ID = 9;
@Override
public Object doInHibernate(Session session) throws HibernateException {
LOGGER.info("Start updating pipeline timeline");
List<Object[]> matches = retrieveTimeline(session, pipelineTimeline);
List<PipelineTimelineEntry> newPipelines = populateFrom(matches);
addEntriesToPipelineTimeline(newPipelines, pipelineTimeline, tempEntriesForRollback);
updateNaturalOrdering(session, newPipelines);
LOGGER.info("Pipeline timeline updated");
return null;
}
private void updateNaturalOrdering(Session session, List<PipelineTimelineEntry> pipelines) {
for (PipelineTimelineEntry pipeline : pipelines) {
if (pipeline.hasBeenUpdated()) {
updateNaturalOrderForPipeline(session, pipeline.getId(), pipeline.naturalOrder());
}
}
}
private List<Object[]> loadTimeline(SQLQuery query) {
long startedAt = System.currentTimeMillis();
List<Object[]> matches = (List<Object[]>) query.list();
long duration = System.currentTimeMillis() - startedAt;
if (duration > 1000) {
LOGGER.warn("updating in memory pipeline-timeline took: {} ms", duration);
}
return matches;
}
private List<Object[]> retrieveTimeline(Session session, PipelineTimeline pipelineTimeline) {
SQLQuery query = session.createSQLQuery(queryExtensions.retrievePipelineTimeline());
query.setLong("pipelineId", pipelineTimeline.maximumId());
List<Object[]> matches = loadTimeline(query);
sortTimeLineByPidAndPmrId(matches);
return matches;
}
private void sortTimeLineByPidAndPmrId(List<Object[]> matches) {
matches.sort((m1, m2) -> {
long id1 = id(m1);
long id2 = id(m2);
if (id1 == id2) {
return Long.compare(pmrId(m1), pmrId(m2));
}
return Long.compare(id1, id2);
});
}
private List<PipelineTimelineEntry> populateFrom(List<Object[]> matches) {
ArrayList<PipelineTimelineEntry> newPipelines = new ArrayList<>();
if (matches.isEmpty()) {
return newPipelines;
}
Map<String, List<PipelineTimelineEntry.Revision>> revisions = new HashMap<>();
String name = null;
long curId = -1;
Integer counter = null;
double naturalOrder = 0.0;
PipelineTimelineEntry entry = null;
for (int i = 0; i < matches.size(); i++) {
Object[] row = matches.get(i);
long id = id(row);
if (curId != id) {
name = pipelineName(row);
curId = id;
counter = counter(row);
revisions = new HashMap<>();
naturalOrder = naturalOrder(row);
}
String fingerprint = fingerprint(row);
if (!revisions.containsKey(fingerprint)) {
revisions.put(fingerprint, new ArrayList<>());
}
revisions.get(fingerprint).add(rev(row));
int nextI = i + 1;
if (((nextI < matches.size() && id(matches.get(nextI)) != curId) ||//new pipeline instance starts in next record, so capture this one
nextI == matches.size())) {//this is the last record, so capture it
entry = new PipelineTimelineEntry(name, curId, counter, revisions, naturalOrder);
newPipelines.add(entry);
}
}
return newPipelines;
}
private String folder(Object[] row) {
return (String) row[FOLDER];
}
private PipelineTimelineEntry.Revision rev(Object[] row) {
return new PipelineTimelineEntry.Revision(modifiedTime(row), stringRevision(row), folder(row), modId(row));
}
private long pmrId(Object[] row) {
return ((BigInteger) row[PMR_ID]).longValue();
}
private long modId(Object[] row) {
return ((BigInteger) row[MOD_ID]).longValue();
}
private double naturalOrder(Object[] row) {
return (Double) row[NATURAL_ORDER];
}
private Date modifiedTime(Object[] row) {
return (Date) row[MODIFIED_TIME];
}
private String stringRevision(Object[] row) {
return (String) row[REVISION];
}
private String fingerprint(Object[] row) {
return String.valueOf(row[FINGERPRINT]);
}
private String pipelineName(Object[] row) {
return (String) row[PIPELINE_NAME];
}
private int counter(Object[] row) {
return row[COUNTER] == null ? -1 : ((BigInteger) row[COUNTER]).intValue();
}
private long id(Object[] first) {
return ((BigInteger) first[ID]).longValue();
}
});
}
private void addEntriesToPipelineTimeline(List<PipelineTimelineEntry> newEntries, PipelineTimeline pipelineTimeline, List<PipelineTimelineEntry> tempEntriesForRollback) {
for (PipelineTimelineEntry newEntry : newEntries) {
tempEntriesForRollback.add(newEntry);
pipelineTimeline.add(newEntry);
}
}
public long saveSelectedPipelines(PipelineSelections pipelineSelections) {
removePipelineSelectionFromCacheForUserId(pipelineSelections);
removePipelineSelectionFromCacheForCookie(pipelineSelections);
getHibernateTemplate().saveOrUpdate(pipelineSelections);
return pipelineSelections.getId();
}
public PipelineSelections findPipelineSelectionsById(long id) {
PipelineSelections pipelineSelections;
String key = pipelineSelectionForCookieKey(id);
if (goCache.isKeyInCache(key)) {
return (PipelineSelections) goCache.get(key);
}
synchronized (key) {
if (goCache.isKeyInCache(key)) {
return (PipelineSelections) goCache.get(key);
}
pipelineSelections = getHibernateTemplate().get(PipelineSelections.class, id);
if (null != pipelineSelections) {
goCache.put(key, pipelineSelections);
}
return pipelineSelections;
}
}
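/*
 * Illustrative cache-key shapes, derived from the key builders at the bottom of this class (the
 * id values are assumed): lookups are cached and synchronized on interned strings such as
 *
 *   com.thoughtworks.go.server.persistence.PipelineRepository_cookiePipelineSelection_42
 *   com.thoughtworks.go.server.persistence.PipelineRepository_userIdPipelineSelection_7
 */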
public PipelineSelections findPipelineSelectionsById(String id) {
if (StringUtils.isEmpty(id)) {
return null;
}
return findPipelineSelectionsById(Long.parseLong(id));
}
public PipelineSelections findPipelineSelectionsByUserId(Long userId) {
if (userId == null) {
return null;
}
PipelineSelections pipelineSelections;
String key = pipelineSelectionForUserIdKey(userId);
if (goCache.isKeyInCache(key)) {
return (PipelineSelections) goCache.get(key);
}
synchronized (key) {
if (goCache.isKeyInCache(key)) {
return (PipelineSelections) goCache.get(key);
}
List list = getHibernateTemplate().find("FROM PipelineSelections WHERE userId = ?", new Object[]{userId});
if (list.isEmpty()) {
pipelineSelections = null;
} else {
pipelineSelections = (PipelineSelections) list.get(0);
}
goCache.put(key, pipelineSelections);
return pipelineSelections;
}
}
private void removePipelineSelectionFromCacheForCookie(PipelineSelections pipelineSelections) {
String pipelineSelectionCookieKey = pipelineSelectionForCookieKey(pipelineSelections.getId());
synchronized (pipelineSelectionCookieKey) {
goCache.remove(pipelineSelectionCookieKey);
}
}
private void removePipelineSelectionFromCacheForUserId(PipelineSelections pipelineSelections) {
String pipelineSelectionUserIdKey = pipelineSelectionForUserIdKey(pipelineSelections.userId());
synchronized (pipelineSelectionUserIdKey) {
goCache.remove(pipelineSelectionUserIdKey);
}
}
String pipelineSelectionForUserIdKey(Long userId) {
return (PipelineRepository.class.getName() + "_userIdPipelineSelection_" + userId).intern();
}
String pipelineSelectionForCookieKey(long id) {
return (PipelineRepository.class.getName() + "_cookiePipelineSelection_" + id).intern();
}
}
|
|
/*
* Copyright (C) 2005-2008 Jive Software, 2022 Ignite Realtime Foundation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.commands;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.jivesoftware.openfire.IQHandlerInfo;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.auth.UnauthorizedException;
import org.jivesoftware.openfire.commands.admin.*;
import org.jivesoftware.openfire.commands.admin.group.*;
import org.jivesoftware.openfire.commands.admin.user.*;
import org.jivesoftware.openfire.commands.event.*;
import org.jivesoftware.openfire.commands.generic.Ping;
import org.jivesoftware.openfire.disco.*;
import org.jivesoftware.openfire.handler.IQHandler;
import org.xmpp.forms.DataForm;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import java.util.*;
/**
* An AdHocCommandHandler is responsible for providing discoverable information about the
* supported commands and for handling command requests. This is an implementation of
* XEP-0050: Ad-Hoc Commands.<p>
*
* Ad-hoc commands that require user interaction will have one or more stages. For each stage the
* user will complete a data form and send it back to the server. The data entered by the user is
* kept in a SessionData. Instances of {@link AdHocCommand} are stateless. In order to prevent
* "bad" users from consuming all system memory there exists a limit of simultaneous commands that
* a user might perform. Configure the system property {@code "xmpp.command.limit"} to control
* this limit. User sessions will also timeout and their data destroyed if they have not been
* executed within a time limit since the session was created. The default timeout value is 10
* minutes. The timeout value can be modified by setting the system property
* {@code "xmpp.command.timeout"}.<p>
*
* New commands can be added dynamically by calling {@link #addCommand(AdHocCommand)}.
* The command will immediately appear in the disco#items list and may be executed by those
* users with sufficient execution permissions.
*
* @author Gaston Dombiak
*/
public class AdHocCommandHandler extends IQHandler
implements ServerFeaturesProvider, DiscoInfoProvider, DiscoItemsProvider {
private static final String NAMESPACE = "http://jabber.org/protocol/commands";
private String serverName;
private IQHandlerInfo info;
private IQDiscoInfoHandler infoHandler;
private IQDiscoItemsHandler itemsHandler;
/**
* Manager that keeps the list of ad-hoc commands and processing command requests.
*/
private AdHocCommandManager manager;
public AdHocCommandHandler() {
super("Ad-Hoc Commands Handler");
info = new IQHandlerInfo("command", NAMESPACE);
manager = new AdHocCommandManager();
}
@Override
public IQ handleIQ(IQ packet) throws UnauthorizedException {
return manager.process(packet);
}
@Override
public IQHandlerInfo getInfo() {
return info;
}
@Override
public Iterator<String> getFeatures() {
return Collections.singleton(NAMESPACE).iterator();
}
@Override
public Iterator<Element> getIdentities(String name, String node, JID senderJID) {
Element identity = DocumentHelper.createElement("identity");
identity.addAttribute("category", "automation");
identity.addAttribute("type", NAMESPACE.equals(node) ? "command-list" : "command-node");
return Collections.singleton(identity).iterator();
}
@Override
public Iterator<String> getFeatures(String name, String node, JID senderJID) {
return Arrays.asList(NAMESPACE, "jabber:x:data").iterator();
}
@Override
public Set<DataForm> getExtendedInfos(String name, String node, JID senderJID) {
return new HashSet<>();
}
@Override
public boolean hasInfo(String name, String node, JID senderJID) {
if (NAMESPACE.equals(node)) {
return true;
}
else {
// Only include commands that the sender can execute
AdHocCommand command = manager.getCommand(node);
return command != null && command.hasPermission(senderJID);
}
}
@Override
public Iterator<DiscoItem> getItems(String name, String node, JID senderJID) {
List<DiscoItem> answer = new ArrayList<>();
if (!NAMESPACE.equals(node)) {
answer = Collections.emptyList();
}
else {
for (AdHocCommand command : manager.getCommands()) {
// Only include commands that the sender can invoke (i.e. has enough permissions)
if (command.hasPermission(senderJID)) {
final DiscoItem item = new DiscoItem(new JID(serverName),
command.getLabel(), command.getCode(), null);
answer.add(item);
}
}
}
return answer.iterator();
}
@Override
public void initialize(XMPPServer server) {
super.initialize(server);
serverName = server.getServerInfo().getXMPPDomain();
infoHandler = server.getIQDiscoInfoHandler();
itemsHandler = server.getIQDiscoItemsHandler();
}
@Override
public void start() throws IllegalStateException {
super.start();
infoHandler.setServerNodeInfoProvider(NAMESPACE, this);
itemsHandler.setServerNodeInfoProvider(NAMESPACE, this);
// Add the "out of the box" commands
addDefaultCommands();
}
@Override
public void stop() {
super.stop();
infoHandler.removeServerNodeInfoProvider(NAMESPACE);
itemsHandler.removeServerNodeInfoProvider(NAMESPACE);
// Stop commands
for (AdHocCommand command : manager.getCommands()) {
stopCommand(command);
}
}
/**
* Adds a new command to the list of supported ad-hoc commands by this server. The new
* command will appear in the discoverable items list and will be executed for those users
* with enough permission.
*
* @param command the new ad-hoc command to add.
*/
public void addCommand(AdHocCommand command) {
manager.addCommand(command);
startCommand(command);
}
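/*
 * Illustrative sketch (obtaining the handler reference is elided and assumed to come from the
 * running XMPPServer): a plugin can expose an additional command at runtime, after which it shows
 * up in disco#items for users that pass the command's permission check:
 *
 *   AdHocCommandHandler handler = ...; // e.g. looked up from the running server
 *   handler.addCommand(new Ping());
 */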
/**
* Removes the command from the list of ad-hoc commands supported by this server. The command
* will no longer appear in the discoverable items list.
*
* @param command the ad-hoc command to remove.
*/
public void removeCommand(AdHocCommand command) {
if (manager.removeCommand(command)) {
stopCommand(command);
}
}
private void addDefaultCommands() {
// TODO Complete when out of the box commands are implemented
addCommand(new GetNumberActiveUsers());
addCommand(new GetNumberOnlineUsers());
addCommand(new GetNumberUserSessions());
addCommand(new GetListActiveUsers());
addCommand(new GetUsersPresence());
addCommand(new GetListGroups());
addCommand(new GetListGroupUsers());
addCommand(new AddGroupUsers());
addCommand(new DeleteGroupUsers());
addCommand(new AddGroup());
addCommand(new UpdateGroup());
addCommand(new DeleteGroup());
addCommand(new AddUser());
addCommand(new DeleteUser());
addCommand(new AuthenticateUser());
addCommand(new ChangeUserPassword());
addCommand(new UserProperties());
addCommand(new PacketsNotification());
addCommand(new GetServerStats());
addCommand(new HttpBindStatus());
addCommand(new UserCreated());
addCommand(new UserModified());
addCommand(new UserDeleting());
addCommand(new GroupCreated());
addCommand(new GroupDeleting());
addCommand(new GroupModified());
addCommand(new GroupMemberAdded());
addCommand(new GroupMemberRemoved());
addCommand(new GroupAdminAdded());
addCommand(new GroupAdminRemoved());
addCommand(new VCardCreated());
addCommand(new VCardDeleting());
addCommand(new VCardModified());
addCommand(new GetAdminConsoleInfo());
addCommand(new Ping());
}
private void startCommand(AdHocCommand command) {
infoHandler.setServerNodeInfoProvider(command.getCode(), this);
itemsHandler.setServerNodeInfoProvider(command.getCode(), this);
}
private void stopCommand(AdHocCommand command) {
infoHandler.removeServerNodeInfoProvider(command.getCode());
itemsHandler.removeServerNodeInfoProvider(command.getCode());
}
}
|
|
/*******************************************************************************
* Licensed to UbiCollab.org under one or more contributor
* license agreements. See the NOTICE file distributed
* with this work for additional information regarding
* copyright ownership. UbiCollab.org licenses this file
* to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package ntnu.stud.valens.demonstration.activity;
import java.util.ArrayList;
import java.util.List;
import ntnu.stud.valens.demonstration.R;
import ntnu.stud.valens.demonstration.connectivity.ContentProviderHelper;
import org.achartengine.ChartFactory;
import org.achartengine.GraphicalView;
import org.achartengine.chart.BarChart.Type;
import org.achartengine.model.XYMultipleSeriesDataset;
import org.achartengine.model.XYSeries;
import org.achartengine.renderer.XYMultipleSeriesRenderer;
import org.achartengine.renderer.XYSeriesRenderer;
import android.app.Activity;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.Spinner;
/**
* Class for handling the functionality of the statistics screen
*
* @author fiLLLip, Elias
*
*/
public class Statistics extends Activity implements OnItemSelectedListener {
private static final String TAG = "no.ntnu.stud.fallprevention.activity";
private Spinner timeSpan, dataType;
private GraphicalView mChart;
private XYMultipleSeriesDataset mDataset = new XYMultipleSeriesDataset();
private XYMultipleSeriesRenderer mRenderer = new XYMultipleSeriesRenderer();
private XYSeries mCurrentSeries;
private XYSeriesRenderer mCurrentRenderer;
/**
* Creates the viewable contents for the screen, e.g. the spinners and
* the XY coordinate system.
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_statistics);
// Fill the time span spinner with some info
timeSpan = (Spinner) findViewById(R.id.time_span_spinner);
// Create an ArrayAdapter using the string array and a default spinner
// layout
ArrayAdapter<CharSequence> timeAdapter = ArrayAdapter
.createFromResource(this, R.array.time_period_array,
android.R.layout.simple_spinner_item);
// Specify the layout to use when the list of choices appears
timeAdapter
.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// Apply the adapter to the spinner
timeSpan.setAdapter(timeAdapter);
timeSpan.setOnItemSelectedListener(this);
// Do the same for the other spinner
dataType = (Spinner) findViewById(R.id.data_type_spinner);
// Create an ArrayAdapter using the string array and a default spinner
// layout
ArrayAdapter<CharSequence> dataAdapter = ArrayAdapter
.createFromResource(this, R.array.data_type_array,
android.R.layout.simple_spinner_item);
// Specify the layout to use when the list of choices appears
dataAdapter
.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// Apply the adapter to the spinner
dataType.setAdapter(dataAdapter);
dataType.setOnItemSelectedListener(this);
// Create the chart
initChart();
}
/**
* Initializes the chart, defines the visuals and which data series to
* display.
*/
private void initChart() {
mCurrentSeries = new XYSeries(""); // We don't want any label, it is
// just annoying.
mDataset.addSeries(mCurrentSeries);
mCurrentRenderer = new XYSeriesRenderer();
mCurrentRenderer.setFillPoints(true);
mCurrentRenderer.setLineWidth(1);
mCurrentRenderer.setDisplayChartValues(true);
mRenderer.addSeriesRenderer(mCurrentRenderer);
mRenderer.setLabelsTextSize(25);
mRenderer.setLegendTextSize(25);
mRenderer.setYLabelsColor(0, Color.BLACK);
mRenderer.setXLabelsColor(Color.BLACK);
mRenderer.setMarginsColor(Color.WHITE);
mRenderer.setBarSpacing(1d);
mChart = ChartFactory.getBarChartView(this, mDataset, mRenderer, Type.DEFAULT);
mChart.setBackgroundColor(Color.WHITE);
LinearLayout layout = (LinearLayout) findViewById(R.id.linear_layout);
layout.addView(mChart);
}
/**
* Fetches data from the content provider, based on the positions of the
* spinners, then displays the data in the chart.
*
* @param timeSpinner
* - the index of the position of the time window spinner.
* @param dataSourceSpinner
* - the index of the position of the data source spinner.
*/
private void setData(int timeSpinner, int dataSourceSpinner) {
Log.v(TAG, "GET DATA: " + timeSpinner + ", " + dataSourceSpinner);
// Clear the existing data, otherwise the graph would display multiple
// lines at the same time
mCurrentSeries.clear();
// Fetch information from the CP. A complex if-else chain is required to
// get the correct information
List<Double> data;
if (dataSourceSpinner == 1) {
// Data source is gait speed, still needs to find the time window
int time;
if (timeSpinner == 0) {
time = 7;
} else if (timeSpinner == 1) {
time = 30;
} else {
time = 90;
}
// Set the X and Y titles
mRenderer.setYTitle(getResources().getString(
R.string.statistics_y_label_gait_s));
mRenderer.setXTitle(getResources().getString(
R.string.statistics_x_label_days));
// Actually fetch data
data = new ContentProviderHelper(getApplicationContext())
.cpGetSpeedHistory(time);
// Transform to actual (x, y)-pairs
data = createXYPairs(data, 1);
} else if (dataSourceSpinner == 2) {
// Data source is gait variability, still needs to find the time
// window
int time;
if (timeSpinner == 0) {
time = 7;
} else if (timeSpinner == 1) {
time = 30;
} else {
time = 90;
}
// Set the X and Y titles
mRenderer.setYTitle(getResources().getString(
R.string.statistics_y_label_gait_v));
mRenderer.setXTitle(getResources().getString(
R.string.statistics_x_label_days));
// Actually fetch data
data = new ContentProviderHelper(getApplicationContext())
.cpGetVariabilityHistory(time);
// Transform to actual (x, y)-pairs
data = createXYPairs(data, 1);
} else {
// Data source is steps, still needs to find the time window.
// Additionally, in the case of steps, one needs to decide the
// interval width one wants to count steps in.
int length, interval, unit;
if (timeSpinner == 0) {
// 1 week = 84 bi-hour intervals.
unit = 2;
interval = 2;
length = 84;
mRenderer.setXTitle(getResources().getString(
R.string.statistics_x_label_hours));
} else if (timeSpinner == 1) {
// 1 month = 30 day intervals.
unit = 1;
interval = 24;
length = 30;
mRenderer.setXTitle(getResources().getString(
R.string.statistics_x_label_days));
} else {
// 3 months = 90 day intervals
unit = 1;
interval = 24;
length = 90;
mRenderer.setXTitle(getResources().getString(
R.string.statistics_x_label_days));
}
// Set label on Y axis. Because the X axis label varies based on
// time gap, we needed to set that in the previous if-else chain.
mRenderer.setYTitle(getResources().getString(
R.string.statistics_y_label_steps));
// Really fetch data
data = new ContentProviderHelper(getApplicationContext())
.cpGetStepsHistory(length, interval);
// Transform data into actual (x,y)-pairs.
data = createXYPairs(data, unit);
}
// We have a list of alternating x- and y-values, with x coming first.
// Therefore we need to iterate through the list and extract the (x,
// y)-pairs before adding them to mCurrentSeries.
double x = 0;
double y = 0;
boolean xSet = false;
for (double dp : data) {
if (xSet) {
y = dp;
mCurrentSeries.add(x, y);
xSet = false;
} else {
x = dp;
xSet = true;
}
}
}
/**
* Creates realistic (x, y)-pairs out of a list of unrealistic (x, y)-pairs.
*
* This is needed because the x values provided by the ContentProviderHelper
* are not the ones we want to display. Therefore, this method exists to
* turn the x values into the ones that the statistics screen wants to
* display.
*
* The method gives the newest data point (the last y value) an x value of 0,
* and every earlier data point an x value equal to that of the following data
* point minus the units parameter (a worked example follows this method).
*
* @param data
* - a list of doubles, where the elements are alternatingly x
* and y elements, starting with x.
* @param units
* - the number of units of the unit in question each data point
* represents.
* @return a list of doubles, where the elements are alternatingly x and y
* elements, starting with x.
*/
private List<Double> createXYPairs(List<Double> data, int units) {
List<Double> mReturner = new ArrayList<Double>();
double y = 0;
double x = -((units * data.size()) / 2);
boolean xSet = false;
for (double dp : data) {
if (xSet) {
y = dp;
mReturner.add(x);
mReturner.add(y);
xSet = false;
} else {
x += units;
xSet = true;
}
}
return mReturner;
}
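/*
 * Worked example (illustrative only, not part of the original source):
 * createXYPairs([10.0, 5.0, 11.0, 7.0], 1) ignores the incoming x values (10.0 and 11.0),
 * starts counting from x = -((1 * 4) / 2) = -2 and advances by one unit per pair,
 * producing [-1.0, 5.0, 0.0, 7.0]: the newest y value (7.0) ends up at x = 0 and the
 * older y value (5.0) one unit earlier at x = -1.
 */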
/**
* Repaints the graph, and fetches the data to make sure it is updated
* according to the spinner values.
*
*/
private void repaint() {
setData(timeSpan.getSelectedItemPosition(),
dataType.getSelectedItemPosition());
mChart.repaint();
}
/**
* Fired when the user selects an item from one of the spinners. (And once
* when the screen is created). Repaints the graph to keep it updated with
* the spinner settings.
*/
@Override
public void onItemSelected(AdapterView<?> parent, View view, int pos,
long id) {
Log.v(TAG, "Item selected");
repaint();
}
// This method is required by the interface, but to our knowledge it is never called.
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
}
|
|
package com.sequenceiq.cloudbreak.cm.polling.task;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import com.cloudera.api.swagger.HostsResourceApi;
import com.cloudera.api.swagger.client.ApiClient;
import com.cloudera.api.swagger.client.ApiException;
import com.cloudera.api.swagger.model.ApiHost;
import com.cloudera.api.swagger.model.ApiHostList;
import com.sequenceiq.cloudbreak.api.endpoint.v4.stacks.base.InstanceStatus;
import com.sequenceiq.cloudbreak.cluster.service.ClusterEventService;
import com.sequenceiq.cloudbreak.cm.client.ClouderaManagerApiPojoFactory;
import com.sequenceiq.cloudbreak.cm.polling.ClouderaManagerCommandPollerObject;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceGroup;
import com.sequenceiq.cloudbreak.domain.stack.instance.InstanceMetaData;
@SuppressWarnings("checkstyle:Regexp")
@RunWith(MockitoJUnitRunner.class)
public class ClouderaManagerHostStatusCheckerTest {
private static final String VIEWTYPE = "FULL";
@Mock
private ClouderaManagerApiPojoFactory clouderaManagerApiPojoFactory;
@Mock
private ClusterEventService clusterEventService;
@Mock
private HostsResourceApi hostsResourceApi;
private ClouderaManagerHostStatusChecker underTest;
@Before
public void init() {
underTest = new ClouderaManagerHostStatusChecker(clouderaManagerApiPojoFactory, clusterEventService, false);
when(clouderaManagerApiPojoFactory.getHostsResourceApi(any(ApiClient.class))).thenReturn(hostsResourceApi);
}
@Test
public void shouldBeFalseWhenNoHostsReturned() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(Collections.emptyList()));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData));
assertFalse(result);
}
@Test
public void shouldBeFalseWhenHostsReturnedHasNoHeartbeat() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
ApiHost apiHost = new ApiHost().ipAddress(instanceMetaData.getPrivateIp());
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData));
assertFalse(result);
}
@Test
public void shouldBeFalseWhenHostsReturnedHasOldHeartbeat() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
ApiHost apiHost = new ApiHost()
.ipAddress(instanceMetaData.getPrivateIp())
.lastHeartbeat(Instant.now().minus(5, ChronoUnit.MINUTES).toString());
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData));
assertFalse(result);
}
@Test
public void shouldBeTrueWhenHostsReturnedHasRecentHeartbeat() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData));
assertTrue(result);
}
@Test
public void shouldBeFalseWhenHostsReturnedHasDifferentIp() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
ApiHost apiHost = new ApiHost()
.ipAddress("2.2.2.2")
.lastHeartbeat(Instant.now().plus(5, ChronoUnit.MINUTES).toString());
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData));
assertFalse(result);
}
@Test
public void shouldBeFalseWhenHostsHasMissingHost() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertFalse(result);
}
@Test
public void shouldBeTrueWhenMultipleValidHosts() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
ApiHost apiHost = getValidApiHost(instanceMetaData);
ApiHost apiHost2 = getValidApiHost(instanceMetaData2);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost, apiHost2)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
@Test
public void shouldBeFalseWhenOneHostHasDifferentIp() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
ApiHost apiHost = getValidApiHost(instanceMetaData);
ApiHost apiHost2 = getValidApiHost(instanceMetaData2).ipAddress("3.3.3.3");
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost, apiHost2)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertFalse(result);
}
@Test
public void shouldBeTrueWhenOneInstanceHasNoDiscoveryFqdn() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
instanceMetaData2.setDiscoveryFQDN(null);
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
@Test
public void shouldBeTrueWhenOneInstanceIsTerminated() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
instanceMetaData2.setInstanceStatus(InstanceStatus.TERMINATED);
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
@Test
public void shouldBeTrueWhenOneInstanceIsDeletedOnProvider() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
instanceMetaData2.setInstanceStatus(InstanceStatus.DELETED_ON_PROVIDER_SIDE);
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
@Test
public void shouldBeTrueWhenOneInstanceIsStopped() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
instanceMetaData2.setInstanceStatus(InstanceStatus.STOPPED);
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
@Test
public void shouldBeTrueWhenOneInstanceIsFailed() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
instanceMetaData2.setInstanceStatus(InstanceStatus.FAILED);
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
@Test
public void shouldBeTrueWhenOneInstanceIsOrchestrationFailed() throws ApiException {
InstanceMetaData instanceMetaData = validInstanceMetadata();
InstanceMetaData instanceMetaData2 = validInstanceMetadata();
instanceMetaData2.setPrivateIp("2.2.2.2");
instanceMetaData2.setInstanceStatus(InstanceStatus.ORCHESTRATION_FAILED);
ApiHost apiHost = getValidApiHost(instanceMetaData);
when(hostsResourceApi.readHosts(null, null, VIEWTYPE)).thenReturn(new ApiHostList().items(List.of(apiHost)));
boolean result = underTest.doStatusCheck(getPollerObject(instanceMetaData, instanceMetaData2));
assertTrue(result);
}
private ApiHost getValidApiHost(InstanceMetaData instanceMetaData) {
return new ApiHost()
.ipAddress(instanceMetaData.getPrivateIp())
.lastHeartbeat(Instant.now().plus(5, ChronoUnit.MINUTES).toString());
}
private InstanceMetaData validInstanceMetadata() {
InstanceMetaData instanceMetaData = new InstanceMetaData();
instanceMetaData.setDiscoveryFQDN("ins1");
instanceMetaData.setInstanceStatus(InstanceStatus.SERVICES_RUNNING);
instanceMetaData.setPrivateIp("1.1.1.1");
return instanceMetaData;
}
private ClouderaManagerCommandPollerObject getPollerObject(InstanceMetaData... instanceMetaDatas) {
Stack stack = new Stack();
InstanceGroup instanceGroup = new InstanceGroup();
instanceGroup.setInstanceMetaData(Set.of(instanceMetaDatas));
stack.setInstanceGroups(Set.of(instanceGroup));
return new ClouderaManagerCommandPollerObject(stack, new ApiClient(), BigDecimal.ONE);
}
}
|
|
/*
* Copyright (C) 2015-2018 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.datastream.csp;
import io.datakernel.bytebuf.ByteBuf;
import io.datakernel.bytebuf.ByteBufPool;
import io.datakernel.common.MemSize;
import io.datakernel.csp.ChannelConsumer;
import io.datakernel.csp.ChannelOutput;
import io.datakernel.datastream.AbstractStreamConsumer;
import io.datakernel.datastream.StreamConsumer;
import io.datakernel.datastream.StreamDataAcceptor;
import io.datakernel.promise.Promise;
import io.datakernel.serializer.BinarySerializer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.ArrayDeque;
import java.util.function.BiConsumer;
import static io.datakernel.common.Preconditions.checkArgument;
import static io.datakernel.common.Utils.nullify;
import static io.datakernel.eventloop.RunnableWithContext.wrapContext;
import static java.lang.Math.max;
/**
* An adapter that converts a {@link ChannelConsumer} of {@link ByteBuf ByteBufs} to a {@link StreamConsumer} of some type,
* that is serialized into binary data using a given {@link BinarySerializer}.
*/
public final class ChannelSerializer<T> extends AbstractStreamConsumer<T> implements WithStreamToChannel<ChannelSerializer<T>, T, ByteBuf> {
private static final Logger logger = LoggerFactory.getLogger(ChannelSerializer.class);
/**
* Binary format: 1-byte varlen message size + message, for messages with max size up to 128 bytes
* This is the most efficient and fast binary representation for both serializer and deserializer.
* <p>
* It is still possible to change max size at any time, switching from 1-byte to 2-byte or 3-byte header size or vice versa,
* because varlen encoding of message size is fully backward and forward compatible.
*/
public static final MemSize MAX_SIZE_1 = MemSize.bytes(128); // (1 << (1 * 7))
/**
* Binary format: 2-byte varlen message size + message, for messages with max size up to 16KB
*/
public static final MemSize MAX_SIZE_2 = MemSize.kilobytes(16); // (1 << (2 * 7))
/**
* Binary format: 3-byte varlen message size + message, for messages with max size up to 2MB
* Messages with size >2MB are not supported
*/
public static final MemSize MAX_SIZE_3 = MemSize.megabytes(2); // (1 << (3 * 7))
/**
* Default setting for max message size (2MB).
* Messages with size >2MB are not supported
* <p>
* Because varlen encoding of message size is fully backward and forward compatible,
* even for smaller messages it is possible to start with default max message size (2MB),
* and fine-tune performance by switching to 1-byte or 2-byte encoding at later time.
*/
public static final MemSize MAX_SIZE = MAX_SIZE_3;
private final BinarySerializer<T> serializer;
private static final ArrayIndexOutOfBoundsException OUT_OF_BOUNDS_EXCEPTION = new ArrayIndexOutOfBoundsException("Message overflow");
public static final MemSize DEFAULT_INITIAL_BUFFER_SIZE = MemSize.kilobytes(16);
private MemSize initialBufferSize = DEFAULT_INITIAL_BUFFER_SIZE;
private MemSize maxMessageSize = MAX_SIZE;
private boolean explicitEndOfStream = false;
@Nullable
private Duration autoFlushInterval;
private BiConsumer<T, Throwable> serializationErrorHandler = ($, e) -> closeEx(e);
private Input input;
private ChannelConsumer<ByteBuf> output;
private final ArrayDeque<ByteBuf> bufs = new ArrayDeque<>();
private boolean flushing;
// region creators
private ChannelSerializer(BinarySerializer<T> serializer) {
this.serializer = serializer;
}
/**
* Creates a new instance of the serializer for type T
*/
public static <T> ChannelSerializer<T> create(BinarySerializer<T> serializer) {
return new ChannelSerializer<>(serializer);
}
/**
* Sets the initial buffer size - a buffer of this size will
* be allocated first when trying to serialize incoming item
* <p>
* Defaults to 16kb
*/
public ChannelSerializer<T> withInitialBufferSize(MemSize bufferSize) {
this.initialBufferSize = bufferSize;
return this;
}
/**
* Sets the max message size - when a single message takes more
* than this amount of memory to be serialized, this transformer
* will be closed with an {@link #OUT_OF_BOUNDS_EXCEPTION out of bounds exception}
* unless {@link #withSkipSerializationErrors} was used to ignore such errors.
*/
public ChannelSerializer<T> withMaxMessageSize(MemSize maxMessageSize) {
checkArgument(maxMessageSize.compareTo(MemSize.ZERO) > 0 && maxMessageSize.compareTo(MAX_SIZE_3) <= 0,
"Maximum message size cannot be less than 0 bytes or larger than 2 megabytes");
this.maxMessageSize = maxMessageSize;
return this;
}
/**
* Sets the auto flush interval - when this is set the
* transformer will automatically flush itself at a given interval
*/
public ChannelSerializer<T> withAutoFlushInterval(@Nullable Duration autoFlushInterval) {
this.autoFlushInterval = autoFlushInterval;
return this;
}
/**
* Enables skipping of serialization errors.
* <p>
* When this method is called, the transformer ignores serialization errors and just logs them;
* the default behaviour is to close the serializer with the error.
*/
public ChannelSerializer<T> withSkipSerializationErrors() {
return withSerializationErrorHandler((item, e) -> logger.warn("Skipping serialization error for {} in {}", item, this, e));
}
/**
* Sets a serialization error handler for this serializer. The handler accepts the item that failed to serialize and the serialization error.
* The default handler simply closes the serializer with the received error.
*/
public ChannelSerializer<T> withSerializationErrorHandler(BiConsumer<T, Throwable> handler) {
this.serializationErrorHandler = handler;
return this;
}
public ChannelSerializer<T> withExplicitEndOfStream() {
return withExplicitEndOfStream(true);
}
public ChannelSerializer<T> withExplicitEndOfStream(boolean explicitEndOfStream) {
this.explicitEndOfStream = explicitEndOfStream;
return this;
}
@Override
public ChannelOutput<ByteBuf> getOutput() {
return output -> {
this.output = output;
resume(input);
};
}
// endregion
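/*
 * A minimal configuration sketch (assumption, not part of the original source): the builder
 * methods above can be chained when wiring a stream into a channel. "userSerializer" is a
 * hypothetical BinarySerializer<User> instance.
 *
 *   ChannelSerializer<User> channelSerializer = ChannelSerializer.create(userSerializer)
 *           .withInitialBufferSize(MemSize.kilobytes(4))
 *           .withMaxMessageSize(ChannelSerializer.MAX_SIZE_2)  // 2-byte varlen header, messages up to 16KB
 *           .withAutoFlushInterval(Duration.ofMillis(100));
 */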
@Override
protected void onInit() {
input = new Input(serializer, initialBufferSize.toInt(), maxMessageSize.toInt(), autoFlushInterval, serializationErrorHandler);
}
@Override
protected void onStarted() {
if (output != null) {
resume(input);
}
}
@Override
protected void onEndOfStream() {
input.flush();
}
@Override
protected void onError(Throwable e) {
output.closeEx(e);
}
@Override
protected void onCleanup() {
bufs.forEach(ByteBuf::recycle);
bufs.clear();
input.buf = nullify(input.buf, ByteBuf::recycle);
}
private void doFlush() {
if (flushing) return;
if (!bufs.isEmpty()) {
flushing = true;
output.accept(bufs.poll())
.whenResult(() -> {
flushing = false;
doFlush();
})
.whenException(this::closeEx);
} else {
if (isEndOfStream()) {
flushing = true;
Promise.complete()
.then(() -> (explicitEndOfStream ?
output.accept(ByteBuf.wrapForReading(new byte[]{0})) :
Promise.complete()))
.then(output::acceptEndOfStream)
.whenResult(this::acknowledge);
} else {
resume(input);
}
}
}
private final class Input implements StreamDataAcceptor<T> {
private final BinarySerializer<T> serializer;
private ByteBuf buf = ByteBuf.empty();
private int estimatedMessageSize;
private final int headerSize;
private final int maxMessageSize;
private final int initialBufferSize;
private final int autoFlushIntervalMillis;
private boolean flushPosted;
private final BiConsumer<T, Throwable> serializationErrorHandler;
public Input(@NotNull BinarySerializer<T> serializer, int initialBufferSize, int maxMessageSize, @Nullable Duration autoFlushInterval, BiConsumer<T, Throwable> serializationErrorHandler) {
this.serializationErrorHandler = serializationErrorHandler;
this.serializer = serializer;
this.maxMessageSize = maxMessageSize;
this.headerSize = varintSize(maxMessageSize - 1);
this.estimatedMessageSize = 1;
this.initialBufferSize = initialBufferSize;
this.autoFlushIntervalMillis = autoFlushInterval == null ? -1 : (int) autoFlushInterval.toMillis();
}
@Override
public void accept(T item) {
int positionBegin;
int positionItem;
for (; ; ) {
if (buf.writeRemaining() < headerSize + estimatedMessageSize + (estimatedMessageSize >>> 2)) {
onFullBuffer();
}
positionBegin = buf.tail();
positionItem = positionBegin + headerSize;
buf.tail(positionItem);
try {
buf.tail(serializer.encode(buf.array(), buf.tail(), item));
} catch (ArrayIndexOutOfBoundsException e) {
onUnderEstimate(positionBegin);
continue;
} catch (Exception e) {
onSerializationError(item, positionBegin, e);
return;
}
break;
}
int positionEnd = buf.tail();
int messageSize = positionEnd - positionItem;
if (messageSize > estimatedMessageSize) {
if (messageSize < maxMessageSize) {
estimatedMessageSize = messageSize;
} else {
onSerializationError(item, positionBegin, OUT_OF_BOUNDS_EXCEPTION);
return;
}
}
writeSize(buf.array(), positionBegin, messageSize);
}
private void writeSize(byte[] buf, int pos, int size) {
if (headerSize == 1) {
buf[pos] = (byte) size;
return;
}
buf[pos] = (byte) ((size & 0x7F) | 0x80);
size >>>= 7;
if (headerSize == 2) {
buf[pos + 1] = (byte) size;
return;
}
assert headerSize == 3;
buf[pos + 1] = (byte) ((size & 0x7F) | 0x80);
size >>>= 7;
buf[pos + 2] = (byte) size;
}
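/*
 * Worked example (illustrative only, not part of the original source): with
 * maxMessageSize = MAX_SIZE_2 (16KB) the header is varintSize(16383) = 2 bytes. A message of
 * 300 bytes is then prefixed with {0xAC, 0x02}: (300 & 0x7F) | 0x80 = 0xAC carries the low
 * 7 bits with the continuation bit set, and 300 >>> 7 = 2 goes into the second byte. With the
 * default MAX_SIZE_3 the same size occupies a 3-byte header: {0xAC, 0x82, 0x00}.
 */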
private ByteBuf allocateBuffer() {
return ByteBufPool.allocate(max(initialBufferSize, headerSize + estimatedMessageSize + (estimatedMessageSize >>> 2)));
}
private void onFullBuffer() {
flush();
buf = allocateBuffer();
if (!flushPosted) {
postFlush();
}
}
private void onUnderEstimate(int positionBegin) {
buf.tail(positionBegin);
int writeRemaining = buf.writeRemaining();
flush();
buf = ByteBufPool.allocate(max(initialBufferSize, writeRemaining + (writeRemaining >>> 1) + 1));
}
private void onSerializationError(T item, int positionBegin, Exception e) {
buf.tail(positionBegin);
serializationErrorHandler.accept(item, e);
}
private void flush() {
if (buf == null) return;
if (buf.canRead()) {
if (!bufs.isEmpty()) {
suspend();
}
bufs.add(buf);
estimatedMessageSize -= estimatedMessageSize >>> 8;
} else {
buf.recycle();
}
buf = ByteBuf.empty();
doFlush();
}
private void postFlush() {
flushPosted = true;
if (autoFlushIntervalMillis == -1)
return;
if (autoFlushIntervalMillis == 0) {
eventloop.postLast(wrapContext(this, () -> {
flushPosted = false;
flush();
}));
} else {
eventloop.delayBackground(autoFlushIntervalMillis, wrapContext(this, () -> {
flushPosted = false;
flush();
}));
}
}
}
private static int varintSize(int value) {
return 1 + (31 - Integer.numberOfLeadingZeros(value)) / 7;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector;
import static org.apache.arrow.vector.NullCheckingForGet.NULL_CHECKING_ENABLED;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.vector.complex.impl.TimeStampNanoTZReaderImpl;
import org.apache.arrow.vector.complex.reader.FieldReader;
import org.apache.arrow.vector.holders.NullableTimeStampNanoTZHolder;
import org.apache.arrow.vector.holders.TimeStampNanoTZHolder;
import org.apache.arrow.vector.types.TimeUnit;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.TransferPair;
/**
* TimeStampNanoTZVector implements a fixed width vector (8 bytes) of
* timestamp (nanosecond resolution) values which could be null. A validity buffer
* (bit vector) is maintained to track which elements in the vector are null.
*/
public final class TimeStampNanoTZVector extends TimeStampVector {
private final FieldReader reader;
private final String timeZone;
/**
* Instantiate a TimeStampNanoTZVector. This doesn't allocate any memory for
* the data in the vector.
*
* @param name name of the vector
* @param allocator allocator for memory management.
* @param timeZone the time zone of the timestamp values
*/
public TimeStampNanoTZVector(String name, BufferAllocator allocator, String timeZone) {
this(name, FieldType.nullable(new ArrowType.Timestamp(TimeUnit.NANOSECOND, timeZone)), allocator);
}
/**
* Instantiate a TimeStampNanoTZVector. This doesn't allocate any memory for
* the data in the vector.
*
* @param name name of the vector
* @param fieldType type of Field materialized by this vector
* @param allocator allocator for memory management.
*/
public TimeStampNanoTZVector(String name, FieldType fieldType, BufferAllocator allocator) {
super(name, fieldType, allocator);
ArrowType.Timestamp arrowType = (ArrowType.Timestamp) fieldType.getType();
timeZone = arrowType.getTimezone();
reader = new TimeStampNanoTZReaderImpl(TimeStampNanoTZVector.this);
}
/**
* Instantiate a TimeStampNanoTZVector. This doesn't allocate any memory for
* the data in the vector.
*
* @param field Field materialized by this vector
* @param allocator allocator for memory management.
*/
public TimeStampNanoTZVector(Field field, BufferAllocator allocator) {
super(field, allocator);
ArrowType.Timestamp arrowType = (ArrowType.Timestamp) field.getFieldType().getType();
timeZone = arrowType.getTimezone();
reader = new TimeStampNanoTZReaderImpl(TimeStampNanoTZVector.this);
}
/**
* Get a reader that supports reading values from this vector.
*
* @return Field Reader for this vector
*/
@Override
public FieldReader getReader() {
return reader;
}
/**
* Get minor type for this vector. The vector holds values belonging
* to a particular type.
*
* @return {@link org.apache.arrow.vector.types.Types.MinorType}
*/
@Override
public MinorType getMinorType() {
return MinorType.TIMESTAMPNANOTZ;
}
/*----------------------------------------------------------------*
| |
| vector value retrieval methods |
| |
*----------------------------------------------------------------*/
/**
* Get the element at the given index from the vector and
* sets the state in holder. If element at given index
* is null, holder.isSet will be zero.
*
* @param index position of element
*/
public void get(int index, NullableTimeStampNanoTZHolder holder) {
if (NULL_CHECKING_ENABLED && isSet(index) == 0) {
holder.isSet = 0;
return;
}
holder.isSet = 1;
holder.value = valueBuffer.getLong((long) index * TYPE_WIDTH);
}
/**
* Same as {@link #get(int)}.
*
* @param index position of element
* @return element at given index
*/
public Long getObject(int index) {
if (isSet(index) == 0) {
return null;
} else {
return valueBuffer.getLong((long) index * TYPE_WIDTH);
}
}
/*----------------------------------------------------------------*
| |
| vector value setter methods |
| |
*----------------------------------------------------------------*/
/**
* Set the element at the given index to the value set in data holder.
* If the value in the holder is not indicated as set, the element
* at the given index will be null.
*
* @param index position of element
* @param holder nullable data holder for value of element
*/
public void set(int index, NullableTimeStampNanoTZHolder holder) throws IllegalArgumentException {
if (holder.isSet < 0) {
throw new IllegalArgumentException();
} else if (holder.isSet > 0) {
BitVectorHelper.setBit(validityBuffer, index);
setValue(index, holder.value);
} else {
BitVectorHelper.unsetBit(validityBuffer, index);
}
}
/**
* Set the element at the given index to the value set in data holder.
*
* @param index position of element
* @param holder data holder for value of element
*/
public void set(int index, TimeStampNanoTZHolder holder) {
BitVectorHelper.setBit(validityBuffer, index);
setValue(index, holder.value);
}
/**
* Same as {@link #set(int, NullableTimeStampNanoTZHolder)} except that it handles the
* case when index is greater than or equal to existing
* value capacity {@link #getValueCapacity()}.
*
* @param index position of element
* @param holder nullable data holder for value of element
*/
public void setSafe(
int index,
NullableTimeStampNanoTZHolder holder) throws IllegalArgumentException {
handleSafe(index);
set(index, holder);
}
/**
* Same as {@link #set(int, TimeStampNanoTZHolder)} except that it handles the
* case when index is greater than or equal to existing
* value capacity {@link #getValueCapacity()}.
*
* @param index position of element
* @param holder data holder for value of element
*/
public void setSafe(int index, TimeStampNanoTZHolder holder) {
handleSafe(index);
set(index, holder);
}
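/*
 * A minimal usage sketch (not part of the original source), assuming a BufferAllocator such as
 * a RootAllocator is available; values are epoch nanoseconds interpreted in the vector's time zone:
 *
 *   try (TimeStampNanoTZVector vector = new TimeStampNanoTZVector("ts", allocator, "UTC")) {
 *     vector.allocateNew(2);
 *     vector.setSafe(0, 1_000_000_000L);  // 1 second after the epoch
 *     vector.setNull(1);
 *     vector.setValueCount(2);
 *     Long first = vector.getObject(0);   // 1000000000
 *     Long second = vector.getObject(1);  // null
 *   }
 */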
/*----------------------------------------------------------------*
| |
| vector transfer |
| |
*----------------------------------------------------------------*/
/**
* Construct a TransferPair comprising of this and a target vector of
* the same type.
*
* @param ref name of the target vector
* @param allocator allocator for the target vector
* @return {@link TransferPair}
*/
@Override
public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
TimeStampNanoTZVector to = new TimeStampNanoTZVector(ref,
field.getFieldType(), allocator);
return new TransferImpl(to);
}
/**
* Construct a TransferPair with a desired target vector of the same type.
*
* @param to target vector
* @return {@link TransferPair}
*/
@Override
public TransferPair makeTransferPair(ValueVector to) {
return new TransferImpl((TimeStampNanoTZVector) to);
}
}
|
|
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spi.impl.eventservice.impl;
import com.hazelcast.core.ICompletableFuture;
import com.hazelcast.instance.MemberImpl;
import com.hazelcast.internal.cluster.ClusterService;
import com.hazelcast.internal.metrics.MetricsProvider;
import com.hazelcast.internal.metrics.MetricsRegistry;
import com.hazelcast.internal.metrics.Probe;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.internal.util.counters.MwCounter;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Connection;
import com.hazelcast.nio.Packet;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.EventFilter;
import com.hazelcast.spi.EventRegistration;
import com.hazelcast.spi.EventService;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.eventservice.InternalEventService;
import com.hazelcast.spi.impl.eventservice.impl.operations.DeregistrationOperationSupplier;
import com.hazelcast.spi.impl.eventservice.impl.operations.OnJoinRegistrationOperation;
import com.hazelcast.spi.impl.eventservice.impl.operations.RegistrationOperationSupplier;
import com.hazelcast.spi.impl.eventservice.impl.operations.SendEventOperation;
import com.hazelcast.spi.properties.HazelcastProperties;
import com.hazelcast.util.UuidUtil;
import com.hazelcast.util.executor.StripedExecutor;
import com.hazelcast.util.function.Supplier;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.logging.Level;
import static com.hazelcast.internal.metrics.ProbeLevel.MANDATORY;
import static com.hazelcast.internal.util.InvocationUtil.invokeOnStableClusterSerial;
import static com.hazelcast.internal.util.counters.MwCounter.newMwCounter;
import static com.hazelcast.spi.properties.GroupProperty.EVENT_QUEUE_CAPACITY;
import static com.hazelcast.spi.properties.GroupProperty.EVENT_QUEUE_TIMEOUT_MILLIS;
import static com.hazelcast.spi.properties.GroupProperty.EVENT_SYNC_TIMEOUT_MILLIS;
import static com.hazelcast.spi.properties.GroupProperty.EVENT_THREAD_COUNT;
import static com.hazelcast.util.EmptyStatement.ignore;
import static com.hazelcast.util.ExceptionUtil.rethrow;
import static com.hazelcast.util.ThreadUtil.createThreadName;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
* Service responsible for routing and dispatching local and remote events and keeping track of listener
* registrations. Local events are events published on a local subscriber (the subscriber is on this node)
* and remote events are published on a remote subscriber (the subscriber is on a different node and the
* event is sent to that node). The remote events are generally asynchronous, meaning that we send the event
* and don't wait for the response. The exception to this is that every {@link #eventSyncFrequency} remote
* event is sent as an operation and we wait for it to be submitted to the remote queue.
* <p>
* This implementation keeps registrations grouped into {@link EventServiceSegment}s. Each segment is
* responsible for a single service (e.g. map service, cluster service, proxy service).
* <p>
* The events are processed on a {@link StripedExecutor}. The executor has a fixed queue and thread size
* and it is shared between all events meaning that it is important to configure it correctly. Inadequate thread
* count sizing can lead to wasted threads or low throughput. Inadequate queue size can lead
* to {@link OutOfMemoryError} or events being dropped when the queue is full.
* The events are ordered by order key which can be defined when publishing the event meaning that you can
* define your custom ordering. Events with the same order key will be processed by the same thread on
* the executor.
* <p>
* This order can still be broken in some cases. This is possible because remote events are asynchronous
* and we don't wait for the response before publishing the next event. The previously published
* event can be retransmitted causing it to be received by the target node at a later time.
*/
@SuppressWarnings({"checkstyle:classfanoutcomplexity", "checkstyle:methodcount"})
public class EventServiceImpl implements InternalEventService, MetricsProvider {
public static final String SERVICE_NAME = "hz:core:eventService";
/**
* Usually remote events are sent asynchronously. This property dictates how often the event is sent
* synchronously. This means that the event will be sent as a {@link SendEventOperation} and we will
* wait for the response. The default value is {@value EVENT_SYNC_FREQUENCY}.
*
* @see #sendEvent(Address, EventEnvelope, int)
*/
public static final String EVENT_SYNC_FREQUENCY_PROP = "hazelcast.event.sync.frequency";
private static final EventRegistration[] EMPTY_REGISTRATIONS = new EventRegistration[0];
/**
* The default value for the {@link #EVENT_SYNC_FREQUENCY_PROP}.
*
* @see #sendEvent(Address, EventEnvelope, int)
*/
private static final int EVENT_SYNC_FREQUENCY = 100000;
/**
* The retry count for the synchronous remote events.
*
* @see #sendEvent(Address, EventEnvelope, int)
*/
private static final int SEND_RETRY_COUNT = 50;
/**
* How often failures are logged with {@link Level#WARNING}. Otherwise the failures are
* logged with a lower log level.
*/
private static final int WARNING_LOG_FREQUENCY = 1000;
/**
* Retry count for registration & deregistration operation invocations.
*/
private static final int MAX_RETRIES = 100;
final ILogger logger;
final NodeEngineImpl nodeEngine;
/** Service name to event service segment map */
private final ConcurrentMap<String, EventServiceSegment> segments;
/** The executor responsible for processing events */
private final StripedExecutor eventExecutor;
/**
* The timeout in milliseconds for offering an event to the local executor for processing. If the queue is full
* and the event is not accepted in the defined timeout, it will not be processed.
* This applies only to processing local events. Remote events (events on a remote subscriber) have no timeout,
* meaning that the event can be rejected immediately.
*/
private final long eventQueueTimeoutMs;
/** The thread count for the executor processing the events. */
@Probe(name = "threadCount")
private final int eventThreadCount;
/** The capacity of the executor processing the events. This capacity is shared for all events. */
@Probe(name = "queueCapacity")
private final int eventQueueCapacity;
@Probe(name = "totalFailureCount")
private final MwCounter totalFailures = newMwCounter();
@Probe(name = "rejectedCount")
private final MwCounter rejectedCount = newMwCounter();
@Probe(name = "syncDeliveryFailureCount")
private final MwCounter syncDeliveryFailureCount = newMwCounter();
private final int sendEventSyncTimeoutMillis;
private final InternalSerializationService serializationService;
private final int eventSyncFrequency;
public EventServiceImpl(NodeEngineImpl nodeEngine) {
this.nodeEngine = nodeEngine;
this.serializationService = (InternalSerializationService) nodeEngine.getSerializationService();
this.logger = nodeEngine.getLogger(EventService.class.getName());
HazelcastProperties hazelcastProperties = nodeEngine.getProperties();
this.eventThreadCount = hazelcastProperties.getInteger(EVENT_THREAD_COUNT);
this.eventQueueCapacity = hazelcastProperties.getInteger(EVENT_QUEUE_CAPACITY);
this.eventQueueTimeoutMs = hazelcastProperties.getMillis(EVENT_QUEUE_TIMEOUT_MILLIS);
this.sendEventSyncTimeoutMillis = hazelcastProperties.getInteger(EVENT_SYNC_TIMEOUT_MILLIS);
this.eventSyncFrequency = loadEventSyncFrequency();
this.eventExecutor = new StripedExecutor(
nodeEngine.getNode().getLogger(EventServiceImpl.class),
createThreadName(nodeEngine.getHazelcastInstance().getName(), "event"),
eventThreadCount,
eventQueueCapacity);
this.segments = new ConcurrentHashMap<String, EventServiceSegment>();
}
private static int loadEventSyncFrequency() {
try {
int eventSyncFrequency = Integer.parseInt(System.getProperty(EVENT_SYNC_FREQUENCY_PROP));
if (eventSyncFrequency <= 0) {
eventSyncFrequency = EVENT_SYNC_FREQUENCY;
}
return eventSyncFrequency;
} catch (Exception e) {
return EVENT_SYNC_FREQUENCY;
}
}
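/*
 * Illustrative note (not part of the original source): with the default frequency of 100000,
 * segment.incrementPublish() % eventSyncFrequency == 0 on the 100000th, 200000th, ... remote
 * event published through a segment, so only those events are sent as a SendEventOperation and
 * awaited. Starting the member with, for example, -Dhazelcast.event.sync.frequency=1000 makes
 * every 1000th remote event synchronous; an unset, non-numeric or non-positive value falls back
 * to the default above.
 */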
@Override
public void provideMetrics(MetricsRegistry registry) {
registry.scanAndRegister(this, "event");
}
@Override
public void close(EventRegistration eventRegistration) {
Registration registration = (Registration) eventRegistration;
Object listener = registration.getListener();
if (!(listener instanceof Closeable)) {
return;
}
try {
((Closeable) listener).close();
} catch (IOException e) {
ignore(e);
}
}
@Override
public int getEventThreadCount() {
return eventThreadCount;
}
@Override
public int getEventQueueCapacity() {
return eventQueueCapacity;
}
@Probe(name = "eventQueueSize", level = MANDATORY)
@Override
public int getEventQueueSize() {
return eventExecutor.getWorkQueueSize();
}
@Probe(level = MANDATORY)
private long eventsProcessed() {
return eventExecutor.processedCount();
}
@Override
public EventRegistration registerLocalListener(String serviceName, String topic, Object listener) {
return registerListenerInternal(serviceName, topic, TrueEventFilter.INSTANCE, listener, true);
}
@Override
public EventRegistration registerLocalListener(String serviceName, String topic, EventFilter filter, Object listener) {
return registerListenerInternal(serviceName, topic, filter, listener, true);
}
@Override
public EventRegistration registerListener(String serviceName, String topic, Object listener) {
return registerListenerInternal(serviceName, topic, TrueEventFilter.INSTANCE, listener, false);
}
@Override
public EventRegistration registerListener(String serviceName, String topic, EventFilter filter, Object listener) {
return registerListenerInternal(serviceName, topic, filter, listener, false);
}
/**
* Registers the listener for events matching the service name, topic and filter.
* If {@code localOnly} is {@code true}, it will register only for events published on this node,
* otherwise, the registration is sent to other nodes and the listener will listen for
* events on all cluster members.
*
* @param serviceName the service name for which we are registering
* @param topic the event topic for which we are registering
* @param filter the filter for the listened events
* @param listener the event listener
* @param localOnly whether to register on local events or on events on all cluster members
* @return the event registration
* @throws IllegalArgumentException if the listener or filter is null
*/
private EventRegistration registerListenerInternal(String serviceName, String topic, EventFilter filter, Object listener,
boolean localOnly) {
if (listener == null) {
throw new IllegalArgumentException("Listener required!");
}
if (filter == null) {
throw new IllegalArgumentException("EventFilter required!");
}
EventServiceSegment segment = getSegment(serviceName, true);
String id = UuidUtil.newUnsecureUuidString();
Registration reg = new Registration(id, serviceName, topic, filter, nodeEngine.getThisAddress(), listener, localOnly);
if (!segment.addRegistration(topic, reg)) {
return null;
}
if (!localOnly) {
Supplier<Operation> supplier = new RegistrationOperationSupplier(reg, nodeEngine.getClusterService());
invokeOnAllMembers(supplier);
}
return reg;
}
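/*
 * A minimal usage sketch (assumption, not part of the original source): a service typically
 * registers a listener for a topic and later publishes events to it. "MyListener" and the
 * service/topic names are hypothetical; the order key here is only illustrative.
 *
 *   EventRegistration registration =
 *           eventService.registerListener("my:service", "my-topic", new MyListener());
 *   eventService.publishEvent("my:service", "my-topic", event, event.hashCode());
 *   // ... later:
 *   eventService.deregisterListener("my:service", "my-topic", registration.getId());
 */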
public boolean handleRegistration(Registration reg) {
if (nodeEngine.getThisAddress().equals(reg.getSubscriber())) {
return false;
}
EventServiceSegment segment = getSegment(reg.getServiceName(), true);
return segment.addRegistration(reg.getTopic(), reg);
}
@Override
public boolean deregisterListener(String serviceName, String topic, Object id) {
EventServiceSegment segment = getSegment(serviceName, false);
if (segment == null) {
return false;
}
Registration reg = segment.removeRegistration(topic, String.valueOf(id));
if (reg != null && !reg.isLocalOnly()) {
Supplier<Operation> supplier = new DeregistrationOperationSupplier(reg, nodeEngine.getClusterService());
invokeOnAllMembers(supplier);
}
return reg != null;
}
private void invokeOnAllMembers(Supplier<Operation> operationSupplier) {
ICompletableFuture<Object> future = invokeOnStableClusterSerial(nodeEngine, operationSupplier, MAX_RETRIES);
try {
future.get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw rethrow(e);
} catch (ExecutionException e) {
throw rethrow(e);
}
}
@Override
public void deregisterAllListeners(String serviceName, String topic) {
EventServiceSegment segment = getSegment(serviceName, false);
if (segment != null) {
segment.removeRegistrations(topic);
}
}
public StripedExecutor getEventExecutor() {
return eventExecutor;
}
@Override
public EventRegistration[] getRegistrationsAsArray(String serviceName, String topic) {
EventServiceSegment segment = getSegment(serviceName, false);
if (segment == null) {
return EMPTY_REGISTRATIONS;
}
Collection<Registration> registrations = segment.getRegistrations(topic, false);
if (registrations == null || registrations.isEmpty()) {
return EMPTY_REGISTRATIONS;
} else {
return registrations.toArray(new Registration[0]);
}
}
/**
* {@inheritDoc}
* The returned collection is unmodifiable and the method always returns a non-null collection.
*
* @param serviceName service name
* @param topic topic name
* @return a non-null immutable collection of listener registrations
*/
@Override
public Collection<EventRegistration> getRegistrations(String serviceName, String topic) {
EventServiceSegment segment = getSegment(serviceName, false);
if (segment == null) {
return Collections.emptySet();
}
Collection<Registration> registrations = segment.getRegistrations(topic, false);
if (registrations == null || registrations.isEmpty()) {
return Collections.<EventRegistration>emptySet();
} else {
return Collections.<EventRegistration>unmodifiableCollection(registrations);
}
}
@Override
public boolean hasEventRegistration(String serviceName, String topic) {
EventServiceSegment segment = getSegment(serviceName, false);
if (segment == null) {
return false;
}
return segment.hasRegistration(topic);
}
@Override
public void publishEvent(String serviceName, String topic, Object event, int orderKey) {
Collection<EventRegistration> registrations = getRegistrations(serviceName, topic);
publishEvent(serviceName, registrations, event, orderKey);
}
@Override
public void publishEvent(String serviceName, EventRegistration registration, Object event, int orderKey) {
if (!(registration instanceof Registration)) {
throw new IllegalArgumentException();
}
if (isLocal(registration)) {
executeLocal(serviceName, event, registration, orderKey);
} else {
EventEnvelope eventEnvelope = new EventEnvelope(registration.getId(), serviceName, event);
sendEvent(registration.getSubscriber(), eventEnvelope, orderKey);
}
}
@Override
public void publishEvent(String serviceName, Collection<EventRegistration> registrations, Object event, int orderKey) {
Data eventData = null;
for (EventRegistration registration : registrations) {
if (!(registration instanceof Registration)) {
throw new IllegalArgumentException();
}
if (isLocal(registration)) {
executeLocal(serviceName, event, registration, orderKey);
continue;
}
if (eventData == null) {
eventData = serializationService.toData(event);
}
EventEnvelope eventEnvelope = new EventEnvelope(registration.getId(), serviceName, eventData);
sendEvent(registration.getSubscriber(), eventEnvelope, orderKey);
}
}
/**
* {@inheritDoc}
*
* @param serviceName service name
* @param registrations multiple event registrations
* @param event event object
* @param orderKey order key
* @throws IllegalArgumentException if any registration is not an instance of {@link Registration}
*/
@Override
public void publishRemoteEvent(String serviceName, Collection<EventRegistration> registrations, Object event, int orderKey) {
if (registrations.isEmpty()) {
return;
}
Data eventData = serializationService.toData(event);
for (EventRegistration registration : registrations) {
if (!(registration instanceof Registration)) {
throw new IllegalArgumentException();
}
if (isLocal(registration)) {
continue;
}
EventEnvelope eventEnvelope = new EventEnvelope(registration.getId(), serviceName, eventData);
sendEvent(registration.getSubscriber(), eventEnvelope, orderKey);
}
}
/**
* Processes the {@code event} on this node. If the event is not accepted by the executor
* within {@link #eventQueueTimeoutMs}, it is rejected and not processed; in that case the
* rejected count is increased and the failure is logged.
*
* @param serviceName the name of the service responsible for this event
* @param event the event
* @param registration the listener registration responsible for this event
* @param orderKey the key defining the thread on which the event is processed. Events with the same key maintain order.
* @see LocalEventDispatcher
*/
private void executeLocal(String serviceName, Object event, EventRegistration registration, int orderKey) {
if (!nodeEngine.isRunning()) {
return;
}
Registration reg = (Registration) registration;
try {
if (reg.getListener() != null) {
eventExecutor.execute(new LocalEventDispatcher(this, serviceName, event, reg.getListener()
, orderKey, eventQueueTimeoutMs));
} else {
logger.warning("Something seems wrong! Listener instance is null! -> " + reg);
}
} catch (RejectedExecutionException e) {
rejectedCount.inc();
if (eventExecutor.isLive()) {
logFailure("EventQueue overloaded! %s failed to publish to %s:%s",
event, reg.getServiceName(), reg.getTopic());
}
}
}
/**
* Sends a remote event to the {@code subscriber}.
* Each event segment keeps track of the published event count; every {@link #eventSyncFrequency}-th event
* is sent synchronously.
* A synchronous event is sent as a {@link SendEventOperation}; on failure the sync delivery failure count
* is increased and the failure is logged (see {@link EventProcessor}).
* Otherwise the event is sent asynchronously, which means we don't wait to see whether processing failed
* with an exception (see {@link RemoteEventProcessor}).
*/
private void sendEvent(Address subscriber, EventEnvelope eventEnvelope, int orderKey) {
String serviceName = eventEnvelope.getServiceName();
EventServiceSegment segment = getSegment(serviceName, true);
boolean sync = segment.incrementPublish() % eventSyncFrequency == 0;
if (sync) {
SendEventOperation op = new SendEventOperation(eventEnvelope, orderKey);
Future f = nodeEngine.getOperationService()
.createInvocationBuilder(serviceName, op, subscriber)
.setTryCount(SEND_RETRY_COUNT).invoke();
try {
f.get(sendEventSyncTimeoutMillis, MILLISECONDS);
} catch (Exception e) {
syncDeliveryFailureCount.inc();
if (logger.isFinestEnabled()) {
logger.finest("Sync event delivery failed. Event: " + eventEnvelope, e);
}
}
} else {
Packet packet = new Packet(serializationService.toBytes(eventEnvelope), orderKey)
.setPacketType(Packet.Type.EVENT);
if (!nodeEngine.getNode().getConnectionManager().transmit(packet, subscriber)) {
if (nodeEngine.isRunning()) {
logFailure("Failed to send event packet to: %s, connection might not be alive.", subscriber);
}
}
}
}
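// Illustrative restatement of the sync/async decision above (a sketch, not additional logic):
// every eventSyncFrequency-th event published through a segment is delivered as a blocking
// SendEventOperation; all other events are wrapped in an EVENT packet and sent without waiting.
//
//   long published = segment.incrementPublish();
//   boolean sync = published % eventSyncFrequency == 0;
//   // sync  -> invoke SendEventOperation and wait up to sendEventSyncTimeoutMillis
//   // async -> transmit a Packet of type EVENT and do not wait for the result
//
// The occasional synchronous round trip gives the publisher periodic feedback about the
// subscriber at the cost of blocking for at most the sync timeout.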
/**
* Returns the {@link EventServiceSegment} for the {@code service}. If the segment is {@code null} and
* {@code forceCreate} is {@code true}, the segment is created and registered with the {@link MetricsRegistry}.
*
* @param service the service of the segment
* @param forceCreate whether the segment should be created in case there is no segment
* @return the segment for the service or null if there is no segment and {@code forceCreate} is {@code false}
*/
public EventServiceSegment getSegment(String service, boolean forceCreate) {
EventServiceSegment segment = segments.get(service);
if (segment == null && forceCreate) {
// we can't use ConcurrentUtil here; the segment must be registered with the metricsRegistry only when it is newly created
EventServiceSegment newSegment = new EventServiceSegment(service, nodeEngine.getService(service));
EventServiceSegment existingSegment = segments.putIfAbsent(service, newSegment);
if (existingSegment == null) {
segment = newSegment;
nodeEngine.getMetricsRegistry().scanAndRegister(newSegment, "event.[" + service + "]");
} else {
segment = existingSegment;
}
}
return segment;
}
/** Returns {@code true} if the subscriber of the registration is this node */
boolean isLocal(EventRegistration reg) {
return nodeEngine.getThisAddress().equals(reg.getSubscriber());
}
/**
* {@inheritDoc}
* If the execution is rejected, the rejection count is increased and a failure is logged.
* The event callback is not re-executed.
*
* @param callback the callback to execute on a random event thread
*/
@Override
public void executeEventCallback(Runnable callback) {
if (!nodeEngine.isRunning()) {
return;
}
try {
eventExecutor.execute(callback);
} catch (RejectedExecutionException e) {
rejectedCount.inc();
if (eventExecutor.isLive()) {
logFailure("EventQueue overloaded! Failed to execute event callback: %s", callback);
}
}
}
/**
* {@inheritDoc}
* Handles an asynchronous remote event with a {@link RemoteEventProcessor}. The
* processor may determine the thread which will handle the event. If the execution is rejected,
* the rejection count is increased and a failure is logged. The event processing is not retried.
*
* @param packet the event packet to handle
* @see #sendEvent(Address, EventEnvelope, int)
*/
@Override
public void accept(Packet packet) {
try {
eventExecutor.execute(new RemoteEventProcessor(this, packet));
} catch (RejectedExecutionException e) {
rejectedCount.inc();
if (eventExecutor.isLive()) {
Connection conn = packet.getConn();
String endpoint = conn.getEndPoint() != null ? conn.getEndPoint().toString() : conn.toString();
logFailure("EventQueue overloaded! Failed to process event packet sent from: %s", endpoint);
}
}
}
@Override
public Operation getPreJoinOperation() {
// pre-join operations are only sent by the master member
return getOnJoinRegistrationOperation();
}
@Override
public Operation getPostJoinOperation() {
ClusterService clusterService = nodeEngine.getClusterService();
// Send the post-join registration operation only if this is a newly joining (non-master) member.
// The master sends registrations with the pre-join operation.
return clusterService.isMaster() ? null : getOnJoinRegistrationOperation();
}
/**
* Collects all non-local registrations and returns them as an {@link OnJoinRegistrationOperation}.
*
* @return the on-join operation containing all non-local registrations, or {@code null} if there are none
*/
private OnJoinRegistrationOperation getOnJoinRegistrationOperation() {
Collection<Registration> registrations = new LinkedList<Registration>();
for (EventServiceSegment segment : segments.values()) {
segment.collectRemoteRegistrations(registrations);
}
return registrations.isEmpty() ? null : new OnJoinRegistrationOperation(registrations);
}
public void shutdown() {
logger.finest("Stopping event executor...");
eventExecutor.shutdown();
for (EventServiceSegment segment : segments.values()) {
segment.clear();
}
segments.clear();
}
public void onMemberLeft(MemberImpl member) {
Address address = member.getAddress();
for (EventServiceSegment segment : segments.values()) {
segment.onMemberLeft(address);
}
}
/**
* Increases the failure count and logs the failure. Every {@value #WARNING_LOG_FREQUENCY}-th failure is logged
* at a higher log level.
*
* @param message the log message
* @param args the log message arguments
*/
private void logFailure(String message, Object... args) {
totalFailures.inc();
long total = totalFailures.get();
// It can happen that two threads concurrently conclude that the level should be WARNING because the
// inc/get is not atomic. This is an acceptable trade-off since it is unlikely to happen and the only
// side effect is an occasional extra warning-level log entry.
Level level = total % WARNING_LOG_FREQUENCY == 0
? Level.WARNING : Level.FINEST;
if (logger.isLoggable(level)) {
logger.log(level, String.format(message, args));
}
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.devicefarm.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devicefarm-2015-06-23/UpdateTestGridProject" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateTestGridProjectRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* ARN of the project to update.
* </p>
*/
private String projectArn;
/**
* <p>
* Human-readable name for the project.
* </p>
*/
private String name;
/**
* <p>
* Human-readable description for the project.
* </p>
*/
private String description;
/**
* <p>
* The VPC security groups and subnets that are attached to a project.
* </p>
*/
private TestGridVpcConfig vpcConfig;
/**
* <p>
* ARN of the project to update.
* </p>
*
* @param projectArn
* ARN of the project to update.
*/
public void setProjectArn(String projectArn) {
this.projectArn = projectArn;
}
/**
* <p>
* ARN of the project to update.
* </p>
*
* @return ARN of the project to update.
*/
public String getProjectArn() {
return this.projectArn;
}
/**
* <p>
* ARN of the project to update.
* </p>
*
* @param projectArn
* ARN of the project to update.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateTestGridProjectRequest withProjectArn(String projectArn) {
setProjectArn(projectArn);
return this;
}
/**
* <p>
* Human-readable name for the project.
* </p>
*
* @param name
* Human-readable name for the project.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* Human-readable name for the project.
* </p>
*
* @return Human-readable name for the project.
*/
public String getName() {
return this.name;
}
/**
* <p>
* Human-readable name for the project.
* </p>
*
* @param name
* Human-readable name for the project.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateTestGridProjectRequest withName(String name) {
setName(name);
return this;
}
/**
* <p>
* Human-readable description for the project.
* </p>
*
* @param description
* Human-readable description for the project.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* Human-readable description for the project.
* </p>
*
* @return Human-readable description for the project.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* Human-readable description for the project.
* </p>
*
* @param description
* Human-readable description for the project.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateTestGridProjectRequest withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* The VPC security groups and subnets that are attached to a project.
* </p>
*
* @param vpcConfig
* The VPC security groups and subnets that are attached to a project.
*/
public void setVpcConfig(TestGridVpcConfig vpcConfig) {
this.vpcConfig = vpcConfig;
}
/**
* <p>
* The VPC security groups and subnets that are attached to a project.
* </p>
*
* @return The VPC security groups and subnets that are attached to a project.
*/
public TestGridVpcConfig getVpcConfig() {
return this.vpcConfig;
}
/**
* <p>
* The VPC security groups and subnets that are attached to a project.
* </p>
*
* @param vpcConfig
* The VPC security groups and subnets that are attached to a project.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateTestGridProjectRequest withVpcConfig(TestGridVpcConfig vpcConfig) {
setVpcConfig(vpcConfig);
return this;
}
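/*
 * A minimal usage sketch of the fluent setters above (the ARN and the client variable are
 * assumptions for illustration, not taken from this file):
 *
 *   UpdateTestGridProjectRequest request = new UpdateTestGridProjectRequest()
 *           .withProjectArn("arn:aws:devicefarm:us-west-2:123456789012:testgrid-project:example")
 *           .withName("My Selenium grid")
 *           .withDescription("Updated description");
 *   // a configured AWSDeviceFarm client would then pass this request to its
 *   // updateTestGridProject operation.
 */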
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getProjectArn() != null)
sb.append("ProjectArn: ").append(getProjectArn()).append(",");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getVpcConfig() != null)
sb.append("VpcConfig: ").append(getVpcConfig());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UpdateTestGridProjectRequest == false)
return false;
UpdateTestGridProjectRequest other = (UpdateTestGridProjectRequest) obj;
if (other.getProjectArn() == null ^ this.getProjectArn() == null)
return false;
if (other.getProjectArn() != null && other.getProjectArn().equals(this.getProjectArn()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getVpcConfig() == null ^ this.getVpcConfig() == null)
return false;
if (other.getVpcConfig() != null && other.getVpcConfig().equals(this.getVpcConfig()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getProjectArn() == null) ? 0 : getProjectArn().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getVpcConfig() == null) ? 0 : getVpcConfig().hashCode());
return hashCode;
}
@Override
public UpdateTestGridProjectRequest clone() {
return (UpdateTestGridProjectRequest) super.clone();
}
}
|
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.annotations.VisibleForTesting;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.FileStatusWithDigest;
import com.google.devtools.build.lib.vfs.FileStatusWithDigestAdapter;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.lib.vfs.Symlinks;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
import javax.annotation.Nullable;
/**
* Encapsulates the filesystem operations needed to get state for a path. This is at least a
* 'lstat' to determine what type of file the path is.
* <ul>
* <li> For a non-existent file, the non-existence is noted.
* <li> For a symlink, the symlink target is noted.
* <li> For a directory, the existence is noted.
* <li> For a file, the existence is noted, along with metadata about the file (e.g.
* file digest). See {@link RegularFileStateValue}.
* </ul>
*
* <p>This class is an implementation detail of {@link FileValue} and should not be used outside of
* {@link FileFunction}. Instead, {@link FileValue} should be used by consumers that care about
* files.
*
* <p>All subclasses must implement {@link #equals} and {@link #hashCode} properly.
*/
@VisibleForTesting
public abstract class FileStateValue implements SkyValue {
public static final DirectoryFileStateValue DIRECTORY_FILE_STATE_NODE =
new DirectoryFileStateValue();
public static final NonexistentFileStateValue NONEXISTENT_FILE_STATE_NODE =
new NonexistentFileStateValue();
/** Type of a path. */
public enum Type {
REGULAR_FILE,
SPECIAL_FILE,
DIRECTORY,
SYMLINK,
NONEXISTENT,
}
protected FileStateValue() {
}
public static FileStateValue create(RootedPath rootedPath,
@Nullable TimestampGranularityMonitor tsgm) throws InconsistentFilesystemException,
IOException {
Path path = rootedPath.asPath();
// Stat, but don't throw an exception for the common case of a nonexistent file. This still
// throws an IOException in case any other IO error is encountered.
FileStatus stat = path.statIfFound(Symlinks.NOFOLLOW);
if (stat == null) {
return NONEXISTENT_FILE_STATE_NODE;
}
return createWithStatNoFollow(rootedPath, FileStatusWithDigestAdapter.adapt(stat), tsgm);
}
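// A minimal consumer-side sketch (the rootedPath and tsgm variables are placeholders): callers
// typically create the value and branch on its type rather than on the concrete subclass.
//
//   FileStateValue value = FileStateValue.create(rootedPath, tsgm);
//   switch (value.getType()) {
//     case REGULAR_FILE:
//       long size = value.getSize();                     // digest/contents proxy also available
//       break;
//     case SYMLINK:
//       PathFragment target = value.getSymlinkTarget();
//       break;
//     default:                                           // DIRECTORY, SPECIAL_FILE, NONEXISTENT
//       break;
//   }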
static FileStateValue createWithStatNoFollow(RootedPath rootedPath,
FileStatusWithDigest statNoFollow, @Nullable TimestampGranularityMonitor tsgm)
throws InconsistentFilesystemException, IOException {
Path path = rootedPath.asPath();
if (statNoFollow.isFile()) {
return statNoFollow.isSpecialFile()
? SpecialFileStateValue.fromStat(statNoFollow, tsgm)
: RegularFileStateValue.fromPath(path, statNoFollow, tsgm);
} else if (statNoFollow.isDirectory()) {
return DIRECTORY_FILE_STATE_NODE;
} else if (statNoFollow.isSymbolicLink()) {
return new SymlinkFileStateValue(path.readSymbolicLinkUnchecked());
}
throw new InconsistentFilesystemException("according to stat, existing path " + path + " is "
+ "neither a file nor directory nor symlink.");
}
@VisibleForTesting
@ThreadSafe
public static SkyKey key(RootedPath rootedPath) {
return new SkyKey(SkyFunctions.FILE_STATE, rootedPath);
}
public abstract Type getType();
PathFragment getSymlinkTarget() {
throw new IllegalStateException();
}
long getSize() {
throw new IllegalStateException();
}
@Nullable
byte[] getDigest() {
throw new IllegalStateException();
}
@Override
public String toString() {
return prettyPrint();
}
abstract String prettyPrint();
/**
* Implementation of {@link FileStateValue} for regular files that exist.
*
* <p>A union of (digest, mtime). We use digests only if a fast digest lookup is available from
* the filesystem. If not, we fall back to mtime-based digests. This avoids the case where Blaze
* must read all files involved in the build in order to check for modifications in the case
* where fast digest lookups are not available.
*/
@ThreadSafe
public static final class RegularFileStateValue extends FileStateValue {
private final long size;
// Only needed for empty-file equality-checking; otherwise it is always -1.
// TODO(bazel-team): Consider getting rid of this special case for empty files.
private final long mtime;
@Nullable private final byte[] digest;
@Nullable private final FileContentsProxy contentsProxy;
public RegularFileStateValue(long size, long mtime, byte[] digest,
FileContentsProxy contentsProxy) {
Preconditions.checkState((digest == null) != (contentsProxy == null));
this.size = size;
// mtime is forced to be -1 so that we do not accidentally depend on it for non-empty files,
// which should only be compared using digests.
this.mtime = size == 0 ? mtime : -1;
this.digest = digest;
this.contentsProxy = contentsProxy;
}
/**
* Creates a RegularFileStateValue instance corresponding to the given existing file.
* @param stat must describe a regular file (not a symlink).
*/
private static RegularFileStateValue fromPath(Path path, FileStatusWithDigest stat,
@Nullable TimestampGranularityMonitor tsgm)
throws InconsistentFilesystemException {
Preconditions.checkState(stat.isFile(), path);
try {
byte[] digest = stat.getDigest();
if (digest == null) {
digest = path.getFastDigest();
}
if (digest == null) {
long mtime = stat.getLastModifiedTime();
// Note that TimestampGranularityMonitor#notifyDependenceOnFileTime is a thread-safe
// method.
if (tsgm != null) {
tsgm.notifyDependenceOnFileTime(mtime);
}
return new RegularFileStateValue(stat.getSize(), stat.getLastModifiedTime(), null,
FileContentsProxy.create(mtime, stat.getNodeId()));
} else {
// We are careful here to avoid putting the value ID into FileMetadata if we already have
// a digest. Arbitrary filesystems may do weird things with the value ID; a digest is more
// robust.
return new RegularFileStateValue(stat.getSize(), stat.getLastModifiedTime(), digest, null);
}
} catch (IOException e) {
String errorMessage = e.getMessage() != null
? "error '" + e.getMessage() + "'" : "an error";
throw new InconsistentFilesystemException("'stat' said " + path + " is a file but then we "
+ "later encountered " + errorMessage + " which indicates that " + path + " is no "
+ "longer a file. Did you delete it during the build?");
}
}
@Override
public Type getType() {
return Type.REGULAR_FILE;
}
@Override
public long getSize() {
return size;
}
public long getMtime() {
return mtime;
}
@Override
@Nullable
public byte[] getDigest() {
return digest;
}
public FileContentsProxy getContentsProxy() {
return contentsProxy;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof RegularFileStateValue) {
RegularFileStateValue other = (RegularFileStateValue) obj;
return size == other.size && mtime == other.mtime && Arrays.equals(digest, other.digest)
&& Objects.equals(contentsProxy, other.contentsProxy);
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(size, mtime, Arrays.hashCode(digest), contentsProxy);
}
@Override
public String prettyPrint() {
String contents = digest != null
? String.format("digest of ", Arrays.toString(digest))
: contentsProxy.prettyPrint();
String extra = mtime != -1 ? String.format(" and mtime of %d", mtime) : "";
return String.format("regular file with size of %d and %s%s", size, contents, extra);
}
}
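// Sketch of the two legal construction forms enforced by the checkState in the constructor
// above (exactly one of digest and contentsProxy may be non-null); the argument values are
// illustrative only:
//
//   new RegularFileStateValue(size, mtime, digest, null);                                   // digest-based
//   new RegularFileStateValue(size, mtime, null, FileContentsProxy.create(mtime, nodeId));  // mtime/nodeId-based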
/** Implementation of {@link FileStateValue} for special files that exist. */
public static final class SpecialFileStateValue extends FileStateValue {
private final FileContentsProxy contentsProxy;
public SpecialFileStateValue(FileContentsProxy contentsProxy) {
this.contentsProxy = contentsProxy;
}
static SpecialFileStateValue fromStat(FileStatusWithDigest stat,
@Nullable TimestampGranularityMonitor tsgm) throws IOException {
long mtime = stat.getLastModifiedTime();
// Note that TimestampGranularityMonitor#notifyDependenceOnFileTime is a thread-safe
// method.
if (tsgm != null) {
tsgm.notifyDependenceOnFileTime(mtime);
}
return new SpecialFileStateValue(FileContentsProxy.create(mtime, stat.getNodeId()));
}
@Override
public Type getType() {
return Type.SPECIAL_FILE;
}
@Override
long getSize() {
return 0;
}
@Override
@Nullable
byte[] getDigest() {
return null;
}
public FileContentsProxy getContentsProxy() {
return contentsProxy;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof SpecialFileStateValue) {
SpecialFileStateValue other = (SpecialFileStateValue) obj;
return Objects.equals(contentsProxy, other.contentsProxy);
}
return false;
}
@Override
public int hashCode() {
return contentsProxy.hashCode();
}
@Override
public String prettyPrint() {
return String.format("special file with %s", contentsProxy.prettyPrint());
}
}
/** Implementation of {@link FileStateValue} for directories that exist. */
public static final class DirectoryFileStateValue extends FileStateValue {
private DirectoryFileStateValue() {
}
@Override
public Type getType() {
return Type.DIRECTORY;
}
@Override
public String prettyPrint() {
return "directory";
}
// This object is normally a singleton, but deserialization produces copies.
@Override
public boolean equals(Object obj) {
return obj instanceof DirectoryFileStateValue;
}
@Override
public int hashCode() {
return 7654321;
}
}
/** Implementation of {@link FileStateValue} for symlinks. */
public static final class SymlinkFileStateValue extends FileStateValue {
private final PathFragment symlinkTarget;
public SymlinkFileStateValue(PathFragment symlinkTarget) {
this.symlinkTarget = symlinkTarget;
}
@Override
public Type getType() {
return Type.SYMLINK;
}
@Override
public PathFragment getSymlinkTarget() {
return symlinkTarget;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SymlinkFileStateValue)) {
return false;
}
SymlinkFileStateValue other = (SymlinkFileStateValue) obj;
return symlinkTarget.equals(other.symlinkTarget);
}
@Override
public int hashCode() {
return symlinkTarget.hashCode();
}
@Override
public String prettyPrint() {
return "symlink to " + symlinkTarget;
}
}
/** Implementation of {@link FileStateValue} for nonexistent files. */
public static final class NonexistentFileStateValue extends FileStateValue {
private NonexistentFileStateValue() {
}
@Override
public Type getType() {
return Type.NONEXISTENT;
}
@Override
public String prettyPrint() {
return "nonexistent path";
}
// This object is normally a singleton, but deserialization produces copies.
@Override
public boolean equals(Object obj) {
return obj instanceof NonexistentFileStateValue;
}
@Override
public int hashCode() {
return 8765432;
}
}
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/common/asset_types.proto
package com.google.ads.googleads.v8.common;
public interface LeadFormAssetOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.ads.googleads.v8.common.LeadFormAsset)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Required. The name of the business being advertised.
* </pre>
*
* <code>string business_name = 10 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The businessName.
*/
java.lang.String getBusinessName();
/**
* <pre>
* Required. The name of the business being advertised.
* </pre>
*
* <code>string business_name = 10 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for businessName.
*/
com.google.protobuf.ByteString
getBusinessNameBytes();
/**
* <pre>
* Required. Pre-defined display text that encourages user to expand the form.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.LeadFormCallToActionTypeEnum.LeadFormCallToActionType call_to_action_type = 17 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The enum numeric value on the wire for callToActionType.
*/
int getCallToActionTypeValue();
/**
* <pre>
* Required. Pre-defined display text that encourages user to expand the form.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.LeadFormCallToActionTypeEnum.LeadFormCallToActionType call_to_action_type = 17 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The callToActionType.
*/
com.google.ads.googleads.v8.enums.LeadFormCallToActionTypeEnum.LeadFormCallToActionType getCallToActionType();
/**
* <pre>
* Required. Text giving a clear value proposition of what users expect once they expand
* the form.
* </pre>
*
* <code>string call_to_action_description = 18 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The callToActionDescription.
*/
java.lang.String getCallToActionDescription();
/**
* <pre>
* Required. Text giving a clear value proposition of what users expect once they expand
* the form.
* </pre>
*
* <code>string call_to_action_description = 18 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for callToActionDescription.
*/
com.google.protobuf.ByteString
getCallToActionDescriptionBytes();
/**
* <pre>
* Required. Headline of the expanded form to describe what the form is asking for or
* facilitating.
* </pre>
*
* <code>string headline = 12 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The headline.
*/
java.lang.String getHeadline();
/**
* <pre>
* Required. Headline of the expanded form to describe what the form is asking for or
* facilitating.
* </pre>
*
* <code>string headline = 12 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for headline.
*/
com.google.protobuf.ByteString
getHeadlineBytes();
/**
* <pre>
* Required. Detailed description of the expanded form to describe what the form is
* asking for or facilitating.
* </pre>
*
* <code>string description = 13 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The description.
*/
java.lang.String getDescription();
/**
* <pre>
* Required. Detailed description of the expanded form to describe what the form is
* asking for or facilitating.
* </pre>
*
* <code>string description = 13 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for description.
*/
com.google.protobuf.ByteString
getDescriptionBytes();
/**
* <pre>
* Required. Link to a page describing the policy on how the collected data is handled
* by the advertiser/business.
* </pre>
*
* <code>string privacy_policy_url = 14 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The privacyPolicyUrl.
*/
java.lang.String getPrivacyPolicyUrl();
/**
* <pre>
* Required. Link to a page describing the policy on how the collected data is handled
* by the advertiser/business.
* </pre>
*
* <code>string privacy_policy_url = 14 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for privacyPolicyUrl.
*/
com.google.protobuf.ByteString
getPrivacyPolicyUrlBytes();
/**
* <pre>
* Headline of text shown after form submission that describes how the
* advertiser will follow up with the user.
* </pre>
*
* <code>optional string post_submit_headline = 15;</code>
* @return Whether the postSubmitHeadline field is set.
*/
boolean hasPostSubmitHeadline();
/**
* <pre>
* Headline of text shown after form submission that describes how the
* advertiser will follow up with the user.
* </pre>
*
* <code>optional string post_submit_headline = 15;</code>
* @return The postSubmitHeadline.
*/
java.lang.String getPostSubmitHeadline();
/**
* <pre>
* Headline of text shown after form submission that describes how the
* advertiser will follow up with the user.
* </pre>
*
* <code>optional string post_submit_headline = 15;</code>
* @return The bytes for postSubmitHeadline.
*/
com.google.protobuf.ByteString
getPostSubmitHeadlineBytes();
/**
* <pre>
* Detailed description shown after form submission that describes how the
* advertiser will follow up with the user.
* </pre>
*
* <code>optional string post_submit_description = 16;</code>
* @return Whether the postSubmitDescription field is set.
*/
boolean hasPostSubmitDescription();
/**
* <pre>
* Detailed description shown after form submission that describes how the
* advertiser will follow up with the user.
* </pre>
*
* <code>optional string post_submit_description = 16;</code>
* @return The postSubmitDescription.
*/
java.lang.String getPostSubmitDescription();
/**
* <pre>
* Detailed description shown after form submission that describes how the
* advertiser will follow up with the user.
* </pre>
*
* <code>optional string post_submit_description = 16;</code>
* @return The bytes for postSubmitDescription.
*/
com.google.protobuf.ByteString
getPostSubmitDescriptionBytes();
/**
* <pre>
* Ordered list of input fields.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormField fields = 8;</code>
*/
java.util.List<com.google.ads.googleads.v8.common.LeadFormField>
getFieldsList();
/**
* <pre>
* Ordered list of input fields.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormField fields = 8;</code>
*/
com.google.ads.googleads.v8.common.LeadFormField getFields(int index);
/**
* <pre>
* Ordered list of input fields.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormField fields = 8;</code>
*/
int getFieldsCount();
/**
* <pre>
* Ordered list of input fields.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormField fields = 8;</code>
*/
java.util.List<? extends com.google.ads.googleads.v8.common.LeadFormFieldOrBuilder>
getFieldsOrBuilderList();
/**
* <pre>
* Ordered list of input fields.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormField fields = 8;</code>
*/
com.google.ads.googleads.v8.common.LeadFormFieldOrBuilder getFieldsOrBuilder(
int index);
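/*
 * A reader-side sketch (the `asset` variable is hypothetical): the repeated `fields` field is
 * read through the count/index accessors declared above.
 *
 *   for (int i = 0; i < asset.getFieldsCount(); i++) {
 *     com.google.ads.googleads.v8.common.LeadFormField field = asset.getFields(i);
 *     // inspect field ...
 *   }
 */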
/**
* <pre>
* Configured methods for collected lead data to be delivered to advertiser.
* Only one method typed as WebhookDelivery can be configured.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormDeliveryMethod delivery_methods = 9;</code>
*/
java.util.List<com.google.ads.googleads.v8.common.LeadFormDeliveryMethod>
getDeliveryMethodsList();
/**
* <pre>
* Configured methods for collected lead data to be delivered to advertiser.
* Only one method typed as WebhookDelivery can be configured.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormDeliveryMethod delivery_methods = 9;</code>
*/
com.google.ads.googleads.v8.common.LeadFormDeliveryMethod getDeliveryMethods(int index);
/**
* <pre>
* Configured methods for collected lead data to be delivered to advertiser.
* Only one method typed as WebhookDelivery can be configured.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormDeliveryMethod delivery_methods = 9;</code>
*/
int getDeliveryMethodsCount();
/**
* <pre>
* Configured methods for collected lead data to be delivered to advertiser.
* Only one method typed as WebhookDelivery can be configured.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormDeliveryMethod delivery_methods = 9;</code>
*/
java.util.List<? extends com.google.ads.googleads.v8.common.LeadFormDeliveryMethodOrBuilder>
getDeliveryMethodsOrBuilderList();
/**
* <pre>
* Configured methods for collected lead data to be delivered to advertiser.
* Only one method typed as WebhookDelivery can be configured.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.common.LeadFormDeliveryMethod delivery_methods = 9;</code>
*/
com.google.ads.googleads.v8.common.LeadFormDeliveryMethodOrBuilder getDeliveryMethodsOrBuilder(
int index);
/**
* <pre>
* Pre-defined display text that encourages user action after the form is
* submitted.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.LeadFormPostSubmitCallToActionTypeEnum.LeadFormPostSubmitCallToActionType post_submit_call_to_action_type = 19;</code>
* @return The enum numeric value on the wire for postSubmitCallToActionType.
*/
int getPostSubmitCallToActionTypeValue();
/**
* <pre>
* Pre-defined display text that encourages user action after the form is
* submitted.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.LeadFormPostSubmitCallToActionTypeEnum.LeadFormPostSubmitCallToActionType post_submit_call_to_action_type = 19;</code>
* @return The postSubmitCallToActionType.
*/
com.google.ads.googleads.v8.enums.LeadFormPostSubmitCallToActionTypeEnum.LeadFormPostSubmitCallToActionType getPostSubmitCallToActionType();
/**
* <pre>
* Asset resource name of the background image. The minimum size is 600x314
* and the aspect ratio must be 1.91:1 (+-1%).
* </pre>
*
* <code>optional string background_image_asset = 20;</code>
* @return Whether the backgroundImageAsset field is set.
*/
boolean hasBackgroundImageAsset();
/**
* <pre>
* Asset resource name of the background image. The minimum size is 600x314
* and the aspect ratio must be 1.91:1 (+-1%).
* </pre>
*
* <code>optional string background_image_asset = 20;</code>
* @return The backgroundImageAsset.
*/
java.lang.String getBackgroundImageAsset();
/**
* <pre>
* Asset resource name of the background image. The minimum size is 600x314
* and the aspect ratio must be 1.91:1 (+-1%).
* </pre>
*
* <code>optional string background_image_asset = 20;</code>
* @return The bytes for backgroundImageAsset.
*/
com.google.protobuf.ByteString
getBackgroundImageAssetBytes();
/**
* <pre>
* Desired intent for the lead form, e.g. more volume or more qualified.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.LeadFormDesiredIntentEnum.LeadFormDesiredIntent desired_intent = 21;</code>
* @return The enum numeric value on the wire for desiredIntent.
*/
int getDesiredIntentValue();
/**
* <pre>
* Desired intent for the lead form, e.g. more volume or more qualified.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.LeadFormDesiredIntentEnum.LeadFormDesiredIntent desired_intent = 21;</code>
* @return The desiredIntent.
*/
com.google.ads.googleads.v8.enums.LeadFormDesiredIntentEnum.LeadFormDesiredIntent getDesiredIntent();
/**
* <pre>
* Custom disclosure shown along with Google disclaimer on the lead form.
* Accessible to allowed customers only.
* </pre>
*
* <code>optional string custom_disclosure = 22;</code>
* @return Whether the customDisclosure field is set.
*/
boolean hasCustomDisclosure();
/**
* <pre>
* Custom disclosure shown along with Google disclaimer on the lead form.
* Accessible to allowed customers only.
* </pre>
*
* <code>optional string custom_disclosure = 22;</code>
* @return The customDisclosure.
*/
java.lang.String getCustomDisclosure();
/**
* <pre>
* Custom disclosure shown along with Google disclaimer on the lead form.
* Accessible to allowed customers only.
* </pre>
*
* <code>optional string custom_disclosure = 22;</code>
* @return The bytes for customDisclosure.
*/
com.google.protobuf.ByteString
getCustomDisclosureBytes();
}
|
|
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.api.app.messageforums;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
public interface MessageForumsMessageManager {
public Attachment createAttachment();
public Message createMessage(String typeId);
public PrivateMessage createPrivateMessage();
public Message createDiscussionMessage();
public Message createOpenMessage();
public void saveMessage(Message message);
public void saveMessage(Message message, boolean logEvent);
/**
*
* @param message
* @param logEvent
* @param ignoreLockedTopicForum set true if you want to allow the message
* to be updated even if the topic or forum is locked (i.e. marking as read or
* commenting on a moderated message)
*/
public void saveMessage(Message message, boolean logEvent, boolean ignoreLockedTopicForum);
public void deleteMessage(Message message);
public Message getMessageById(Long messageId);
public Message getMessageByIdWithAttachments(Long messageId);
public void markMessageApproval(Long messageId, boolean approved);
public void markMessageReadForUser(Long topicId, Long messageId, boolean read);
public void markMessageReadForUser(Long topicId, Long messageId, boolean read, String userId);
public void markMessageReadForUser(Long topicId, Long messageId, boolean read, String userId, String context, String toolId);
public boolean isMessageReadForUser(Long topicId, Long messageId);
public UnreadStatus findUnreadStatus(Long topicId, Long messageId);
public UnreadStatus findUnreadStatusByUserId(Long topicId, Long messageId, String userId);
public void deleteUnreadStatus(Long topicId, Long messageId);
public int findMessageCountByTopicId(Long topicId);
public List<Object[]> findMessageCountByForumId(Long forumId);
public List<Object[]> findMessageCountTotal();
public int findViewableMessageCountByTopicIdByUserId(Long topicId, String userId);
public int findViewableMessageCountByTopicId(Long topicId);
public int findAuhtoredMessageCountByTopicIdByUserId(final Long topicId, final String userId);
public int findAuthoredMessageCountForStudent(final String userId);
/**
* @param studentId The id of the student whose authored messages we are searching for.
* @return A list of all of the messages that the student has authored and are not flagged
* as DRAFT or DELETED.
*/
public List<Message> findAuthoredMessagesForStudent(String studentId);
public List<UserStatistics> findAuthoredStatsForStudent(String studentId);
public List<Message> findAuthoredMessagesForStudentByTopicId(String studentId, final Long topicId);
public List<UserStatistics> findAuthoredStatsForStudentByTopicId(String studentId, final Long topicId);
public List<Message> findAuthoredMessagesForStudentByForumId(String studentId, final Long forumId);
public List<UserStatistics> findAuthoredStatsForStudentByForumId(String studentId, final Long forumId);
/**
* @return Each item in the list will be an array consisting of two elements. The element
* at index 0 will correspond to the student's id and the element at index 1 will correspond
* to the number of messages that student has authored in the site.
*/
public List<Object[]> findAuthoredMessageCountForAllStudents();
public List<Object[]> findAuthoredMessageCountForAllStudentsByTopicId(final Long topicId);
public List<Object[]> findAuthoredMessageCountForAllStudentsByForumId(final Long forumId);
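/*
 * A consumer-side sketch of the List<Object[]> shape documented above (the messageManager
 * variable is hypothetical): index 0 holds the student id and index 1 the message count.
 *
 *   List<Object[]> rows = messageManager.findAuthoredMessageCountForAllStudents();
 *   for (Object[] row : rows) {
 *       String studentId = (String) row[0];
 *       Number count = (Number) row[1];
 *       // accumulate statistics ...
 *   }
 */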
public int findUnreadMessageCountByTopicIdByUserId(Long topicId, String userId);
public int findUnreadMessageCountByTopicId(Long topicId);
public int findUnreadViewableMessageCountByTopicIdByUserId(Long topicId, String userId);
public int findUnreadViewableMessageCountByTopicId(Long topicId);
public int findReadMessageCountByTopicIdByUserId(Long topicId, String userId);
public int findReadMessageCountByTopicId(Long topicId);
public int findReadMessageCountForStudent(final String userId);
/**
* @return Each item in the list will be an array consisting of two elements. The element
* at index 0 will correspond to the student's id and the element at index 1 will correspond
* to the number of messages that student has read in the site.
*/
public List<Object[]> findReadMessageCountForAllStudents();
public List<Object[]> findReadMessageCountForAllStudentsByTopicId(final Long topicId);
public List<Object[]> findReadMessageCountForAllStudentsByForumId(final Long forumId);
/**
* @param studentId The id of the student whose read messages we are searching for.
* @return A list of all of the messages that the student has read and are not flagged
* as DRAFT or DELETED.
*/
public List<UserStatistics> findReadStatsForStudent(String studentId);
public List<UserStatistics> findReadStatsForStudentByTopicId(String studentId, final Long topicId);
public List<UserStatistics> findReadStatsForStudentByForumId(String studentId, final Long forumId);
public int findReadViewableMessageCountByTopicId(Long topicId);
public List findDiscussionForumMessageCountsForAllSitesByPermissionLevelId(final List siteList, final List roleList);
public List findDiscussionForumMessageCountsForAllSitesByPermissionLevelName(final List siteList, final List roleList);
public List findDiscussionForumReadMessageCountsForAllSitesByPermissionLevelId(final List siteList, final List roleList);
public List findDiscussionForumReadMessageCountsForAllSitesByPermissionLevelName(final List siteList, final List roleList);
public List findDiscussionForumMessageCountsForGroupedSitesByTopic(final List siteList, final List roleList); // added
public List findDiscussionForumReadMessageCountsForGroupedSitesByTopic(final List siteList, final List roleList); // added
public List<Object []> findDiscussionForumMessageCountsForTopicsWithMissingPermsForAllSites(final List<String> siteList);
public List<Object []> findDiscussionForumReadMessageCountsForTopicsWithMissingPermsForAllSites(final List<String> siteList);
/**
* @param topicIds The list of topic ids for which we want to gather the message counts.
*
* @return A list of arrays where each array will contain exactly two values. Index 0, a Long,
* will be the topicId and index 1, an Integer, will be the total number of messages
* under that topic (excluding any messages flagged as DRAFT or DELETED).
*/
public List<Object[]> findMessageCountsForMainPage(final Collection<Long> topicIds);
/**
* @param topicIds The list of topic ids for which we want to gather the message counts.
*
* @return A list of arrays where each array will contain exactly two values. Index 0, a Long,
* will be the topicId and index 1, an Integer, will be the total number of messages
* under that topic that the current user has read (excluding any messages flagged as DRAFT
* or DELETED).
*/
public List<Object[]> findReadMessageCountsForMainPage(final Collection<Long> topicIds);
public List findMessagesByTopicId(Long topicId);
public List findUndeletedMessagesByTopicId(Long topicId);
public Attachment getAttachmentById(Long attachmentId);
public void getChildMsgs(final Long messageId, List returnList);
public void deleteMsgWithChild(final Long messageId);
public List getFirstLevelChildMsgs(final Long messageId);
public List sortMessageBySubject(Topic topic, boolean asc);
public List sortMessageByAuthor(Topic topic, boolean asc);
public List sortMessageByDate(Topic topic, boolean asc);
public List sortMessageByDate(List list, boolean asc);
public List getAllRelatedMsgs(final Long messageId);
public List findPvtMsgsBySearchText(final String typeUuid, final String searchText,final Date searchFromDate, final Date searchToDate,
final boolean searchByText, final boolean searchByAuthor,final boolean searchByBody, final boolean searchByLabel,final boolean searchByDate);
/**
* Get a fully qualified URL
* @param id
* @return
*/
public String getAttachmentUrl(String id);
/**
* Get a relative URL escaped suitable for JSF pages
* @param id
* @return
*/
public String getAttachmentRelativeUrl(String id);
public boolean currentToolMatch(String toolId);
public boolean isToolInSite(String siteId, String toolId);
public Map<Long, Boolean> getReadStatusForMessagesWithId(List msgIds, String userId);
/**
* Returns list of all messages in site with Pending approval for which
* at least one of the given memberships has moderate perm
* @return
*/
public List getPendingMsgsInSiteByMembership(final List membershipList);
/**
* Retrieves all pending messages in a given topic
* @param topicId
* @return
*/
public List getPendingMsgsInTopic(final Long topicId);
/**
* Get all forum messages in a site
* @param siteId
* @return a list of messages
*/
public List<Message> getAllMessagesInSite(String siteId);
public void saveMessageMoveHistory(Long msgid, Long desttopicId,Long sourceTopicId, boolean checkreminder);
public List findMovedMessagesByTopicId(Long id);
/**
* Returns a given number of recent threads (not deleted and not in draft state)
* for a given list of topicIds
* @param topicIds
* @param numberOfMessages
* @return
*/
public List getRecentDiscussionForumThreadsByTopicIds(List<Long> topicIds, int numberOfMessages);
}
|
|
/**
* SIX OVAL - https://nakamura5akihito.github.io/
* Copyright (C) 2010 Akihito Nakamura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opensec.six.oval.model.definitions;
import io.opensec.six.oval.model.ElementRef;
import io.opensec.six.oval.model.ElementType;
import io.opensec.six.oval.model.OvalObject;
import io.opensec.six.oval.model.common.CheckEnumeration;
import io.opensec.six.oval.model.common.DatatypeEnumeration;
import io.opensec.six.oval.model.common.OperationEnumeration;
/**
* The EntityAttributeGroup is a collection of attributes that are common to all entities.
*
* @author Akihito Nakamura, AIST
* @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
*/
public abstract class EntityAttributeGroup
implements ElementRef, OvalObject
{
public static final DatatypeEnumeration DEFAULT_DATATYPE =
DatatypeEnumeration.STRING;
private DatatypeEnumeration datatype;
//{optional, default="string"}
public static final OperationEnumeration DEFAULT_OPERATION =
OperationEnumeration.EQUALS;
private OperationEnumeration operation;
//{optional, default="equals"}
public static final Boolean DEFAULT_MASK = Boolean.FALSE;
private Boolean mask;
//{optional, default="false"}
private String var_ref;
//{optional, type="oval:VariableIDPattern"}
// public static final CheckEnumeration DEFAULT_VAR_CHECK =
// CheckEnumeration.ALL;
private CheckEnumeration var_check;
//{optional}: 5.10 (30358)
//{optional, default="all"}: 5.9
/**
* Constructor.
*/
public EntityAttributeGroup()
{
}
// public EntityAttributeGroup(
// final OperationEnumeration operation
// )
// {
// this( DEFAULT_DATATYPE, operation );
// }
//
//
// public EntityAttributeGroup(
// final DatatypeEnumeration datatype,
// final OperationEnumeration operation
// )
// {
// setDatatype( datatype );
// setOperation( operation );
// }
public EntityAttributeGroup(
final DatatypeEnumeration datatype,
final OperationEnumeration operation,
final Boolean mask,
final String var_ref,
final CheckEnumeration var_check
)
{
setDatatype( datatype );
setOperation( operation );
setMask( mask );
setVarRef( var_ref );
setVarCheck( var_check );
}
public EntityAttributeGroup(
final String datatype,
final String operation,
final Boolean mask,
final String var_ref,
final String var_check
)
{
this(
(datatype == null ? null : DatatypeEnumeration.fromValue( datatype )),
(operation == null ? null : OperationEnumeration.fromValue( operation )),
mask,
var_ref,
(var_check == null ? null : CheckEnumeration.fromValue( var_check ))
);
}
/**
*/
public void setDatatype(
final DatatypeEnumeration datatype
)
{
this.datatype = datatype;
}
public DatatypeEnumeration getDatatype()
{
return datatype;
}
public static DatatypeEnumeration datatype(
final EntityAttributeGroup obj
)
{
if (obj == null) {
throw new IllegalArgumentException( "null EntityAttributeGroup" );
}
DatatypeEnumeration datatype = obj.getDatatype();
return (datatype == null ? DEFAULT_DATATYPE : datatype);
}
/**
*/
public void setOperation(
final OperationEnumeration operation
)
{
this.operation = operation;
}
public OperationEnumeration getOperation()
{
return operation;
}
public static OperationEnumeration operation(
final EntityAttributeGroup obj
)
{
if (obj == null) {
throw new IllegalArgumentException( "null EntityAttributeGroup" );
}
OperationEnumeration operation = obj.getOperation();
return (operation == null ? DEFAULT_OPERATION : operation);
}
/**
*/
public void setMask(
final Boolean mask
)
{
this.mask = mask;
}
public Boolean getMask()
{
return mask;
}
public static Boolean mask(
final EntityAttributeGroup obj
)
{
if (obj == null) {
throw new IllegalArgumentException( "null EntityAttributeGroup" );
}
Boolean mask = obj.getMask();
return (mask == null ? DEFAULT_MASK : mask);
}
/**
*/
public void setVarRef(
final String var_ref
)
{
this.var_ref = var_ref;
}
public String getVarRef()
{
return var_ref;
}
/**
*/
public void setVarCheck(
final CheckEnumeration var_check
)
{
this.var_check = var_check;
}
public CheckEnumeration getVarCheck()
{
return var_check;
}
// public static final CheckEnumeration varCheck(
// final EntityAttributeGroup eag
// )
// {
// if (eag == null) {
// throw new IllegalArgumentException( "null EntityAttributeGroup" );
// }
//
// CheckEnumeration var_check = eag.getVarCheck();
// if (var_check == null) {
// var_check = DEFAULT_VAR_CHECK;
// }
//
// return var_check;
// }
//*********************************************************************
// ElementRef
//*********************************************************************
public String ovalGetRefId()
{
return getVarRef();
}
public ElementType ovalGetRefType()
{
return ElementType.VARIABLE;
}
//**************************************************************
// java.lang.Object
//**************************************************************
@Override
public int hashCode()
{
final int prime = 37;
int result = 17;
result = prime * result + datatype( this ).hashCode();
result = prime * result + operation( this ).hashCode();
result = prime * result + mask( this ).hashCode();
String var_ref = getVarRef();
result = prime * result + ((var_ref == null) ? 0 : var_ref.hashCode());
CheckEnumeration var_check = getVarCheck();
result = prime * result + ((var_check == null) ? 0 : var_check.hashCode());
return result;
}
@Override
public boolean equals(
final Object obj
)
{
if (this == obj) {
return true;
}
if (!(obj instanceof EntityAttributeGroup)) {
return false;
}
EntityAttributeGroup other = (EntityAttributeGroup)obj;
if (datatype( this ) == datatype( other )) {
if (operation( this ) == operation( other )) {
if (this.getVarCheck() == other.getVarCheck()) {
final String other_var_ref = other.getVarRef();
final String this_var_ref = this.getVarRef();
if (this_var_ref == other_var_ref
|| (this_var_ref != null
&& this_var_ref.equals( other_var_ref ))) {
if (mask( this ) == mask( other )) {
return true;
}
}
}
}
}
return false;
}
@Override
public String toString()
{
return "datatype=" + getDatatype()
+ ", operation=" + getOperation()
+ ", mask=" + getMask()
+ ", var_ref=" + getVarRef()
+ ", var_check=" + getVarCheck()
;
}
}
// EntityAttributeGroup
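// Sketch of how the static default-resolving helpers above behave (the concrete subclass name
// is hypothetical; real entity types extend EntityAttributeGroup elsewhere in the model):
//
//   EntityAttributeGroup e = new SomeConcreteEntity();   // datatype/operation/mask left null
//   EntityAttributeGroup.datatype(e);    // -> DatatypeEnumeration.STRING   (DEFAULT_DATATYPE)
//   EntityAttributeGroup.operation(e);   // -> OperationEnumeration.EQUALS  (DEFAULT_OPERATION)
//   EntityAttributeGroup.mask(e);        // -> Boolean.FALSE                (DEFAULT_MASK)
//
// equals() and hashCode() resolve through these helpers, so an entity with the defaults set
// explicitly compares equal to one that left the attributes unset.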
|
|
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.android.inputmethod.keyboard.internal;
import android.content.res.Resources;
import android.text.TextUtils;
import android.util.Log;
import com.android.inputmethod.keyboard.Keyboard;
import com.android.s16.inputmethod.R;
import java.util.ArrayList;
/**
* String parser of the moreKeys attribute of a Key.
* The string is a comma-separated list of texts, each of which represents one "more key".
* Each "more key" specification is one of the following:
* - A single letter (Letter)
* - Label optionally followed by keyOutputText or code (keyLabel|keyOutputText).
* - Icon followed by keyOutputText or code (@icon/icon_number|@integer/key_code)
* The special characters comma ',', backslash '\', and bar '|' can be escaped with the '\'
* character.
* Note that the characters '@' and '\' are also parsed by the XML parser and the CSV parser.
* See {@link KeyboardIconsSet} about icon_number.
*/
public class MoreKeySpecParser {
private static final String TAG = MoreKeySpecParser.class.getSimpleName();
private static final char ESCAPE = '\\';
private static final String LABEL_END = "|";
private static final String PREFIX_AT = "@";
private static final String PREFIX_ICON = PREFIX_AT + "icon/";
private static final String PREFIX_CODE = PREFIX_AT + "integer/";
private MoreKeySpecParser() {
// Intentional empty constructor for utility class.
}
private static boolean hasIcon(String moreKeySpec) {
if (moreKeySpec.startsWith(PREFIX_ICON)) {
final int end = indexOfLabelEnd(moreKeySpec, 0);
if (end > 0)
return true;
throw new MoreKeySpecParserError("outputText or code not specified: " + moreKeySpec);
}
return false;
}
private static boolean hasCode(String moreKeySpec) {
final int end = indexOfLabelEnd(moreKeySpec, 0);
if (end > 0 && end + 1 < moreKeySpec.length()
&& moreKeySpec.substring(end + 1).startsWith(PREFIX_CODE)) {
return true;
}
return false;
}
private static String parseEscape(String text) {
if (text.indexOf(ESCAPE) < 0)
return text;
final int length = text.length();
final StringBuilder sb = new StringBuilder();
for (int pos = 0; pos < length; pos++) {
final char c = text.charAt(pos);
if (c == ESCAPE && pos + 1 < length) {
sb.append(text.charAt(++pos));
} else {
sb.append(c);
}
}
return sb.toString();
}
private static int indexOfLabelEnd(String moreKeySpec, int start) {
if (moreKeySpec.indexOf(ESCAPE, start) < 0) {
final int end = moreKeySpec.indexOf(LABEL_END, start);
if (end == 0)
throw new MoreKeySpecParserError(LABEL_END + " at " + start + ": " + moreKeySpec);
return end;
}
final int length = moreKeySpec.length();
for (int pos = start; pos < length; pos++) {
final char c = moreKeySpec.charAt(pos);
if (c == ESCAPE && pos + 1 < length) {
pos++;
} else if (moreKeySpec.startsWith(LABEL_END, pos)) {
return pos;
}
}
return -1;
}
public static String getLabel(String moreKeySpec) {
if (hasIcon(moreKeySpec))
return null;
final int end = indexOfLabelEnd(moreKeySpec, 0);
final String label = (end > 0) ? parseEscape(moreKeySpec.substring(0, end))
: parseEscape(moreKeySpec);
if (TextUtils.isEmpty(label))
throw new MoreKeySpecParserError("Empty label: " + moreKeySpec);
return label;
}
public static String getOutputText(String moreKeySpec) {
if (hasCode(moreKeySpec))
return null;
final int end = indexOfLabelEnd(moreKeySpec, 0);
if (end > 0) {
if (indexOfLabelEnd(moreKeySpec, end + 1) >= 0)
throw new MoreKeySpecParserError("Multiple " + LABEL_END + ": "
+ moreKeySpec);
final String outputText = parseEscape(moreKeySpec.substring(end + LABEL_END.length()));
if (!TextUtils.isEmpty(outputText))
return outputText;
throw new MoreKeySpecParserError("Empty outputText: " + moreKeySpec);
}
final String label = getLabel(moreKeySpec);
if (label == null)
throw new MoreKeySpecParserError("Empty label: " + moreKeySpec);
// Code is automatically generated for one letter label. See {@link getCode()}.
if (label.length() == 1)
return null;
return label;
}
public static int getCode(Resources res, String moreKeySpec) {
if (hasCode(moreKeySpec)) {
final int end = indexOfLabelEnd(moreKeySpec, 0);
if (indexOfLabelEnd(moreKeySpec, end + 1) >= 0)
throw new MoreKeySpecParserError("Multiple " + LABEL_END + ": " + moreKeySpec);
final int resId = getResourceId(res,
moreKeySpec.substring(end + LABEL_END.length() + PREFIX_AT.length()));
final int code = res.getInteger(resId);
return code;
}
if (indexOfLabelEnd(moreKeySpec, 0) > 0)
return Keyboard.CODE_DUMMY;
final String label = getLabel(moreKeySpec);
// Code is automatically generated for one letter label.
if (label != null && label.length() == 1)
return label.charAt(0);
return Keyboard.CODE_DUMMY;
}
public static int getIconId(String moreKeySpec) {
if (hasIcon(moreKeySpec)) {
int end = moreKeySpec.indexOf(LABEL_END, PREFIX_ICON.length() + 1);
final String iconId = moreKeySpec.substring(PREFIX_ICON.length(), end);
try {
return Integer.valueOf(iconId);
} catch (NumberFormatException e) {
Log.w(TAG, "illegal icon id specified: " + iconId);
return KeyboardIconsSet.ICON_UNDEFINED;
}
}
return KeyboardIconsSet.ICON_UNDEFINED;
}
private static int getResourceId(Resources res, String name) {
String packageName = res.getResourcePackageName(R.string.english_ime_name);
int resId = res.getIdentifier(name, null, packageName);
if (resId == 0)
throw new MoreKeySpecParserError("Unknown resource: " + name);
return resId;
}
@SuppressWarnings("serial")
public static class MoreKeySpecParserError extends RuntimeException {
public MoreKeySpecParserError(String message) {
super(message);
}
}
public interface CodeFilter {
public boolean shouldFilterOut(int code);
}
public static final CodeFilter DIGIT_FILTER = new CodeFilter() {
@Override
public boolean shouldFilterOut(int code) {
return Character.isDigit(code);
}
};
public static CharSequence[] filterOut(Resources res, CharSequence[] moreKeys,
CodeFilter filter) {
if (moreKeys == null || moreKeys.length < 1) {
return null;
}
if (moreKeys.length == 1
&& filter.shouldFilterOut(getCode(res, moreKeys[0].toString()))) {
return null;
}
ArrayList<CharSequence> filtered = null;
for (int i = 0; i < moreKeys.length; i++) {
final CharSequence moreKeySpec = moreKeys[i];
if (filter.shouldFilterOut(getCode(res, moreKeySpec.toString()))) {
if (filtered == null) {
filtered = new ArrayList<CharSequence>();
for (int j = 0; j < i; j++) {
filtered.add(moreKeys[j]);
}
}
} else if (filtered != null) {
filtered.add(moreKeySpec);
}
}
if (filtered == null) {
return moreKeys;
}
if (filtered.size() == 0) {
return null;
}
return filtered.toArray(new CharSequence[filtered.size()]);
}
}
|
|
package home.smart.fly.animations.ui.activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import android.text.TextPaint;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.net.NetworkInterface;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import androidx.appcompat.widget.AppCompatAutoCompleteTextView;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import home.smart.fly.animations.R;
import home.smart.fly.animations.utils.AppUtils;
import home.smart.fly.animations.utils.TT;
import me.leolin.shortcutbadger.ShortcutBadger;
public class InputActivity extends AppCompatActivity {
private static final String TAG = "InputActivity";
@BindView(R.id.input)
EditText mInput;
@BindView(R.id.result)
TextView mResult;
@BindView(R.id.macAddress)
TextView mMacAddress;
@BindView(R.id.link)
TextView mLink;
@BindView(R.id.auto_complete_text)
AppCompatAutoCompleteTextView mAutoCompleteTextView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_input);
ButterKnife.bind(this);
mLink.getPaint().setFlags(TextPaint.UNDERLINE_TEXT_FLAG);
mLink.getPaint().setAntiAlias(true);
setupAutoCompleteTextView();
}
@OnClick({R.id.set_badge_num, R.id.get, R.id.format, R.id.getMac1, R.id.getMac2, R.id.link})
public void Click(View view) {
switch (view.getId()) {
case R.id.set_badge_num:
int count = 0;
if (!TextUtils.isEmpty(mInput.getText().toString())) {
count = Integer.parseInt(mInput.getText().toString());
}
if (ShortcutBadger.applyCount(this, count)) {
TT.showSToast(this, "success");
} else {
TT.showSToast(this, "fail");
}
break;
case R.id.get:
mResult.setText(mInput.getText().toString());
break;
case R.id.format:
String temp = mResult.getText().toString();
temp = temp.replace("\n", "");
mResult.setText(temp);
break;
case R.id.getMac1:
getMacAddress1();
break;
case R.id.getMac2:
getMacAddress2();
break;
case R.id.link:
Toast.makeText(this, "link !", Toast.LENGTH_SHORT).show();
break;
default:
break;
}
}
private void getMacAddress1() {
WifiManager mWifiManager = (WifiManager) getApplicationContext().getSystemService(Context.WIFI_SERVICE);
WifiInfo mWifiInfo = mWifiManager.getConnectionInfo();
Log.e(TAG, "getMacAddress1: " + mWifiInfo.getMacAddress());
mMacAddress.setText(mWifiInfo.getMacAddress());
}
private void getMacAddress2() {
String mac = getAddressMAC(getApplicationContext());
mMacAddress.setText(mac);
}
private static final String marshmallowMacAddress = "02:00:00:00:00:00";
private static final String fileAddressMac = "/sys/class/net/wlan0/address";
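// Since Android 6.0 (Marshmallow), WifiInfo.getMacAddress() returns the constant
// "02:00:00:00:00:00" for privacy reasons, so the helpers below fall back to reading the
// hardware address from the wlan0 NetworkInterface and, failing that, from the sysfs file above.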
public static String getAddressMAC(Context context) {
WifiManager wifiMan = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
WifiInfo wifiInf = wifiMan.getConnectionInfo();
if (wifiInf != null && marshmallowMacAddress.equals(wifiInf.getMacAddress())) {
String result = null;
try {
result = getAdressMacByInterface();
if (result != null) {
return result;
} else {
result = getAddressMacByFile(wifiMan);
return result;
}
} catch (IOException e) {
Log.e("MobileAccess", "Erreur lecture propriete Adresse MAC");
} catch (Exception e) {
Log.e("MobileAcces", "Erreur lecture propriete Adresse MAC ");
}
} else {
if (wifiInf != null && wifiInf.getMacAddress() != null) {
return wifiInf.getMacAddress();
} else {
return "";
}
}
return marshmallowMacAddress;
}
private static String getAdressMacByInterface() {
try {
List<NetworkInterface> all = Collections.list(NetworkInterface.getNetworkInterfaces());
for (NetworkInterface nif : all) {
if (nif.getName().equalsIgnoreCase("wlan0")) {
byte[] macBytes = nif.getHardwareAddress();
if (macBytes == null) {
return "";
}
StringBuilder res1 = new StringBuilder();
for (byte b : macBytes) {
res1.append(String.format("%02X:", b));
}
if (res1.length() > 0) {
res1.deleteCharAt(res1.length() - 1);
}
return res1.toString();
}
}
} catch (Exception e) {
Log.e("MobileAcces", "Erreur lecture propriete Adresse MAC ");
}
return null;
}
private static String getAddressMacByFile(WifiManager wifiMan) throws Exception {
String ret;
int wifiState = wifiMan.getWifiState();
wifiMan.setWifiEnabled(true);
File fl = new File(fileAddressMac);
FileInputStream fin = new FileInputStream(fl);
ret = crunchifyGetStringFromStream(fin);
fin.close();
boolean enabled = WifiManager.WIFI_STATE_ENABLED == wifiState;
wifiMan.setWifiEnabled(enabled);
return ret;
}
private static String crunchifyGetStringFromStream(InputStream crunchifyStream) throws IOException {
if (crunchifyStream != null) {
Writer crunchifyWriter = new StringWriter();
char[] crunchifyBuffer = new char[2048];
try {
Reader crunchifyReader = new BufferedReader(new InputStreamReader(crunchifyStream, "UTF-8"));
int counter;
while ((counter = crunchifyReader.read(crunchifyBuffer)) != -1) {
crunchifyWriter.write(crunchifyBuffer, 0, counter);
}
} finally {
crunchifyStream.close();
}
return crunchifyWriter.toString();
} else {
return "No Contents";
}
}
private void setupAutoCompleteTextView() {
List<String> activites = new ArrayList<>();
PackageManager packageManager = getPackageManager();
try {
PackageInfo packageInfo = packageManager.getPackageInfo(AppUtils.getPackageName(this),
PackageManager.GET_ACTIVITIES);
ActivityInfo[] activityInfos = packageInfo.activities;
for (ActivityInfo activityInfo : activityInfos) {
String activity = activityInfo.name;
int dotIndex = activity.lastIndexOf(".");
String act = activity.substring(dotIndex + 1);
activites.add(act.concat(".class"));
}
} catch (PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
ArrayAdapter<?> adapter = new ArrayAdapter<>(this,
android.R.layout.simple_list_item_1, activites);
mAutoCompleteTextView.setAdapter(adapter);
mAutoCompleteTextView.setOnItemClickListener((parent, view, position, id) ->
Toast.makeText(this, "result==" + activites.get(position), Toast.LENGTH_SHORT).show());
}
}
|
|
package nxt;
import nxt.util.Convert;
import java.util.Collection;
import java.util.Collections;
import java.util.SortedSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListSet;
public abstract class Order {
private static final SortedSet<? extends Order> emptySortedSet = Collections.unmodifiableSortedSet(new ConcurrentSkipListSet<Order>());
static void clear() {
Ask.askOrders.clear();
Ask.sortedAskOrders.clear();
Bid.bidOrders.clear();
Bid.sortedBidOrders.clear();
}
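// Matching sketch, as implemented below: while the lowest-priced ask is at or below the
// highest-priced bid, a trade executes for the smaller of the two remaining quantities.
// The trade price comes from the older order (lower height, then lower id as a tie-breaker),
// the bid's surplus (bid price minus trade price) is credited back to the buyer's unconfirmed
// balance, and fully filled orders are removed from the books.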
private static void matchOrders(Long assetId) {
SortedSet<Ask> sortedAssetAskOrders = Ask.sortedAskOrders.get(assetId);
SortedSet<Bid> sortedAssetBidOrders = Bid.sortedBidOrders.get(assetId);
if (sortedAssetAskOrders == null || sortedAssetBidOrders == null) {
return;
}
while (!sortedAssetAskOrders.isEmpty() && !sortedAssetBidOrders.isEmpty()) {
Ask askOrder = sortedAssetAskOrders.first();
Bid bidOrder = sortedAssetBidOrders.first();
if (askOrder.getPrice() > bidOrder.getPrice()) {
break;
}
int quantity = Math.min(((Order)askOrder).quantity, ((Order)bidOrder).quantity);
long price = askOrder.getHeight() < bidOrder.getHeight() || (askOrder.getHeight() == bidOrder.getHeight() && askOrder.getId() < bidOrder.getId()) ? askOrder.getPrice() : bidOrder.getPrice();
Block lastBlock=Nxt.getBlockchain().getLastBlock();
int timeStamp=lastBlock.getTimestamp();
Trade.addTrade(assetId, timeStamp, lastBlock.getId(), askOrder.getId(), bidOrder.getId(), quantity, price);
if ((((Order)askOrder).quantity -= quantity) == 0) {
Ask.removeOrder(askOrder.getId());
}
askOrder.getAccount().addToBalanceAndUnconfirmedBalance(quantity * price);
askOrder.getAccount().addToAssetBalance(assetId, -quantity);
if ((((Order)bidOrder).quantity -= quantity) == 0) {
Bid.removeOrder(bidOrder.getId());
}
bidOrder.getAccount().addToAssetAndUnconfirmedAssetBalance(assetId, quantity);
bidOrder.getAccount().addToBalance(-quantity * price);
bidOrder.getAccount().addToUnconfirmedBalance(quantity * (bidOrder.getPrice() - price));
}
}
private final Long id;
private final Account account;
private final Long assetId;
private final long price;
private final long height;
private volatile int quantity;
private Order(Long id, Account account, Long assetId, int quantity, long price) {
this.id = id;
this.account = account;
this.assetId = assetId;
this.quantity = quantity;
this.price = price;
this.height = Nxt.getBlockchain().getLastBlock().getHeight();
}
public Long getId() {
return id;
}
public Account getAccount() {
return account;
}
public Long getAssetId() {
return assetId;
}
public long getPrice() {
return price;
}
public final int getQuantity() {
return quantity;
}
public long getHeight() {
return height;
}
private int compareTo(Order o) {
if (height < o.height) {
return -1;
} else if (height > o.height) {
return 1;
} else {
if (id < o.id) {
return -1;
} else if (id > o.id) {
return 1;
} else {
return 0;
}
}
}
public static final class Ask extends Order implements Comparable<Ask> {
private static final ConcurrentMap<Long, Ask> askOrders = new ConcurrentHashMap<>();
private static final ConcurrentMap<Long, SortedSet<Ask>> sortedAskOrders = new ConcurrentHashMap<>();
private static final Collection<Ask> allAskOrders = Collections.unmodifiableCollection(askOrders.values());
public static Collection<Ask> getAllAskOrders() {
return allAskOrders;
}
public static Ask getAskOrder(Long orderId) {
return askOrders.get(orderId);
}
public static SortedSet<Ask> getSortedOrders(Long assetId) {
SortedSet<Ask> sortedOrders = sortedAskOrders.get(assetId);
return sortedOrders == null ? (SortedSet<Ask>)emptySortedSet : Collections.unmodifiableSortedSet(sortedOrders);
}
static void addOrder(Long transactionId, Account senderAccount, Long assetId, int quantity, long price) {
Ask order = new Ask(transactionId, senderAccount, assetId, quantity, price);
if (askOrders.putIfAbsent(order.getId(), order) != null) {
throw new IllegalStateException("Ask order id " + Convert.toUnsignedLong(order.getId()) + " already exists");
}
SortedSet<Ask> sortedAssetAskOrders = sortedAskOrders.get(assetId);
if (sortedAssetAskOrders == null) {
sortedAssetAskOrders = new ConcurrentSkipListSet<>();
sortedAskOrders.put(assetId,sortedAssetAskOrders);
}
sortedAssetAskOrders.add(order);
matchOrders(assetId);
}
static Ask removeOrder(Long orderId) {
Ask askOrder = askOrders.remove(orderId);
if (askOrder != null) {
sortedAskOrders.get(askOrder.getAssetId()).remove(askOrder);
}
return askOrder;
}
private Ask(Long orderId, Account account, Long assetId, int quantity, long price) {
super(orderId, account, assetId, quantity, price);
}
@Override
public int compareTo(Ask o) {
if (this.getPrice() < o.getPrice()) {
return -1;
} else if (this.getPrice() > o.getPrice()) {
return 1;
} else {
return super.compareTo(o);
}
}
}
public static final class Bid extends Order implements Comparable<Bid> {
private static final ConcurrentMap<Long, Bid> bidOrders = new ConcurrentHashMap<>();
private static final ConcurrentMap<Long, SortedSet<Bid>> sortedBidOrders = new ConcurrentHashMap<>();
private static final Collection<Bid> allBidOrders = Collections.unmodifiableCollection(bidOrders.values());
public static Collection<Bid> getAllBidOrders() {
return allBidOrders;
}
public static Bid getBidOrder(Long orderId) {
return bidOrders.get(orderId);
}
public static SortedSet<Bid> getSortedOrders(Long assetId) {
SortedSet<Bid> sortedOrders = sortedBidOrders.get(assetId);
return sortedOrders == null ? (SortedSet<Bid>)emptySortedSet : Collections.unmodifiableSortedSet(sortedOrders);
}
static void addOrder(Long transactionId, Account senderAccount, Long assetId, int quantity, long price) {
Bid order = new Bid(transactionId, senderAccount, assetId, quantity, price);
if (bidOrders.putIfAbsent(order.getId(), order) != null) {
throw new IllegalStateException("Bid order id " + Convert.toUnsignedLong(order.getId()) + " already exists");
}
SortedSet<Bid> sortedAssetBidOrders = sortedBidOrders.get(assetId);
if (sortedAssetBidOrders == null) {
sortedAssetBidOrders = new ConcurrentSkipListSet<>();
sortedBidOrders.put(assetId,sortedAssetBidOrders);
}
sortedAssetBidOrders.add(order);
matchOrders(assetId);
}
static Bid removeOrder(Long orderId) {
Bid bidOrder = bidOrders.remove(orderId);
if (bidOrder != null) {
sortedBidOrders.get(bidOrder.getAssetId()).remove(bidOrder);
}
return bidOrder;
}
private Bid(Long orderId, Account account, Long assetId, int quantity, long price) {
super(orderId, account, assetId, quantity, price);
}
@Override
public int compareTo(Bid o) {
if (this.getPrice() > o.getPrice()) {
return -1;
} else if (this.getPrice() < o.getPrice()) {
return 1;
} else {
return super.compareTo(o);
}
}
}
}
|
|
package jere99.chess.backEnd;
import jere99.chess.backEnd.pieces.*;
import jere99.chess.reference.Pieces;
/**
* Represents a chess board.
*
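 * As constructed below, rows 0 and 1 hold the black pieces and rows 6 and 7 hold the white
 * pieces; the boolean passed to each Piece constructor ({@code row == 7}, or {@code row == 6}
 * for pawns) is true for white.
 *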
* @author Megha
* @author JeremiahDeGreeff
*/
public class Board implements Cloneable {
/**
* The game which this board is a part of.
*/
private final Game game;
/**
* All the Pieces on this Board.
*/
private final Piece[][] board = new Piece[8][8];
/**
* Alias of the white king to make locating easier.
*/
private final King whiteKing;
/**
* Alias of the black king to make locating easier.
*/
private final King blackKing;
/**
* Initializes a Board with the starting piece positions.
*
* @param game the game which this board is a part of
*/
protected Board(Game game) {
this.game = game;
for(int row = 0; row < 8; row++) {
if(row == 0 || row == 7) {
board[row][0] = new Rook(row, 0, row == 7, this);
board[row][1] = new Knight(row, 1, row == 7, this);
board[row][2] = new Bishop(row, 2, row == 7, this);
board[row][3] = new Queen(row, 3, row == 7, this);
board[row][4] = new King(row, 4, row == 7, this);
board[row][5] = new Bishop(row, 5, row == 7, this);
board[row][6] = new Knight(row, 6, row == 7, this);
board[row][7] = new Rook(row, 7, row == 7, this);
}
else if(row == 1 || row == 6)
for(int column = 0; column < board[1].length; column++)
board[row][column] = new Pawn(row, column, row == 6, this);
}
blackKing = (King)board[0][4];
whiteKing = (King)board[7][4];
}
/**
* Initializes a Board from an existing Board.
* Intended to be used for cloning.
*
 * @param oldBoard the board whose state will be copied
*/
private Board(Board oldBoard) {
this.game = oldBoard.game;
for(int row = 0; row < 8; row++)
for(int column = 0; column < 8; column++) {
Piece p = oldBoard.board[row][column];
if(p != null) {
Piece newPiece = p.copyPiece(this);
if(p instanceof Castleable && ((Castleable) p).hasMoved())
((Castleable) newPiece).castleableMove();
this.board[row][column] = newPiece;
}
}
blackKing = (King)board[oldBoard.blackKing.getRow()][oldBoard.blackKing.getColumn()];
whiteKing = (King)board[oldBoard.whiteKing.getRow()][oldBoard.whiteKing.getColumn()];
}
@Override
public Board clone() {
return new Board(this);
}
/**
* @param row the row of the wanted Piece
* @param column the column of the wanted Piece
* @return The piece at board[row][column]
*/
public Piece getPieceAt(int row, int column) {
return board[row][column];
}
/**
* Moves the passed Piece to the specified spot on this Board.
* Should only be run when the move is valid according to piece rules.
*
* @param piece the Piece to move
* @param newRow the row to move piece to
* @param newColumn the column to move piece to
*/
private void movePiece(Piece piece, int newRow, int newColumn) {
board[newRow][newColumn] = piece;
board[piece.getRow()][piece.getColumn()] = null;
piece.move(newRow, newColumn);
}
/**
 * Tests if moving piece to board[newRow][newColumn] is valid and does not leave the mover's own king in check.
* Should only be called on a Board which is not associated with a GUI and can be discarded afterwards.
*
* @param piece Piece to test move
* @param newRow row to move piece to
* @param newColumn column to move piece to
* @return true if move is valid and will not check own king, false otherwise
* @throws IllegalStateException if this Board is associated with a GUI
*/
public boolean testMove(Piece piece, int newRow, int newColumn) {
if(game.isRepresentedBoard(this))
throw new IllegalStateException("Cannot run move testing methods on a baord associated with a GUI");
if(piece.getRow() == newRow && piece.getColumn() == newColumn || !piece.isValid(newRow, newColumn)) //actually a move and is valid for the piece
return false;
movePiece(piece, newRow, newColumn);
return !kingChecked(piece.isWhite() ? whiteKing : blackKing); //doesn't check own king
}
/**
* Moves the passed piece to the specified spot on this Board.
* Updates the GUI accordingly.
* Should only be run if the move has been tested and is actually intended to be represented on the GUI.
*
* @param piece the Piece to move
* @param newRow the row to move p to
* @param newColumn the column to move p to
*/
protected void makeMove(Piece piece, int newRow, int newColumn) {
int startRow = piece.getRow(), startColumn = piece.getColumn();
//move the piece
movePiece(piece, newRow, newColumn);
//update the GUI
game.updateSquare(startRow, startColumn);
game.updateSquare(newRow, newColumn);
//if the move is a castle also move the rook
if(piece instanceof King && startColumn == 4 && (newColumn == 6 || newColumn == 2)) {
boolean kingSide = newColumn == 6; //true if into column 6, false if into column 2
movePiece(board[newRow][kingSide ? 7 : 0], newRow, kingSide ? 5 : 3);
game.updateSquare(newRow, kingSide ? 7 : 0);
game.updateSquare(newRow, kingSide ? 5 : 3);
}
//create a GUI for pawn promotion if necessary
if(piece instanceof Pawn && (newRow == 0 || newRow == 7))
game.pawnPromotionInit(newRow, newColumn);
//see if the move has put the opposing king in check or checkmate
detectCheck(newRow, newColumn);
}
/**
* Tests if a move to a particular row and column has resulted in check or checkmate for the opposing player.
*
* @param row the row of the piece that just moved
* @param column the column of the piece that just moved
*/
private void detectCheck(int row, int column) {
boolean isWhiteMove = board[row][column].isWhite();
if(kingChecked(isWhiteMove ? blackKing : whiteKing)) {
System.out.println("The " + (isWhiteMove ? "black" : "white") + " king is in check!");
Board testBoard = this.clone();
if(testBoard.checkmate(isWhiteMove ? testBoard.blackKing : testBoard.whiteKing))
game.checkmateInit(isWhiteMove);
}
}
/**
* Tests if a King is in check.
*
* @param king the King to test
* @return true if the King is checked, false otherwise
*/
public boolean kingChecked(King king) {
for(Piece[] row : board)
for(Piece p : row)
if(p != null && king.isWhite() != p.isWhite())
if(p.isValid(king.getRow(), king.getColumn()))
return true;
return false;
}
/**
* Tests if a King is in checkmate.
* Should only be called on a Board which is not associated with a GUI and can be discarded afterwards.
*
* @param king the King to test
* @return true if the King is in checkmate, false otherwise
* @throws IllegalStateException if this Board is associated with a GUI
*/
private boolean checkmate(King king) {
if(game.isRepresentedBoard(this))
throw new IllegalStateException("Cannot run move testing methods on a baord associated with a GUI");
//test if king can move to any adjacent square (inclusive bounds so row + 1 and column + 1 are covered)
for(int r = king.getRow() - 1; r <= king.getRow() + 1; r++)
for(int c = king.getColumn() - 1; c <= king.getColumn() + 1; c++)
if(r >= 0 && r <= 7 && c >= 0 && c <= 7 && testMove(king, r, c))
return false;
//find Piece checking king
Piece checking = null;
for(Piece[] row : board)
for(Piece p : row)
if(p != null && p.isWhite() != king.isWhite() && p.isValid(king.getRow(), king.getColumn()))
if(checking == null)
checking = p;
else
return true;
//find out if checking can be taken
for(Piece[] row : board)
for(Piece p : row)
if(p != null && p.isWhite() != checking.isWhite() && testMove(p, checking.getRow(), checking.getColumn()))
return false;
//find out if checking can be blocked
if(checking instanceof Pawn || checking instanceof Knight)
return true;
int row = checking.getRow();
int column = checking.getColumn();
//if checking is Rook (or Queen)...
if(checking instanceof Rook || checking instanceof Queen)
if(king.getRow() == row) { //same row
for(int c = column; column < king.getColumn() ? c < king.getColumn() : c > king.getColumn(); c += (column < king.getColumn() ? 1 : -1))
for(Piece[] boardRow : board)
for(Piece p : boardRow)
if(p != null && p.isWhite() != checking.isWhite() && testMove(p, row, c))
return false;
} else if(king.getColumn() == column) { //same column
for(int r = row; row < king.getRow() ? r < king.getRow() : r > king.getRow(); r += (row < king.getRow() ? 1 : -1))
for(Piece[] boardRow : board)
for(Piece p : boardRow)
if(p != null && p.isWhite() != checking.isWhite() && testMove(p, r, column))
return false;
}
//if checking is Bishop (or Queen)...
if(checking instanceof Bishop || checking instanceof Queen)
for(Piece[] boardRow : board)
for(Piece p : boardRow)
if(p != null && p.isWhite() != checking.isWhite())
for(int i = 1; i < Math.abs(row - king.getRow()); i++)
if(testMove(p, i * (int)Math.signum(king.getRow() - row) + row, i * (int)Math.signum(king.getColumn() - column) + column))
return false;
return true;
}
/**
 * Promotes a Pawn that has reached the far row to a new Piece of the player's choice.
* Updates GUI accordingly.
* Tests if promotion puts king in check.
*
* @param row the row of pawn to change
* @param column the column of pawn to change
 * @param piece the type of the selected piece
*/
protected void pawnPromotion(int row, int column, Pieces piece) {
boolean pawnIsWhite = board[row][column].isWhite();
switch(piece.toString()) {
case "QUEEN":
board[row][column] = new Queen(row, column, pawnIsWhite, this);
break;
case "KNIGHT":
board[row][column] = new Knight(row, column, pawnIsWhite, this);
break;
case "ROOK":
board[row][column] = new Rook(row, column, pawnIsWhite, this);
break;
case "BISHOP":
board[row][column] = new Bishop(row, column, pawnIsWhite, this);
break;
}
game.updateSquare(row, column);
detectCheck(row, column);
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.service.persistent;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import lombok.Cleanup;
import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.impl.PositionImpl;
import org.apache.pulsar.broker.service.Topic;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.ProducerConsumerBase;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.common.naming.TopicName;
import org.awaitility.Awaitility;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "broker")
public class TopicDuplicationTest extends ProducerConsumerBase {
private final String testTenant = "my-property";
private final String testNamespace = "my-ns";
private final String myNamespace = testTenant + "/" + testNamespace;
private final String testTopic = "persistent://" + myNamespace + "/max-unacked-";
@BeforeMethod
@Override
protected void setup() throws Exception {
resetConfig();
this.conf.setSystemTopicEnabled(true);
this.conf.setTopicLevelPoliciesEnabled(true);
this.conf.setBrokerDeduplicationEnabled(true);
super.internalSetup();
super.producerBaseSetup();
}
@AfterMethod(alwaysRun = true)
@Override
protected void cleanup() throws Exception {
super.internalCleanup();
}
@Test(timeOut = 10000)
public void testDuplicationApi() throws Exception {
final String topicName = testTopic + UUID.randomUUID().toString();
admin.topics().createPartitionedTopic(topicName, 3);
waitCacheInit(topicName);
Boolean enabled = admin.topics().getDeduplicationEnabled(topicName);
assertNull(enabled);
admin.topics().enableDeduplication(topicName, true);
Awaitility.await()
.until(()-> admin.topics().getDeduplicationEnabled(topicName) != null);
assertTrue(admin.topics().getDeduplicationEnabled(topicName));
admin.topics().disableDeduplication(topicName);
Awaitility.await()
.until(()-> admin.topics().getDeduplicationEnabled(topicName) == null);
assertNull(admin.topics().getDeduplicationEnabled(topicName));
}
@Test(timeOut = 10000)
public void testTopicDuplicationApi2() throws Exception {
final String topicName = testTopic + UUID.randomUUID().toString();
admin.topics().createPartitionedTopic(topicName, 3);
waitCacheInit(topicName);
Boolean enabled = admin.topics().getDeduplicationStatus(topicName);
assertNull(enabled);
admin.topics().setDeduplicationStatus(topicName, true);
Awaitility.await()
.until(() -> admin.topics().getDeduplicationStatus(topicName) != null);
assertTrue(admin.topics().getDeduplicationStatus(topicName));
admin.topics().removeDeduplicationStatus(topicName);
Awaitility.await()
.until(() -> admin.topics().getDeduplicationStatus(topicName) == null);
assertNull(admin.topics().getDeduplicationStatus(topicName));
}
@Test(timeOut = 10000)
public void testTopicDuplicationAppliedApi() throws Exception {
final String topicName = testTopic + UUID.randomUUID().toString();
waitCacheInit(topicName);
assertNull(admin.namespaces().getDeduplicationStatus(myNamespace));
assertNull(admin.topics().getDeduplicationStatus(topicName));
assertEquals(admin.topics().getDeduplicationStatus(topicName, true).booleanValue(),
conf.isBrokerDeduplicationEnabled());
admin.namespaces().setDeduplicationStatus(myNamespace, false);
Awaitility.await().untilAsserted(() -> assertFalse(admin.topics().getDeduplicationStatus(topicName, true)));
admin.topics().setDeduplicationStatus(topicName, true);
Awaitility.await().untilAsserted(() -> assertTrue(admin.topics().getDeduplicationStatus(topicName, true)));
admin.topics().removeDeduplicationStatus(topicName);
Awaitility.await().untilAsserted(() -> assertFalse(admin.topics().getDeduplicationStatus(topicName, true)));
admin.namespaces().removeDeduplicationStatus(myNamespace);
Awaitility.await().untilAsserted(() -> assertEquals(admin.topics().getDeduplicationStatus(topicName, true).booleanValue(),
conf.isBrokerDeduplicationEnabled()));
}
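// Precedence exercised below: a topic-level deduplication setting overrides the namespace-level
// setting, which in turn overrides the broker-level default; removing a level falls back to the
// next broader one.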
@Test(timeOut = 30000)
public void testDeduplicationPriority() throws Exception {
final String topicName = testTopic + UUID.randomUUID().toString();
final String producerName = "my-producer";
final int maxMsgNum = 5;
waitCacheInit(topicName);
//1) Start up the producer and send messages. We specify the max sequenceId explicitly.
@Cleanup
Producer<String> producer = pulsarClient.newProducer(Schema.STRING).topic(topicName)
.producerName(producerName).create();
long maxSeq = sendMessageAndGetMaxSeq(maxMsgNum, producer);
PersistentTopic persistentTopic =
(PersistentTopic) pulsar.getBrokerService().getTopicIfExists(topicName).get().get();
MessageDeduplication messageDeduplication = persistentTopic.getMessageDeduplication();
//broker-level deduplication is enabled in setup() by default
checkDeduplicationEnabled(producerName, messageDeduplication, maxSeq);
//disabled in namespace-level
admin.namespaces().setDeduplicationStatus(myNamespace, false);
Awaitility.await().untilAsserted(() -> assertNotNull(admin.namespaces().getDeduplicationStatus(myNamespace)));
sendMessageAndGetMaxSeq(maxMsgNum, producer);
checkDeduplicationDisabled(producerName, messageDeduplication);
//enabled in topic-level
admin.topics().setDeduplicationStatus(topicName, true);
Awaitility.await().untilAsserted(() -> assertNotNull(admin.topics().getDeduplicationStatus(topicName)));
Awaitility.await().untilAsserted(() -> assertTrue(messageDeduplication.isEnabled()));
long maxSeq2 = sendMessageAndGetMaxSeq(maxMsgNum, producer);
checkDeduplicationEnabled(producerName, messageDeduplication, maxSeq2);
//remove topic-level, use namespace-level
admin.topics().removeDeduplicationStatus(topicName);
Awaitility.await().untilAsserted(() -> assertNull(admin.topics().getDeduplicationStatus(topicName)));
Awaitility.await().untilAsserted(() -> assertFalse(messageDeduplication.isEnabled()));
producer.newMessage().value("msg").sequenceId(1).send();
checkDeduplicationDisabled(producerName, messageDeduplication);
//remove namespace-level , use broker-level
admin.namespaces().removeDeduplicationStatus(myNamespace);
Awaitility.await().untilAsserted(() -> assertNull(admin.namespaces().getDeduplicationStatus(myNamespace)));
Awaitility.await().untilAsserted(() -> assertTrue(messageDeduplication.isEnabled()));
long maxSeq3 = sendMessageAndGetMaxSeq(maxMsgNum, producer);
checkDeduplicationEnabled(producerName, messageDeduplication, maxSeq3);
}
private long sendMessageAndGetMaxSeq(int maxMsgNum, Producer producer) throws Exception{
long seq = System.nanoTime();
for (int i = 0; i <= maxMsgNum; i++) {
producer.newMessage().value("msg-" + i).sequenceId(seq + i).send();
}
return seq + maxMsgNum;
}
private void checkDeduplicationDisabled(String producerName, MessageDeduplication messageDeduplication) throws Exception {
messageDeduplication.checkStatus().whenComplete((res, ex) -> {
if (ex != null) {
fail("should not fail");
}
assertEquals(messageDeduplication.getLastPublishedSequenceId(producerName), -1);
assertEquals(messageDeduplication.highestSequencedPersisted.size(), 0);
assertEquals(messageDeduplication.highestSequencedPushed.size(), 0);
}).get();
}
private void checkDeduplicationEnabled(String producerName, MessageDeduplication messageDeduplication,
long maxSeq) throws Exception {
messageDeduplication.checkStatus().whenComplete((res, ex) -> {
if (ex != null) {
fail("should not fail");
}
assertNotNull(messageDeduplication.highestSequencedPersisted);
assertNotNull(messageDeduplication.highestSequencedPushed);
long seqId = messageDeduplication.getLastPublishedSequenceId(producerName);
assertEquals(seqId, maxSeq);
assertEquals(messageDeduplication.highestSequencedPersisted.get(producerName).longValue(), maxSeq);
assertEquals(messageDeduplication.highestSequencedPushed.get(producerName).longValue(), maxSeq);
}).get();
}
@Test(timeOut = 10000)
public void testDuplicationSnapshotApi() throws Exception {
final String topicName = testTopic + UUID.randomUUID().toString();
admin.topics().createPartitionedTopic(topicName, 3);
waitCacheInit(topicName);
Integer interval = admin.topics().getDeduplicationSnapshotInterval(topicName);
assertNull(interval);
admin.topics().setDeduplicationSnapshotInterval(topicName, 1024);
Awaitility.await()
.until(()-> admin.topics().getDeduplicationSnapshotInterval(topicName) != null);
Assert.assertEquals(admin.topics().getDeduplicationSnapshotInterval(topicName).intValue(), 1024);
admin.topics().removeDeduplicationSnapshotInterval(topicName);
Awaitility.await()
.until(()-> admin.topics().getDeduplicationSnapshotInterval(topicName) == null);
assertNull(admin.topics().getDeduplicationSnapshotInterval(topicName));
}
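// Timing assumed in this test: snapshots are scheduled every second (frequency = 1s) while the
// snapshot interval is 3s at the topic level, 5s at the namespace level, and 7s at the broker
// level, so removing a more specific policy lengthens the effective interval.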
@Test(timeOut = 30000)
public void testTopicPolicyTakeSnapshot() throws Exception {
super.internalCleanup();
resetConfig();
conf.setSystemTopicEnabled(true);
conf.setTopicLevelPoliciesEnabled(true);
conf.setBrokerDeduplicationEnabled(true);
conf.setBrokerDeduplicationSnapshotFrequencyInSeconds(1);
conf.setBrokerDeduplicationSnapshotIntervalSeconds(7);
conf.setBrokerDeduplicationEntriesInterval(20000);
super.internalSetup();
super.producerBaseSetup();
final String topicName = testTopic + UUID.randomUUID().toString();
final String producerName = "my-producer";
@Cleanup
Producer<String> producer = pulsarClient
.newProducer(Schema.STRING).topic(topicName).enableBatching(false).producerName(producerName).create();
waitCacheInit(topicName);
admin.topics().setDeduplicationSnapshotInterval(topicName, 3);
admin.namespaces().setDeduplicationSnapshotInterval(myNamespace, 5);
int msgNum = 10;
CountDownLatch countDownLatch = new CountDownLatch(msgNum);
for (int i = 0; i < msgNum; i++) {
producer.newMessage().value("msg" + i).sendAsync().whenComplete((res, e) -> countDownLatch.countDown());
}
countDownLatch.await();
PersistentTopic persistentTopic = (PersistentTopic) pulsar.getBrokerService().getTopicIfExists(topicName).get().get();
long seqId = persistentTopic.getMessageDeduplication().highestSequencedPersisted.get(producerName);
PositionImpl position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor()
.getManagedLedger().getLastConfirmedEntry();
assertEquals(seqId, msgNum - 1);
assertEquals(position.getEntryId(), msgNum - 1);
//The first time, topic-level policies are used: 1 second delay + 3 second interval
Awaitility.await()
.until(() -> ((PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor()
.getMarkDeletedPosition()).getEntryId() == msgNum - 1);
ManagedCursor managedCursor = persistentTopic.getMessageDeduplication().getManagedCursor();
PositionImpl markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
assertEquals(position, markDeletedPosition);
//remove topic-level policies, namespace-level should be used, interval becomes 5 seconds
admin.topics().removeDeduplicationSnapshotInterval(topicName);
producer.newMessage().value("msg").send();
//zk update time + 5 second interval time
Awaitility.await()
.until(() -> ((PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor()
.getMarkDeletedPosition()).getEntryId() == msgNum);
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
assertEquals(msgNum, markDeletedPosition.getEntryId());
assertEquals(position, markDeletedPosition);
//4) remove namespace-level policies, broker-level should be used, interval becomes 7 seconds
admin.namespaces().removeDeduplicationSnapshotInterval(myNamespace);
Awaitility.await()
.until(() -> (admin.namespaces().getDeduplicationSnapshotInterval(myNamespace) == null));
producer.newMessage().value("msg").send();
//ensure that the time exceeds the scheduling interval of ns and topic, but no snapshot is generated
Thread.sleep(3000);
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
// broker-level interval is 7 seconds, so 3 seconds will not take a snapshot
assertNotEquals(msgNum + 1, markDeletedPosition.getEntryId());
assertNotEquals(position, markDeletedPosition);
// wait for scheduler
Awaitility.await()
.until(() -> ((PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor()
.getMarkDeletedPosition()).getEntryId() == msgNum + 1);
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
assertEquals(msgNum + 1, markDeletedPosition.getEntryId());
assertEquals(position, markDeletedPosition);
}
@Test(timeOut = 20000)
public void testDuplicationMethod() throws Exception {
final String topicName = testTopic + UUID.randomUUID().toString();
final String producerName = "my-producer";
final int maxMsgNum = 100;
admin.topics().createPartitionedTopic(testTopic, 3);
waitCacheInit(topicName);
//1) Start up the producer and send messages. We specify the max sequenceId explicitly.
@Cleanup
Producer<String> producer = pulsarClient.newProducer(Schema.STRING).topic(topicName)
.producerName(producerName).create();
long maxSeq = sendMessageAndGetMaxSeq(maxMsgNum, producer);
//2) Max sequenceId should be recorded correctly
CompletableFuture<Optional<Topic>> completableFuture = pulsar.getBrokerService().getTopics().get(topicName);
Topic topic = completableFuture.get(1, TimeUnit.SECONDS).get();
PersistentTopic persistentTopic = (PersistentTopic) topic;
MessageDeduplication messageDeduplication = persistentTopic.getMessageDeduplication();
checkDeduplicationEnabled(producerName, messageDeduplication, maxSeq);
//3) disable the deduplication check
admin.topics().enableDeduplication(topicName, false);
Awaitility.await()
.until(() -> admin.topics().getDeduplicationEnabled(topicName) != null);
for (int i = 0; i < 100; i++) {
producer.newMessage().value("msg-" + i).sequenceId(maxSeq + i).send();
}
//4) Max sequenceId record should be clear
checkDeduplicationDisabled(producerName, messageDeduplication);
}
@Test(timeOut = 40000)
public void testDuplicationSnapshot() throws Exception {
testTakeSnapshot(true);
testTakeSnapshot(false);
}
private void testTakeSnapshot(boolean enabledSnapshot) throws Exception {
super.internalCleanup();
resetConfig();
conf.setBrokerDeduplicationEnabled(true);
conf.setBrokerDeduplicationSnapshotFrequencyInSeconds(enabledSnapshot ? 1 : 0);
conf.setBrokerDeduplicationSnapshotIntervalSeconds(1);
conf.setBrokerDeduplicationEntriesInterval(20000);
super.internalSetup();
super.producerBaseSetup();
final String topicName = testTopic + UUID.randomUUID().toString();
final String producerName = "my-producer";
@Cleanup
Producer<String> producer = pulsarClient
.newProducer(Schema.STRING).topic(topicName).enableBatching(false).producerName(producerName).create();
int msgNum = 50;
CountDownLatch countDownLatch = new CountDownLatch(msgNum);
for (int i = 0; i < msgNum; i++) {
producer.newMessage().value("msg" + i).sendAsync().whenComplete((res, e) -> countDownLatch.countDown());
}
countDownLatch.await();
PersistentTopic persistentTopic = (PersistentTopic) pulsar.getBrokerService().getTopicIfExists(topicName).get().get();
long seqId = persistentTopic.getMessageDeduplication().highestSequencedPersisted.get(producerName);
PositionImpl position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
assertEquals(seqId, msgNum - 1);
assertEquals(position.getEntryId(), msgNum - 1);
Thread.sleep(2000);
ManagedCursor managedCursor = persistentTopic.getMessageDeduplication().getManagedCursor();
PositionImpl markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
if (enabledSnapshot) {
assertEquals(position, markDeletedPosition);
} else {
assertNotEquals(position, markDeletedPosition);
assertNotEquals(markDeletedPosition.getEntryId(), -1);
}
producer.newMessage().value("msg").send();
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
assertNotEquals(msgNum, markDeletedPosition.getEntryId());
assertNotNull(position);
Thread.sleep(2000);
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
if (enabledSnapshot) {
assertEquals(msgNum, markDeletedPosition.getEntryId());
assertEquals(position, markDeletedPosition);
} else {
assertNotEquals(msgNum, markDeletedPosition.getEntryId());
assertNotEquals(position, markDeletedPosition);
}
}
@Test(timeOut = 30000)
public void testNamespacePolicyApi() throws Exception {
Integer interval = admin.namespaces().getDeduplicationSnapshotInterval(myNamespace);
assertNull(interval);
admin.namespaces().setDeduplicationSnapshotInterval(myNamespace, 100);
interval = admin.namespaces().getDeduplicationSnapshotInterval(myNamespace);
assertEquals(interval.intValue(), 100);
admin.namespaces().removeDeduplicationSnapshotInterval(myNamespace);
interval = admin.namespaces().getDeduplicationSnapshotInterval(myNamespace);
assertNull(interval);
admin.namespaces().setDeduplicationSnapshotIntervalAsync(myNamespace, 200).get();
interval = admin.namespaces().getDeduplicationSnapshotIntervalAsync(myNamespace).get();
assertEquals(interval.intValue(), 200);
admin.namespaces().removeDeduplicationSnapshotIntervalAsync(myNamespace).get();
interval = admin.namespaces().getDeduplicationSnapshotIntervalAsync(myNamespace).get();
assertNull(interval);
}
@Test(timeOut = 30000)
public void testNamespacePolicyTakeSnapshot() throws Exception {
super.internalCleanup();
resetConfig();
conf.setBrokerDeduplicationEnabled(true);
conf.setBrokerDeduplicationSnapshotFrequencyInSeconds(1);
conf.setBrokerDeduplicationSnapshotIntervalSeconds(3);
conf.setBrokerDeduplicationEntriesInterval(20000);
super.internalSetup();
super.producerBaseSetup();
final String topicName = testTopic + UUID.randomUUID().toString();
final String producerName = "my-producer";
@Cleanup
Producer<String> producer = pulsarClient
.newProducer(Schema.STRING).topic(topicName).enableBatching(false).producerName(producerName).create();
admin.namespaces().setDeduplicationSnapshotInterval(myNamespace, 1);
int msgNum = 50;
CountDownLatch countDownLatch = new CountDownLatch(msgNum);
for (int i = 0; i < msgNum; i++) {
producer.newMessage().value("msg" + i).sendAsync().whenComplete((res, e) -> countDownLatch.countDown());
}
countDownLatch.await();
PersistentTopic persistentTopic = (PersistentTopic) pulsar.getBrokerService().getTopicIfExists(topicName).get().get();
long seqId = persistentTopic.getMessageDeduplication().highestSequencedPersisted.get(producerName);
PositionImpl position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor()
.getManagedLedger().getLastConfirmedEntry();
assertEquals(seqId, msgNum - 1);
assertEquals(position.getEntryId(), msgNum - 1);
//The first time, 1 second delay + 1 second interval
Awaitility.await().until(()-> ((PositionImpl) persistentTopic
.getMessageDeduplication().getManagedCursor().getMarkDeletedPosition()).getEntryId() == msgNum -1);
ManagedCursor managedCursor = persistentTopic.getMessageDeduplication().getManagedCursor();
PositionImpl markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
assertEquals(position, markDeletedPosition);
//remove namespace-level policies, broker-level should be used
admin.namespaces().removeDeduplicationSnapshotInterval(myNamespace);
Thread.sleep(2000);
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
assertNotEquals(msgNum - 1, markDeletedPosition.getEntryId());
assertNotEquals(position, markDeletedPosition.getEntryId());
//3 seconds total
Thread.sleep(1000);
markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor().getManagedLedger().getLastConfirmedEntry();
assertEquals(msgNum - 1, markDeletedPosition.getEntryId());
assertEquals(position, markDeletedPosition);
}
@Test(timeOut = 30000)
public void testDisableNamespacePolicyTakeSnapshot() throws Exception {
super.internalCleanup();
resetConfig();
conf.setBrokerDeduplicationEnabled(true);
conf.setBrokerDeduplicationSnapshotFrequencyInSeconds(1);
conf.setBrokerDeduplicationSnapshotIntervalSeconds(1);
conf.setBrokerDeduplicationEntriesInterval(20000);
super.internalSetup();
super.producerBaseSetup();
final String topicName = testTopic + UUID.randomUUID().toString();
final String producerName = "my-producer";
@Cleanup
Producer<String> producer = pulsarClient
.newProducer(Schema.STRING).topic(topicName).enableBatching(false).producerName(producerName).create();
//set value to 0
admin.namespaces().setDeduplicationSnapshotInterval(myNamespace, 0);
int msgNum = 50;
CountDownLatch countDownLatch = new CountDownLatch(msgNum);
for (int i = 0; i < msgNum; i++) {
producer.newMessage().value("msg" + i).sendAsync().whenComplete((res, e) -> countDownLatch.countDown());
}
countDownLatch.await();
PersistentTopic persistentTopic = (PersistentTopic) pulsar.getBrokerService().getTopicIfExists(topicName).get().get();
ManagedCursor managedCursor = persistentTopic.getMessageDeduplication().getManagedCursor();
PositionImpl markDeletedPosition = (PositionImpl) managedCursor.getMarkDeletedPosition();
long seqId = persistentTopic.getMessageDeduplication().highestSequencedPersisted.get(producerName);
PositionImpl position = (PositionImpl) persistentTopic.getMessageDeduplication().getManagedCursor()
.getManagedLedger().getLastConfirmedEntry();
assertEquals(seqId, msgNum - 1);
assertEquals(position.getEntryId(), msgNum - 1);
Awaitility.await().until(()-> ((PositionImpl) persistentTopic
.getMessageDeduplication().getManagedCursor().getMarkDeletedPosition()).getEntryId() == -1);
// take snapshot is disabled, so markDeletedPosition should not change
assertEquals(markDeletedPosition, managedCursor.getMarkDeletedPosition());
assertEquals(markDeletedPosition.getEntryId(), -1);
assertNotEquals(position, markDeletedPosition);
}
private void waitCacheInit(String topicName) throws Exception {
pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub").subscribe().close();
TopicName topic = TopicName.get(topicName);
Awaitility.await()
.until(()-> pulsar.getTopicPoliciesService().cacheIsInitialized(topic));
}
}
|
|
/*
* Copyright 2003-2018 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.psiutils;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.codeInspection.dataFlow.value.RelationType;
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import static java.util.Comparator.*;
/**
 * This equivalence checker considers references to variables that are declared inside the
 * elements being checked for equivalence as NOT equivalent.
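 * For example (informally), the blocks {@code {int i = 0; use(i);}} and {@code {int j = 0; use(j);}}
 * are reported as non-equivalent by this checker because the references {@code i} and {@code j}
 * resolve to variables declared inside the compared elements. A minimal usage sketch:
 * <pre>
 *   EquivalenceChecker checker = EquivalenceChecker.getCanonicalPsiEquivalence();
 *   boolean same = checker.statementsAreEquivalent(statement1, statement2);
 * </pre>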
* @see TrackingEquivalenceChecker which also tracks declaration equivalence, to accurately check the
* equivalence of reference expressions.
*/
public class EquivalenceChecker {
protected static final Match EXACT_MATCH = new Match(true);
protected static final Match EXACT_MISMATCH = new Match(false);
private static final EquivalenceChecker ourCanonicalPsiEquivalence = new EquivalenceChecker();
private static final Comparator<PsiMember> MEMBER_COMPARATOR =
comparing(PsiMember::getName, nullsFirst(naturalOrder())).thenComparing(PsiMember::getText);
private static final Comparator<PsiExpression> EXPRESSION_COMPARATOR =
comparing(ParenthesesUtils::stripParentheses, nullsFirst(comparing(PsiExpression::getText)));
protected EquivalenceChecker() {}
/**
* Returns a shareable EquivalenceChecker instance that does not track declaration equivalence.
* @return a shareable EquivalenceChecker instance
*/
public static EquivalenceChecker getCanonicalPsiEquivalence() {
return ourCanonicalPsiEquivalence;
}
public static class Match {
private final PsiElement myLeftDiff;
private final PsiElement myRightDiff;
private final Boolean myExactlyMatches;
Match(boolean exactlyMatches) {
myExactlyMatches = exactlyMatches;
myLeftDiff = null;
myRightDiff = null;
}
Match(PsiElement leftDiff, PsiElement rightDiff) {
myExactlyMatches = null;
myLeftDiff = leftDiff;
myRightDiff = rightDiff;
}
public PsiElement getLeftDiff() {
return myLeftDiff;
}
public PsiElement getRightDiff() {
return myRightDiff;
}
public boolean isPartialMatch() {
return myExactlyMatches == null;
}
public boolean isExactMatch() {
return myExactlyMatches != null && myExactlyMatches;
}
public boolean isExactMismatch() {
return myExactlyMatches != null && !myExactlyMatches;
}
Match partialIfExactMismatch(PsiElement left, PsiElement right) {
return this == EXACT_MISMATCH ? new Match(left, right) : this;
}
static Match exact(boolean exactMatches) {
return exactMatches ? EXACT_MATCH : EXACT_MISMATCH;
}
Match combine(Match other) {
if (other.isExactMismatch() || isExactMatch()) {
return other;
}
if (isExactMismatch() || other.isExactMatch()) {
return this;
}
return EXACT_MISMATCH;
}
}
public boolean statementsAreEquivalent(@Nullable PsiStatement statement1, @Nullable PsiStatement statement2) {
return statementsMatch(statement1, statement2).isExactMatch();
}
public Match statementsMatch(@Nullable PsiStatement statement1, @Nullable PsiStatement statement2) {
statement1 = ControlFlowUtils.stripBraces(statement1);
statement2 = ControlFlowUtils.stripBraces(statement2);
if (statement1 == null || statement2 == null) {
return Match.exact(statement1 == statement2);
}
if (statement1.getClass() != statement2.getClass()) {
return EXACT_MISMATCH;
}
if (statement1 instanceof PsiAssertStatement) {
return assertStatementsMatch((PsiAssertStatement)statement1, (PsiAssertStatement)statement2);
}
if (statement1 instanceof PsiBlockStatement) {
return blockStatementsMatch((PsiBlockStatement)statement1, (PsiBlockStatement)statement2);
}
if (statement1 instanceof PsiBreakStatement) {
return breakStatementsMatch((PsiBreakStatement)statement1, (PsiBreakStatement)statement2);
}
if (statement1 instanceof PsiContinueStatement) {
return continueStatementsMatch((PsiContinueStatement)statement1, (PsiContinueStatement)statement2);
}
if (statement1 instanceof PsiDeclarationStatement) {
return declarationStatementsMatch((PsiDeclarationStatement)statement1, (PsiDeclarationStatement)statement2);
}
if (statement1 instanceof PsiConditionalLoopStatement) {
return conditionalLoopStatementsMatch((PsiConditionalLoopStatement)statement1, (PsiConditionalLoopStatement)statement2);
}
if (statement1 instanceof PsiEmptyStatement) {
return EXACT_MATCH;
}
if (statement1 instanceof PsiExpressionListStatement) {
return expressionListStatementsMatch((PsiExpressionListStatement)statement1, (PsiExpressionListStatement)statement2);
}
if (statement1 instanceof PsiExpressionStatement) {
return expressionStatementsMatch((PsiExpressionStatement)statement1, (PsiExpressionStatement)statement2);
}
if (statement1 instanceof PsiForeachStatement) {
return forEachStatementsMatch((PsiForeachStatement)statement1, (PsiForeachStatement)statement2);
}
if (statement1 instanceof PsiIfStatement) {
return ifStatementsMatch((PsiIfStatement)statement1, (PsiIfStatement)statement2);
}
if (statement1 instanceof PsiLabeledStatement) {
return labeledStatementsMatch((PsiLabeledStatement)statement1, (PsiLabeledStatement)statement2);
}
if (statement1 instanceof PsiReturnStatement) {
return returnStatementsMatch((PsiReturnStatement)statement1, (PsiReturnStatement)statement2);
}
if (statement1 instanceof PsiSwitchStatement) {
return switchBlocksMatch((PsiSwitchStatement)statement1, (PsiSwitchStatement)statement2);
}
if (statement1 instanceof PsiSwitchLabelStatementBase && statement2 instanceof PsiSwitchLabelStatementBase) {
return switchLabelStatementsMatch((PsiSwitchLabelStatementBase)statement1, (PsiSwitchLabelStatementBase)statement2);
}
if (statement1 instanceof PsiSynchronizedStatement) {
return synchronizedStatementsMatch((PsiSynchronizedStatement)statement1, (PsiSynchronizedStatement)statement2);
}
if (statement1 instanceof PsiThrowStatement) {
return throwStatementsMatch((PsiThrowStatement)statement1, (PsiThrowStatement)statement2);
}
if (statement1 instanceof PsiTryStatement) {
return tryStatementsMatch((PsiTryStatement)statement1, (PsiTryStatement)statement2);
}
final String text1 = statement1.getText();
final String text2 = statement2.getText();
return Match.exact(text1.equals(text2));
}
protected Match declarationStatementsMatch(@NotNull PsiDeclarationStatement statement1, @NotNull PsiDeclarationStatement statement2) {
final PsiElement[] elements1 = statement1.getDeclaredElements();
final PsiElement[] elements2 = statement2.getDeclaredElements();
if (elements1.length != elements2.length) {
return EXACT_MISMATCH;
}
for (int i = 0; i < elements1.length; i++) {
final PsiElement element1 = elements1[i];
final PsiElement element2 = elements2[i];
if (!(element1 instanceof PsiLocalVariable) ||
!(element2 instanceof PsiLocalVariable) ||
!localVariablesAreEquivalent((PsiLocalVariable)element1, (PsiLocalVariable)element2).isExactMatch()) {
return EXACT_MISMATCH;
}
}
return EXACT_MATCH;
}
protected Match localVariablesAreEquivalent(@NotNull PsiLocalVariable localVariable1,
@NotNull PsiLocalVariable localVariable2) {
return variablesAreEquivalent(localVariable1, localVariable2);
}
protected Match variablesAreEquivalent(@NotNull PsiVariable variable1, @NotNull PsiVariable variable2) {
if (!variableSignatureMatch(variable1, variable2)) {
return EXACT_MISMATCH;
}
PsiExpression initializer1 = variable1.getInitializer();
PsiExpression initializer2 = variable2.getInitializer();
return expressionsMatch(initializer1, initializer2).partialIfExactMismatch(initializer1, initializer2);
}
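// Compares type and modifier list (annotations included); matching variables are registered via
// markDeclarationsAsEquivalent so that later references to them compare equal.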
private boolean variableSignatureMatch(@NotNull PsiVariable variable1, @NotNull PsiVariable variable2) {
PsiType type1 = variable1.getType();
PsiType type2 = variable2.getType();
if (!typesAreEquivalent(type1, type2)) {
return false;
}
PsiModifierList modifierList1 = variable1.getModifierList();
PsiModifierList modifierList2 = variable2.getModifierList();
if (modifierList1 == null || modifierList2 == null) {
return modifierList1 == modifierList2;
}
if (!modifierListsAreEquivalent(modifierList1, modifierList2)) {
return false;
}
markDeclarationsAsEquivalent(variable1, variable2);
return true;
}
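// try statements must match block by block: try body, finally body, catch bodies, the resource list
// (both resource variables and resource expressions) and the catch parameters.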
protected Match tryStatementsMatch(@NotNull PsiTryStatement statement1, @NotNull PsiTryStatement statement2) {
final PsiCodeBlock tryBlock1 = statement1.getTryBlock();
final PsiCodeBlock tryBlock2 = statement2.getTryBlock();
if (!codeBlocksMatch(tryBlock1, tryBlock2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiCodeBlock finallyBlock1 = statement1.getFinallyBlock();
final PsiCodeBlock finallyBlock2 = statement2.getFinallyBlock();
if (!codeBlocksMatch(finallyBlock1, finallyBlock2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiCodeBlock[] catchBlocks1 = statement1.getCatchBlocks();
final PsiCodeBlock[] catchBlocks2 = statement2.getCatchBlocks();
if (catchBlocks1.length != catchBlocks2.length) {
return EXACT_MISMATCH;
}
for (int i = 0; i < catchBlocks2.length; i++) {
if (!codeBlocksMatch(catchBlocks1[i], catchBlocks2[i]).isExactMatch()) {
return EXACT_MISMATCH;
}
}
final PsiResourceList resourceList1 = statement1.getResourceList();
final PsiResourceList resourceList2 = statement2.getResourceList();
if (resourceList1 == null || resourceList2 == null) {
return Match.exact(resourceList1 == resourceList2);
}
if (resourceList1.getResourceVariablesCount() != resourceList2.getResourceVariablesCount()) {
return EXACT_MISMATCH;
}
final List<PsiResourceListElement> resources1 = PsiTreeUtil.getChildrenOfTypeAsList(resourceList1, PsiResourceListElement.class);
final List<PsiResourceListElement> resources2 = PsiTreeUtil.getChildrenOfTypeAsList(resourceList2, PsiResourceListElement.class);
for (int i = 0, size = resources1.size(); i < size; i++) {
final PsiResourceListElement resource1 = resources1.get(i);
final PsiResourceListElement resource2 = resources2.get(i);
if (resource1 instanceof PsiResourceVariable && resource2 instanceof PsiResourceVariable) {
if (!variablesAreEquivalent((PsiLocalVariable)resource1, (PsiLocalVariable)resource2).isExactMatch()) {
return EXACT_MISMATCH;
}
}
else if (resource1 instanceof PsiResourceExpression && resource2 instanceof PsiResourceExpression) {
if (!expressionsMatch(((PsiResourceExpression)resource1).getExpression(),
((PsiResourceExpression)resource2).getExpression()).isExactMatch()) {
return EXACT_MISMATCH;
}
}
else {
return EXACT_MISMATCH;
}
}
final PsiParameter[] catchParameters1 = statement1.getCatchBlockParameters();
final PsiParameter[] catchParameters2 = statement2.getCatchBlockParameters();
if (catchParameters1.length != catchParameters2.length) {
return EXACT_MISMATCH;
}
for (int i = 0; i < catchParameters2.length; i++) {
if (!variablesAreEquivalent(catchParameters2[i], catchParameters1[i]).isExactMatch()) {
return EXACT_MISMATCH;
}
}
return EXACT_MATCH;
}
public boolean typesAreEquivalent(@Nullable PsiType type1, @Nullable PsiType type2) {
if (type1 == null || type2 == null) {
return type1 == type2;
}
final String type1Text = type1.getCanonicalText();
final String type2Text = type2.getCanonicalText();
return type1Text.equals(type2Text);
}
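// Covers for, while and do-while loops; a plain for loop additionally requires matching initialization and
// update parts, then condition and body are merged via getComplexElementDecision.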
protected Match conditionalLoopStatementsMatch(@NotNull PsiConditionalLoopStatement statement1,
@NotNull PsiConditionalLoopStatement statement2) {
if (statement1 instanceof PsiForStatement) {
final PsiStatement initialization1 = ((PsiForStatement)statement1).getInitialization();
final PsiStatement initialization2 = ((PsiForStatement)statement2).getInitialization();
if (!statementsMatch(initialization1, initialization2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiStatement update1 = ((PsiForStatement)statement1).getUpdate();
final PsiStatement update2 = ((PsiForStatement)statement2).getUpdate();
if (!statementsMatch(update1, update2).isExactMatch()) {
return EXACT_MISMATCH;
}
}
final PsiExpression condition1 = statement1.getCondition();
final PsiExpression condition2 = statement2.getCondition();
final PsiStatement body1 = statement1.getBody();
final PsiStatement body2 = statement2.getBody();
final Match conditionEquivalence = expressionsMatch(condition1, condition2);
final Match bodyEquivalence = statementsMatch(body1, body2);
return getComplexElementDecision(bodyEquivalence, conditionEquivalence, body1, body2, condition1, condition2);
}
protected Match forEachStatementsMatch(@NotNull PsiForeachStatement statement1, @NotNull PsiForeachStatement statement2) {
final PsiExpression value1 = statement1.getIteratedValue();
final PsiExpression value2 = statement2.getIteratedValue();
if (!expressionsMatch(value1, value2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiParameter parameter1 = statement1.getIterationParameter();
final PsiParameter parameter2 = statement2.getIterationParameter();
final String name1 = parameter1.getName();
final String name2 = parameter2.getName();
if (!name1.equals(name2)) {
return EXACT_MISMATCH;
}
final PsiType type1 = parameter1.getType();
if (!type1.equals(parameter2.getType())) {
return EXACT_MISMATCH;
}
final PsiStatement body1 = statement1.getBody();
final PsiStatement body2 = statement2.getBody();
return statementsMatch(body1, body2).partialIfExactMismatch(body1, body2);
}
protected Match switchBlocksMatch(@NotNull PsiSwitchBlock switchBlock1, @NotNull PsiSwitchBlock switchBlock2) {
final PsiCodeBlock body1 = switchBlock1.getBody();
final PsiCodeBlock body2 = switchBlock2.getBody();
if (!codeBlocksAreEquivalent(body1, body2)) {
return EXACT_MISMATCH;
}
final PsiExpression switchExpression1 = switchBlock1.getExpression();
final PsiExpression switchExpression2 = switchBlock2.getExpression();
return expressionsMatch(switchExpression1, switchExpression2).partialIfExactMismatch(switchExpression1, switchExpression2);
}
protected Match assertStatementsMatch(@NotNull PsiAssertStatement statement1, @NotNull PsiAssertStatement statement2) {
final PsiExpression condition1 = statement1.getAssertCondition();
final PsiExpression condition2 = statement2.getAssertCondition();
final PsiExpression description1 = statement1.getAssertDescription();
final PsiExpression description2 = statement2.getAssertDescription();
final Match condEq = expressionsMatch(condition1, condition2);
final Match exprEq = expressionsMatch(description1, description2);
return getComplexElementDecision(condEq, exprEq, condition1, condition2, description1, description2);
}
protected Match synchronizedStatementsMatch(@NotNull PsiSynchronizedStatement statement1, @NotNull PsiSynchronizedStatement statement2) {
final PsiExpression lock1 = statement1.getLockExpression();
final PsiExpression lock2 = statement2.getLockExpression();
final PsiCodeBlock body1 = statement1.getBody();
final PsiCodeBlock body2 = statement2.getBody();
final Match lockEq = expressionsMatch(lock1, lock2);
final Match blockEq = codeBlocksMatch(body1, body2);
return getComplexElementDecision(blockEq, lockEq, body1, body2, lock1, lock2);
}
protected Match blockStatementsMatch(@NotNull PsiBlockStatement statement1, @NotNull PsiBlockStatement statement2) {
final PsiCodeBlock block1 = statement1.getCodeBlock();
final PsiCodeBlock block2 = statement2.getCodeBlock();
return codeBlocksMatch(block1, block2);
}
protected Match breakStatementsMatch(@NotNull PsiBreakStatement statement1, @NotNull PsiBreakStatement statement2) {
final PsiIdentifier identifier1 = statement1.getLabelIdentifier();
final PsiIdentifier identifier2 = statement2.getLabelIdentifier();
return matchLabels(identifier1, identifier2);
}
protected Match continueStatementsMatch(@NotNull PsiContinueStatement statement1, @NotNull PsiContinueStatement statement2) {
final PsiIdentifier identifier1 = statement1.getLabelIdentifier();
final PsiIdentifier identifier2 = statement2.getLabelIdentifier();
return matchLabels(identifier1, identifier2);
}
private static Match matchLabels(PsiIdentifier identifier1, PsiIdentifier identifier2) {
if (identifier1 == null || identifier2 == null) {
return Match.exact(identifier1 == identifier2);
}
final String text1 = identifier1.getText();
final String text2 = identifier2.getText();
return Match.exact(text1.equals(text2));
}
protected Match switchLabelStatementsMatch(@NotNull PsiSwitchLabelStatementBase statement1,
@NotNull PsiSwitchLabelStatementBase statement2) {
if (statement1.isDefaultCase() != statement2.isDefaultCase()) {
return EXACT_MISMATCH;
}
final boolean rule1 = statement1 instanceof PsiSwitchLabeledRuleStatement;
final boolean rule2 = statement2 instanceof PsiSwitchLabeledRuleStatement;
if (rule1 && rule2) {
final PsiSwitchLabeledRuleStatement switchLabeledRuleStatement1 = (PsiSwitchLabeledRuleStatement)statement1;
final PsiSwitchLabeledRuleStatement switchLabeledRuleStatement2 = (PsiSwitchLabeledRuleStatement)statement2;
if (!statementsAreEquivalent(switchLabeledRuleStatement1.getBody(), switchLabeledRuleStatement2.getBody())) {
return EXACT_MISMATCH;
}
}
else if (rule1 || rule2) {
return EXACT_MISMATCH;
}
final PsiExpressionList caseValues1 = statement1.getCaseValues();
final PsiExpressionList caseValues2 = statement2.getCaseValues();
if (caseValues1 == null || caseValues2 == null) {
return Match.exact(caseValues1 == caseValues2);
}
return expressionsAreEquivalent(caseValues1.getExpressions(), caseValues2.getExpressions(), true);
}
protected Match labeledStatementsMatch(@NotNull PsiLabeledStatement statement1, @NotNull PsiLabeledStatement statement2) {
return Match.exact(statement1.getName().equals(statement2.getName()));
}
public boolean codeBlocksAreEquivalent(@Nullable PsiCodeBlock block1, @Nullable PsiCodeBlock block2) {
return codeBlocksMatch(block1, block2).isExactMatch();
}
protected Match codeBlocksMatch(@Nullable PsiCodeBlock block1, @Nullable PsiCodeBlock block2) {
if (block1 == null || block2 == null) {
return Match.exact(block1 == block2);
}
final List<PsiStatement> statements1 = collectStatements(block1, new SmartList<>());
final List<PsiStatement> statements2 = collectStatements(block2, new SmartList<>());
final int size = statements1.size();
if (size != statements2.size()) {
return EXACT_MISMATCH;
}
for (int i = 0; i < size; i++) {
if (!statementsMatch(statements2.get(i), statements1.get(i)).isExactMatch()) {
return EXACT_MISMATCH;
}
}
return EXACT_MATCH;
}
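// Flattens nested block statements and drops empty statements, so '{ a; { b; } }' is compared as if it were 'a; b;'.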
private static List<PsiStatement> collectStatements(PsiCodeBlock codeBlock, List<PsiStatement> out) {
for (PsiStatement statement : codeBlock.getStatements()) {
if (statement instanceof PsiBlockStatement) {
final PsiBlockStatement blockStatement = (PsiBlockStatement)statement;
collectStatements(blockStatement.getCodeBlock(), out);
}
else if (!(statement instanceof PsiEmptyStatement)) {
out.add(statement);
}
}
return out;
}
protected Match ifStatementsMatch(@NotNull PsiIfStatement statement1, @NotNull PsiIfStatement statement2) {
final PsiExpression condition1 = statement1.getCondition();
final PsiExpression condition2 = statement2.getCondition();
final PsiStatement thenBranch1 = statement1.getThenBranch();
final PsiStatement thenBranch2 = statement2.getThenBranch();
final PsiStatement elseBranch1 = statement1.getElseBranch();
final PsiStatement elseBranch2 = statement2.getElseBranch();
final Match conditionEq = expressionsMatch(condition1, condition2);
final Match thenEq = statementsMatch(thenBranch1, thenBranch2);
final Match elseEq = statementsMatch(elseBranch1, elseBranch2);
if (conditionEq == EXACT_MATCH && thenEq == EXACT_MATCH && elseEq == EXACT_MATCH) {
return EXACT_MATCH;
}
return EXACT_MISMATCH;
}
protected Match expressionStatementsMatch(@NotNull PsiExpressionStatement statement1, @NotNull PsiExpressionStatement statement2) {
final PsiExpression expression1 = statement1.getExpression();
final PsiExpression expression2 = statement2.getExpression();
return expressionsMatch(expression1, expression2);
}
protected Match returnStatementsMatch(@NotNull PsiReturnStatement statement1, @NotNull PsiReturnStatement statement2) {
final PsiExpression returnValue1 = statement1.getReturnValue();
final PsiExpression returnValue2 = statement2.getReturnValue();
final Match match = expressionsMatch(returnValue1, returnValue2);
if (match.isExactMismatch()) {
return new Match(returnValue1, returnValue2);
}
return match;
}
protected Match throwStatementsMatch(@NotNull PsiThrowStatement statement1, @NotNull PsiThrowStatement statement2) {
final PsiExpression exception1 = statement1.getException();
final PsiExpression exception2 = statement2.getException();
return expressionsMatch(exception1, exception2);
}
protected Match expressionListStatementsMatch(@NotNull PsiExpressionListStatement statement1, @NotNull PsiExpressionListStatement statement2) {
final PsiExpression[] expressions1 = statement1.getExpressionList().getExpressions();
final PsiExpression[] expressions2 = statement2.getExpressionList().getExpressions();
return expressionsAreEquivalent(expressions1, expressions2, false);
}
public boolean expressionsAreEquivalent(@Nullable PsiExpression expression1, @Nullable PsiExpression expression2) {
return expressionsMatch(expression1, expression2).isExactMatch();
}
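// Identical references match immediately; otherwise parentheses are stripped, both expressions must be of the
// same concrete class and the matcher for that expression type is used.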
public Match expressionsMatch(@Nullable PsiExpression expression1, @Nullable PsiExpression expression2) {
if (expression1 == expression2) {
return EXACT_MATCH;
}
expression1 = ParenthesesUtils.stripParentheses(expression1);
expression2 = ParenthesesUtils.stripParentheses(expression2);
if (expression1 == null || expression2 == null) {
return Match.exact(expression1 == expression2);
}
if (expression1.getClass() != expression2.getClass()) {
return EXACT_MISMATCH;
}
if (expression1 instanceof PsiThisExpression) {
return thisExpressionsMatch((PsiThisExpression)expression1, (PsiThisExpression)expression2);
}
if (expression1 instanceof PsiSuperExpression) {
return EXACT_MATCH;
}
if (expression1 instanceof PsiLiteralExpression) {
return literalExpressionsMatch((PsiLiteralExpression)expression1, (PsiLiteralExpression)expression2);
}
if (expression1 instanceof PsiClassObjectAccessExpression) {
return classObjectAccessExpressionsMatch((PsiClassObjectAccessExpression)expression1,
(PsiClassObjectAccessExpression)expression2);
}
if (expression1 instanceof PsiReferenceExpression) {
return referenceExpressionsMatch((PsiReferenceExpression)expression1, (PsiReferenceExpression)expression2);
}
if (expression1 instanceof PsiMethodCallExpression) {
return methodCallExpressionsMatch((PsiMethodCallExpression)expression1, (PsiMethodCallExpression)expression2);
}
if (expression1 instanceof PsiNewExpression) {
return newExpressionsMatch((PsiNewExpression)expression1, (PsiNewExpression)expression2);
}
if (expression1 instanceof PsiArrayInitializerExpression) {
return arrayInitializerExpressionsMatch((PsiArrayInitializerExpression)expression1,
(PsiArrayInitializerExpression)expression2);
}
if (expression1 instanceof PsiTypeCastExpression) {
return typeCastExpressionsMatch((PsiTypeCastExpression)expression1, (PsiTypeCastExpression)expression2);
}
if (expression1 instanceof PsiArrayAccessExpression) {
return arrayAccessExpressionsMatch((PsiArrayAccessExpression)expression1, (PsiArrayAccessExpression)expression2);
}
if (expression1 instanceof PsiUnaryExpression) {
return unaryExpressionsMatch((PsiUnaryExpression)expression1, (PsiUnaryExpression)expression2);
}
if (expression1 instanceof PsiBinaryExpression) {
return binaryExpressionsMatch((PsiBinaryExpression)expression1, (PsiBinaryExpression)expression2);
}
if (expression1 instanceof PsiPolyadicExpression) {
return polyadicExpressionsMatch((PsiPolyadicExpression)expression1, (PsiPolyadicExpression)expression2);
}
if (expression1 instanceof PsiAssignmentExpression) {
return assignmentExpressionsMatch((PsiAssignmentExpression)expression1, (PsiAssignmentExpression)expression2);
}
if (expression1 instanceof PsiConditionalExpression) {
return conditionalExpressionsMatch((PsiConditionalExpression)expression1, (PsiConditionalExpression)expression2);
}
if (expression1 instanceof PsiInstanceOfExpression) {
return instanceOfExpressionsMatch((PsiInstanceOfExpression)expression1, (PsiInstanceOfExpression)expression2);
}
if (expression1 instanceof PsiLambdaExpression) {
return lambdaExpressionsMatch((PsiLambdaExpression)expression1, (PsiLambdaExpression)expression2);
}
if (expression1 instanceof PsiSwitchExpression) {
return switchBlocksMatch((PsiSwitchExpression)expression1, (PsiSwitchExpression)expression2);
}
return EXACT_MISMATCH;
}
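// Qualified 'this' expressions compare their qualifiers; unqualified ones only match when both occur in the
// same containing class.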
@NotNull
private Match thisExpressionsMatch(@NotNull PsiThisExpression thisExpression1, @NotNull PsiThisExpression thisExpression2) {
final PsiJavaCodeReferenceElement qualifier1 = thisExpression1.getQualifier();
final PsiJavaCodeReferenceElement qualifier2 = thisExpression2.getQualifier();
if (qualifier1 != null && qualifier2 != null) {
return javaCodeReferenceElementsMatch(qualifier1, qualifier2);
}
else if (qualifier1 != qualifier2){
return EXACT_MISMATCH;
}
final PsiClass containingClass1 = PsiTreeUtil.getParentOfType(thisExpression1, PsiClass.class);
final PsiClass containingClass2 = PsiTreeUtil.getParentOfType(thisExpression2, PsiClass.class);
return Match.exact(containingClass1 == containingClass2);
}
protected Match lambdaExpressionsMatch(PsiLambdaExpression expression1, PsiLambdaExpression expression2) {
final PsiParameterList parameterList1 = expression1.getParameterList();
final PsiParameterList parameterList2 = expression2.getParameterList();
final PsiParameter[] parameters1 = parameterList1.getParameters();
final PsiParameter[] parameters2 = parameterList2.getParameters();
if (parameters1.length != parameters2.length) {
return EXACT_MISMATCH;
}
for (int i = 0, length = parameters1.length; i < length; i++) {
if (!variablesAreEquivalent(parameters1[i], parameters2[i]).isExactMatch()) {
return EXACT_MISMATCH;
}
}
final PsiElement body1 = unwrapLambdaBody(expression1.getBody());
final PsiElement body2 = unwrapLambdaBody(expression2.getBody());
if (body1 instanceof PsiCodeBlock && body2 instanceof PsiCodeBlock) {
return codeBlocksMatch((PsiCodeBlock)body1, (PsiCodeBlock)body2);
}
else if (body1 instanceof PsiExpression && body2 instanceof PsiExpression) {
return expressionsMatch((PsiExpression)body1, (PsiExpression)body2);
}
return EXACT_MISMATCH;
}
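// Unwraps single-statement lambda bodies so that 'x -> { return f(x); }' compares equal to 'x -> f(x)'.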
private static PsiElement unwrapLambdaBody(PsiElement element) {
while (element instanceof PsiCodeBlock) {
final PsiCodeBlock codeBlock = (PsiCodeBlock)element;
final PsiStatement[] statements = codeBlock.getStatements();
if (statements.length != 1) {
break;
}
final PsiStatement statement = statements[0];
if (statement instanceof PsiReturnStatement) {
return ((PsiReturnStatement)statement).getReturnValue();
}
else if (statement instanceof PsiExpressionStatement) {
return ((PsiExpressionStatement)statement).getExpression();
}
else if (statement instanceof PsiBlockStatement) {
element = ((PsiBlockStatement)statement).getCodeBlock();
}
else {
break;
}
}
return element;
}
protected Match literalExpressionsMatch(PsiLiteralExpression expression1, PsiLiteralExpression expression2) {
if (PsiType.NULL.equals(expression1.getType()) && PsiType.NULL.equals(expression2.getType())) {
return EXACT_MATCH;
}
final Object value1 = expression1.getValue();
final Object value2 = expression2.getValue();
return (value1 == null || value2 == null)
? EXACT_MISMATCH // broken code
: Match.exact(value1.equals(value2));
}
protected Match classObjectAccessExpressionsMatch(PsiClassObjectAccessExpression expression1,
PsiClassObjectAccessExpression expression2) {
final PsiTypeElement operand1 = expression1.getOperand();
final PsiTypeElement operand2 = expression2.getOperand();
return typeElementsAreEquivalent(operand1, operand2);
}
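// Both references must resolve to the same (or an equivalent) declaration; static members and classes then match
// unconditionally, while instance member accesses also compare their qualifiers and reject length/clone() access
// on incompatible array types.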
protected Match referenceExpressionsMatch(PsiReferenceExpression referenceExpression1, PsiReferenceExpression referenceExpression2) {
final PsiElement element1 = referenceExpression1.resolve();
final PsiElement element2 = referenceExpression2.resolve();
if (element1 != null) {
if (element2 == null || !equivalentDeclarations(element1, element2) && !element1.equals(element2)) {
return EXACT_MISMATCH;
}
}
else {
return EXACT_MISMATCH; // incomplete code
}
if (element1 instanceof PsiMember) {
final PsiMember member1 = (PsiMember)element1;
if (member1.hasModifierProperty(PsiModifier.STATIC)) {
return EXACT_MATCH;
}
if (member1 instanceof PsiClass) {
return EXACT_MATCH;
}
}
else {
return EXACT_MATCH;
}
final PsiExpression qualifier1 = ParenthesesUtils.stripParentheses(referenceExpression1.getQualifierExpression());
final PsiExpression qualifier2 = ParenthesesUtils.stripParentheses(referenceExpression2.getQualifierExpression());
if (qualifier1 != null && !(qualifier1 instanceof PsiThisExpression || qualifier1 instanceof PsiSuperExpression)) {
if (qualifier2 == null) {
return EXACT_MISMATCH;
}
Match match = expressionsMatch(qualifier1, qualifier2);
if (!match.isExactMatch() && PsiUtil.isArrayClass(((PsiMember)element1).getContainingClass()) &&
!((GenericsUtil.getLeastUpperBound(qualifier1.getType(), qualifier2.getType(),
referenceExpression1.getManager())) instanceof PsiArrayType)) {
// access to the member (length or clone()) of incompatible arrays
return EXACT_MISMATCH;
}
if (match.isExactMismatch()) {
return new Match(qualifier1, qualifier2);
}
return match;
}
else {
if (qualifier2 != null && !(qualifier2 instanceof PsiThisExpression || qualifier2 instanceof PsiSuperExpression)) {
return EXACT_MISMATCH;
}
}
return EXACT_MATCH;
}
protected Match instanceOfExpressionsMatch(PsiInstanceOfExpression instanceOfExpression1, PsiInstanceOfExpression instanceOfExpression2) {
final PsiExpression operand1 = instanceOfExpression1.getOperand();
final PsiExpression operand2 = instanceOfExpression2.getOperand();
if (!expressionsMatch(operand1, operand2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiTypeElement typeElement1 = instanceOfExpression1.getCheckType();
final PsiTypeElement typeElement2 = instanceOfExpression2.getCheckType();
return typeElementsAreEquivalent(typeElement1, typeElement2);
}
protected Match typeElementsAreEquivalent(PsiTypeElement typeElement1, PsiTypeElement typeElement2) {
if (typeElement1 == null || typeElement2 == null) {
return Match.exact(typeElement1 == typeElement2);
}
final PsiType type1 = typeElement1.getType();
final PsiType type2 = typeElement2.getType();
return Match.exact(typesAreEquivalent(type1, type2));
}
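// Matches the method reference first, then the argument lists; a partial diff confined to the last argument is
// rejected when exactly one call passes an array there (explicit array vs. varargs).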
protected Match methodCallExpressionsMatch(@NotNull PsiMethodCallExpression methodCallExpression1,
@NotNull PsiMethodCallExpression methodCallExpression2) {
final PsiReferenceExpression methodExpression1 = methodCallExpression1.getMethodExpression();
final PsiReferenceExpression methodExpression2 = methodCallExpression2.getMethodExpression();
Match match = expressionsMatch(methodExpression1, methodExpression2);
if (match.isExactMismatch()) {
return EXACT_MISMATCH;
}
final PsiExpression[] args1 = methodCallExpression1.getArgumentList().getExpressions();
final PsiExpression[] args2 = methodCallExpression2.getArgumentList().getExpressions();
match = match.combine(expressionsAreEquivalent(args1, args2, false));
if (args1.length != 0 && match.isPartialMatch()) {
final PsiElement leftDiff = match.getLeftDiff();
final PsiExpression lastArg = args1[args1.length - 1];
if (Comparing.equal(leftDiff, lastArg)) {
final PsiType type1 = lastArg.getType();
final PsiType type2 = args2[args2.length - 1].getType();
if (type2 instanceof PsiArrayType && !(type1 instanceof PsiArrayType)) {
return EXACT_MISMATCH;
}
if (type1 instanceof PsiArrayType && !(type2 instanceof PsiArrayType)) {
return EXACT_MISMATCH;
}
}
}
return match;
}
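// Compares the class reference (or the overall type when both are array creations without one), array dimensions,
// array initializer, resolved constructor, qualifier, anonymous class body and finally the argument lists.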
protected Match newExpressionsMatch(@NotNull PsiNewExpression newExpression1, @NotNull PsiNewExpression newExpression2) {
final PsiJavaCodeReferenceElement classReference1 = newExpression1.getClassReference();
final PsiJavaCodeReferenceElement classReference2 = newExpression2.getClassReference();
if (classReference1 != null && classReference2 != null) {
if (javaCodeReferenceElementsMatch(classReference1, classReference2) == EXACT_MISMATCH) {
return EXACT_MISMATCH;
}
}
else if (classReference1 != classReference2) {
return EXACT_MISMATCH;
}
else if (!typesAreEquivalent(newExpression1.getType(), newExpression2.getType())) {
// both classReference1 and classReference2 are null;
// the types could still differ, e.g. new int[0] vs. new long[0]
return EXACT_MISMATCH;
}
final PsiExpression[] arrayDimensions1 = newExpression1.getArrayDimensions();
final PsiExpression[] arrayDimensions2 = newExpression2.getArrayDimensions();
if (!expressionsAreEquivalent(arrayDimensions1, arrayDimensions2, false).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiArrayInitializerExpression arrayInitializer1 =
newExpression1.getArrayInitializer();
final PsiArrayInitializerExpression arrayInitializer2 =
newExpression2.getArrayInitializer();
if (!expressionsMatch(arrayInitializer1, arrayInitializer2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiMethod constructor1 = newExpression1.resolveConstructor();
final PsiMethod constructor2 = newExpression2.resolveConstructor();
if (!Comparing.equal(constructor1, constructor2)) {
return EXACT_MISMATCH;
}
final PsiExpression qualifier1 = newExpression1.getQualifier();
final PsiExpression qualifier2 = newExpression2.getQualifier();
if (!expressionsMatch(qualifier1, qualifier2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiExpressionList argumentList1 = newExpression1.getArgumentList();
final PsiExpression[] args1 = argumentList1 == null ? null : argumentList1.getExpressions();
final PsiExpressionList argumentList2 = newExpression2.getArgumentList();
final PsiExpression[] args2 = argumentList2 == null ? null : argumentList2.getExpressions();
PsiAnonymousClass anonymousClass1 = newExpression1.getAnonymousClass();
PsiAnonymousClass anonymousClass2 = newExpression2.getAnonymousClass();
if (anonymousClass1 != null || anonymousClass2 != null) {
if (anonymousClass1 != null && anonymousClass2 != null) {
return classesMatch(anonymousClass1, anonymousClass2);
}
return EXACT_MISMATCH;
}
return expressionsAreEquivalent(args1, args2, false);
}
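// Anonymous classes: the base class references must match, then the members are sorted and compared in two
// passes - signatures first (also registering equivalent declarations), bodies and field initializers second.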
private Match classesMatch(PsiAnonymousClass class1, PsiAnonymousClass class2) {
PsiJavaCodeReferenceElement baseClass1 = class1.getBaseClassReference();
PsiJavaCodeReferenceElement baseClass2 = class2.getBaseClassReference();
Match match = javaCodeReferenceElementsMatch(baseClass1, baseClass2);
if (!match.isExactMatch()) return EXACT_MISMATCH;
List<PsiMember> children1 = PsiTreeUtil.getChildrenOfTypeAsList(class1, PsiMember.class);
List<PsiMember> children2 = PsiTreeUtil.getChildrenOfTypeAsList(class2, PsiMember.class);
int size = children1.size();
if (size != children2.size()) return EXACT_MISMATCH;
Collections.sort(children1, MEMBER_COMPARATOR);
Collections.sort(children2, MEMBER_COMPARATOR);
for (int i = 0; i < size; i++) {
// first pass checks only signatures for accurate reference tracking
PsiElement child1 = children1.get(i);
PsiElement child2 = children2.get(i);
if (child1 instanceof PsiMethod && child2 instanceof PsiMethod) {
if (!methodSignaturesMatch((PsiMethod)child1, (PsiMethod)child2)) {
return EXACT_MISMATCH;
}
} else if (child1 instanceof PsiField && child2 instanceof PsiField) {
if (!variableSignatureMatch((PsiField)child1, (PsiField)child2)) {
return EXACT_MISMATCH;
}
}
}
for (int i = 0; i < size; i++) {
PsiElement child1 = children1.get(i);
PsiElement child2 = children2.get(i);
if (child1 instanceof PsiMethod && child2 instanceof PsiMethod) {
// method signature already checked
if (!codeBlocksAreEquivalent(((PsiMethod)child1).getBody(), ((PsiMethod)child2).getBody())) return EXACT_MISMATCH;
} else if (child1 instanceof PsiField && child2 instanceof PsiField) {
// field signature already checked
if (!expressionsAreEquivalent(((PsiField)child1).getInitializer(), ((PsiField)child2).getInitializer())) return EXACT_MISMATCH;
} else if (child1 instanceof PsiClassInitializer && child2 instanceof PsiClassInitializer) {
if (!classInitializersMatch((PsiClassInitializer)child1, (PsiClassInitializer)child2).isExactMatch()) return EXACT_MISMATCH;
} else if (!PsiEquivalenceUtil.areElementsEquivalent(child1, child2)) {
return EXACT_MISMATCH;
}
}
return EXACT_MATCH;
}
private Match classInitializersMatch(PsiClassInitializer classInitializer1, PsiClassInitializer classInitializer2) {
if (!modifierListsAreEquivalent(classInitializer1.getModifierList(), classInitializer2.getModifierList())) {
return EXACT_MISMATCH;
}
return codeBlocksMatch(classInitializer1.getBody(), classInitializer2.getBody());
}
private boolean methodSignaturesMatch(PsiMethod method1, PsiMethod method2) {
if (!method1.getName().equals(method2.getName()) || !typesAreEquivalent(method1.getReturnType(), method2.getReturnType())) {
return false;
}
PsiParameter[] parameters1 = method1.getParameterList().getParameters();
PsiParameter[] parameters2 = method2.getParameterList().getParameters();
if (parameters1.length != parameters2.length) {
return false;
}
for (int j = 0; j < parameters1.length; j++) {
if (!variableSignatureMatch(parameters1[j], parameters2[j])) {
return false;
}
}
PsiClassType[] thrownTypes1 = method1.getThrowsList().getReferencedTypes();
PsiClassType[] thrownTypes2 = method2.getThrowsList().getReferencedTypes();
if (thrownTypes1.length != thrownTypes2.length) {
return false;
}
for (int i = 0; i < thrownTypes1.length; i++) {
if (!typesAreEquivalent(thrownTypes1[i], thrownTypes2[i])) {
return false;
}
}
markDeclarationsAsEquivalent(method1, method2);
return true;
}
private Match javaCodeReferenceElementsMatch(@NotNull PsiJavaCodeReferenceElement classReference1,
@NotNull PsiJavaCodeReferenceElement classReference2) {
final PsiType[] parameters1 = classReference1.getTypeParameters();
final PsiType[] parameters2 = classReference2.getTypeParameters();
if (parameters1.length != parameters2.length) {
return EXACT_MISMATCH;
}
for (int i = 0; i < parameters1.length; i++) {
if (!typesAreEquivalent(parameters1[i], parameters2[i])) {
return EXACT_MISMATCH;
}
}
final PsiElement target1 = classReference1.resolve();
final PsiElement target2 = classReference2.resolve();
return (target1 == null && target2 == null)
? Match.exact(classReference1.getText().equals(classReference2.getText()))
: Match.exact(target1 == target2);
}
protected Match arrayInitializerExpressionsMatch(@NotNull PsiArrayInitializerExpression arrayInitializerExpression1,
@NotNull PsiArrayInitializerExpression arrayInitializerExpression2) {
final PsiExpression[] initializers1 = arrayInitializerExpression1.getInitializers();
final PsiExpression[] initializers2 = arrayInitializerExpression2.getInitializers();
return expressionsAreEquivalent(initializers1, initializers2, false);
}
protected Match typeCastExpressionsMatch(@NotNull PsiTypeCastExpression typeCastExpression1, @NotNull PsiTypeCastExpression typeCastExpression2) {
final PsiTypeElement typeElement1 = typeCastExpression1.getCastType();
final PsiTypeElement typeElement2 = typeCastExpression2.getCastType();
if (!typeElementsAreEquivalent(typeElement1, typeElement2).isExactMatch()) {
return EXACT_MISMATCH;
}
final PsiExpression operand1 = typeCastExpression1.getOperand();
final PsiExpression operand2 = typeCastExpression2.getOperand();
if (operand1 instanceof PsiFunctionalExpression || operand2 instanceof PsiFunctionalExpression) {
return EXACT_MISMATCH;
}
return expressionsMatch(operand1, operand2).partialIfExactMismatch(operand1, operand2);
}
protected Match arrayAccessExpressionsMatch(@NotNull PsiArrayAccessExpression arrayAccessExpression1, @NotNull PsiArrayAccessExpression arrayAccessExpression2) {
final PsiExpression arrayExpression1 = arrayAccessExpression1.getArrayExpression();
final PsiExpression arrayExpression2 = arrayAccessExpression2.getArrayExpression();
final PsiExpression indexExpression1 = arrayAccessExpression1.getIndexExpression();
final PsiExpression indexExpression2 = arrayAccessExpression2.getIndexExpression();
final Match arrayExpressionEq = expressionsMatch(arrayExpression1, arrayExpression2);
if (arrayExpressionEq != EXACT_MATCH) {
return EXACT_MISMATCH;
}
return expressionsMatch(indexExpression1, indexExpression2).partialIfExactMismatch(indexExpression1, indexExpression2);
}
protected Match unaryExpressionsMatch(@NotNull PsiUnaryExpression unaryExpression1, @NotNull PsiUnaryExpression unaryExpression2) {
final IElementType tokenType1 = unaryExpression1.getOperationTokenType();
if (!tokenType1.equals(unaryExpression2.getOperationTokenType())) {
return EXACT_MISMATCH;
}
final PsiExpression operand1 = unaryExpression1.getOperand();
final PsiExpression operand2 = unaryExpression2.getOperand();
return expressionsMatch(operand1, operand2);
}
protected Match polyadicExpressionsMatch(@NotNull PsiPolyadicExpression polyadicExpression1,
@NotNull PsiPolyadicExpression polyadicExpression2) {
if (!polyadicExpression1.getOperationTokenType().equals(polyadicExpression2.getOperationTokenType())) {
return EXACT_MISMATCH;
}
return expressionsAreEquivalent(polyadicExpression1.getOperands(), polyadicExpression2.getOperands(), false);
}
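// Flipped relational operators ('a < b' vs. 'b > a') are treated as equivalent; for commutative operators the
// operands may match in either order.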
protected Match binaryExpressionsMatch(@NotNull PsiBinaryExpression binaryExpression1, @NotNull PsiBinaryExpression binaryExpression2) {
final IElementType tokenType1 = binaryExpression1.getOperationTokenType();
final IElementType tokenType2 = binaryExpression2.getOperationTokenType();
final PsiExpression left1 = binaryExpression1.getLOperand();
final PsiExpression left2 = binaryExpression2.getLOperand();
final PsiExpression right1 = binaryExpression1.getROperand();
final PsiExpression right2 = binaryExpression2.getROperand();
if (right1 == null || right2 == null) {
return Match.exact(right1 == right2);
}
if (!tokenType1.equals(tokenType2)) {
// process matches like "a < b" and "b > a"
final RelationType rel1 = RelationType.fromElementType(tokenType1);
final RelationType rel2 = RelationType.fromElementType(tokenType2);
if(rel1 != null && rel2 != null && rel1.getFlipped() == rel2) {
return expressionsAreEquivalent(new PsiExpression[] {left1, right1}, new PsiExpression[] {right2, left2}, false);
}
return EXACT_MISMATCH;
}
return expressionsAreEquivalent(new PsiExpression[] {left1, right1}, new PsiExpression[] {left2, right2},
ParenthesesUtils.isCommutativeOperation(binaryExpression1));
}
protected Match assignmentExpressionsMatch(@NotNull PsiAssignmentExpression assignmentExpression1, @NotNull PsiAssignmentExpression assignmentExpression2) {
final IElementType tokenType1 = assignmentExpression1.getOperationTokenType();
if (!tokenType1.equals(assignmentExpression2.getOperationTokenType())) {
return EXACT_MISMATCH;
}
final PsiExpression lhs1 = assignmentExpression1.getLExpression();
final PsiExpression lhs2 = assignmentExpression2.getLExpression();
final PsiExpression rhs1 = assignmentExpression1.getRExpression();
final PsiExpression rhs2 = assignmentExpression2.getRExpression();
final Match leftEq = expressionsMatch(lhs1, lhs2);
final Match rightEq = expressionsMatch(rhs1, rhs2);
return getComplexElementDecision(leftEq, rightEq, lhs1, lhs2, rhs1, rhs2);
}
protected Match conditionalExpressionsMatch(@NotNull PsiConditionalExpression conditionalExpression1, @NotNull PsiConditionalExpression conditionalExpression2) {
final PsiExpression condition1 = conditionalExpression1.getCondition();
final PsiExpression condition2 = conditionalExpression2.getCondition();
final PsiExpression thenExpression1 = conditionalExpression1.getThenExpression();
final PsiExpression thenExpression2 = conditionalExpression2.getThenExpression();
final PsiExpression elseExpression1 = conditionalExpression1.getElseExpression();
final PsiExpression elseExpression2 = conditionalExpression2.getElseExpression();
if (expressionsMatch(condition1, condition2) == EXACT_MATCH &&
expressionsMatch(thenExpression1, thenExpression2) == EXACT_MATCH &&
expressionsMatch(elseExpression1, elseExpression2) == EXACT_MATCH) {
return EXACT_MATCH;
}
return EXACT_MISMATCH;
}
protected Match expressionsAreEquivalent(PsiExpression @Nullable [] expressions1, PsiExpression @Nullable [] expressions2, boolean inAnyOrder) {
if (expressions1 == null || expressions2 == null) {
return Match.exact(expressions1 == expressions2);
}
if (expressions1.length != expressions2.length) {
return EXACT_MISMATCH;
}
if (inAnyOrder) {
Arrays.sort(expressions1, EXPRESSION_COMPARATOR);
Arrays.sort(expressions2, EXPRESSION_COMPARATOR);
}
Match incompleteMatch = null;
for (int i = 0; i < expressions1.length; i++) {
final Match match = expressionsMatch(expressions1[i], expressions2[i]);
if (incompleteMatch == null && match.isPartialMatch()) {
incompleteMatch = match;
}
else if (!match.isExactMatch()) {
if (incompleteMatch != null) {
return EXACT_MISMATCH;
}
incompleteMatch = match.partialIfExactMismatch(expressions1[i], expressions2[i]);
}
}
return incompleteMatch == null ? EXACT_MATCH : incompleteMatch;
}
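// Merges two sub-results: two exact matches give an exact match, an exact mismatch in exactly one of them
// (with the other matching exactly) gives a partial match recording that pair as the diff, and everything
// else - including partial inputs - is an exact mismatch.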
@NotNull
private static Match getComplexElementDecision(Match equivalence1,
Match equivalence2,
PsiElement left1,
PsiElement right1,
PsiElement left2,
PsiElement right2) {
if (equivalence2 == EXACT_MATCH) {
if (equivalence1 == EXACT_MATCH) {
return EXACT_MATCH;
}
else if (equivalence1 == EXACT_MISMATCH) {
return new Match(left1, right1);
}
}
else if (equivalence2 == EXACT_MISMATCH) {
if (equivalence1 == EXACT_MISMATCH) {
return EXACT_MISMATCH;
}
else if (equivalence1 == EXACT_MATCH) {
return new Match(left2, right2);
}
}
return EXACT_MISMATCH;
}
private static boolean modifierListsAreEquivalent(PsiModifierList modifierList1, PsiModifierList modifierList2) {
for (String modifier : PsiModifier.MODIFIERS) {
if (PsiModifier.FINAL.equals(modifier)) continue; // final does not change the semantics of the code.
if (modifierList1.hasModifierProperty(modifier) != modifierList2.hasModifierProperty(modifier)) {
return false;
}
}
return AnnotationUtil.equal(modifierList1.getAnnotations(), modifierList2.getAnnotations());
}
protected void markDeclarationsAsEquivalent(PsiElement element1, PsiElement element2) {}
protected boolean equivalentDeclarations(PsiElement element1, PsiElement element2) {
return false;
}
}
|
|
/*
* Copyright (c) 2013, jEVETools
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the author nor the names of the contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.jevetools.unmarshal.python.api.impl.test; //NOPMD
import static com.jevetools.unmarshal.python.api.impl.test.OrderingComparison.comparesEqualTo;
import static com.jevetools.unmarshal.python.api.impl.test.OrderingComparison.greaterThan;
import static com.jevetools.unmarshal.python.api.impl.test.OrderingComparison.lessThan;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
import org.junit.Test;
import com.jevetools.unmarshal.python.api.PyBase;
import com.jevetools.unmarshal.python.api.PyBool;
import com.jevetools.unmarshal.python.api.PyFactory;
import com.jevetools.unmarshal.python.api.PyNone;
import com.jevetools.unmarshal.python.api.PyVisitor;
/**
* Copyright (c) 2013, jEVETools.
*
* All rights reserved.
*
* @version 0.0.1
* @since 0.0.1
*/
public final class PyBoolTest
extends AbstractPyBaseTest
{
/**
* Test method for {@link PyBool#value()}.
*/
@Test
public void testDefault()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
final PyBool testSubject = builder.build();
assertThat(testSubject, not(nullValue()));
assertThat(testSubject.value(), equalTo(false));
assertThat(testSubject, equalTo(getPyFactory().getPyBoolFalse()));
assertThat(testSubject.toString(), equalTo(Boolean.toString(false)));
}
/**
* Test method for {@link PyBool#value()}.
*/
@Test
public void testValueOne()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
builder.value(true);
final PyBool testSubject = builder.build();
assertThat(testSubject, not(nullValue()));
assertThat(testSubject.value(), equalTo(true));
assertThat(testSubject, equalTo(getPyFactory().getPyBoolTrue()));
assertThat(testSubject.toString(), equalTo(Boolean.toString(true)));
}
/**
* Test method for {@link PyBool#value()}.
*/
@Test
public void testValueTwo()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
builder.value(false);
final PyBool testSubject = builder.build();
assertThat(testSubject, not(nullValue()));
assertThat(testSubject.value(), equalTo(false));
assertThat(testSubject.toString(), equalTo(Boolean.toString(false)));
}
/**
* Test method for {@link PyBool#hashCode()}.
*/
@Test
public void testHashCode()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
builder.value(false);
final PyBool testSubject11 = builder.build();
final PyBool testSubject12 = builder.build();
builder.value(true);
final PyBool testSubject21 = builder.build();
final PyBool testSubject22 = builder.build();
assertThat(testSubject11.hashCode(), equalTo(testSubject12.hashCode()));
assertThat(testSubject21.hashCode(), equalTo(testSubject22.hashCode()));
assertThat(testSubject11.hashCode(), not(equalTo(testSubject22.hashCode())));
}
/**
* Test method for {@link PyBool#getPyType()}.
*/
@Test
public void testGetPyType()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
final PyBool testSubject = builder.build();
assertThat(testSubject, not(nullValue()));
assertThat(testSubject.isBool(), not(false));
assertThat(testSubject.getPyType(), equalTo(PyBase.PyType.BOOL));
assertThat(testSubject.asBool(), not(nullValue()));
}
/**
* Test method for {@link PyBool#equals()}.
*/
@Test
public void testEquals()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
final PyFactory.PyNoneBuilder builderNone = getPyFactory()
.getPyNoneBuilder();
builder.value(false);
final PyNone none = builderNone.build();
final PyBool testSubject11 = builder.build();
final PyBool testSubject12 = builder.build();
builder.value(true);
final PyBool testSubject21 = builder.build();
final PyBool testSubject22 = builder.build();
assertThat(testSubject11, equalTo(testSubject12));
assertThat(testSubject21, equalTo(testSubject22));
assertThat(testSubject11, not(equalTo(testSubject22)));
assertThat(((PyBase) testSubject11), not(equalTo(((PyBase) none))));
}
/**
* Test method for {@link PyBool#accept(PyVisitor)}.
*/
@Test
public void testAccept()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
final PyBool testSubject = builder.build();
assertThat(testSubject, not(nullValue()));
final PyVisitor visitor = new TestVisitor(testSubject);
testSubject.accept(visitor);
}
/**
* Test method for {@link PyBool#compareTo(PyBase)}.
*/
@Test
public void testCompareTo()
{
final PyFactory.PyBoolBuilder builder = getPyFactory().getPyBoolBuilder();
final PyFactory.PyNoneBuilder builderNone = getPyFactory()
.getPyNoneBuilder();
builder.value(false);
final PyNone none = builderNone.build();
final PyBool testSubject11 = builder.build();
final PyBool testSubject12 = builder.build();
builder.value(true);
final PyBool testSubject21 = builder.build();
final PyBool testSubject22 = builder.build();
assertThat(((PyBase) testSubject11),
comparesEqualTo((PyBase) testSubject12));
assertThat(((PyBase) testSubject21),
comparesEqualTo((PyBase) testSubject22));
assertThat(((PyBase) testSubject11), lessThan((PyBase) testSubject21));
assertThat(((PyBase) testSubject21), greaterThan((PyBase) testSubject11));
assertThat(((PyBase) testSubject11), not(comparesEqualTo(((PyBase) none))));
assertThat(((PyBase) testSubject11), not(comparesEqualTo(((PyBase) null))));
}
}
|
|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.integrationtests;
import org.drools.compiler.compiler.DroolsError;
import org.drools.core.ClockType;
import org.drools.core.SessionConfiguration;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.time.SessionPseudoClock;
import org.jbpm.integrationtests.test.Message;
import org.jbpm.process.instance.InternalProcessRuntime;
import org.jbpm.process.instance.ProcessInstance;
import org.jbpm.process.instance.impl.demo.DoNothingWorkItemHandler;
import org.jbpm.test.util.AbstractBaseTest;
import org.junit.Test;
import org.kie.api.runtime.conf.ClockTypeOption;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class ProcessTimerTest extends AbstractBaseTest {
private static final Logger logger = LoggerFactory.getLogger(ProcessTimerTest.class);
@Test
public void testSimpleProcess() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.jbpm\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <imports>\n" +
" <import name=\"org.jbpm.integrationtests.test.Message\" />\n" +
" </imports>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <end id=\"2\" name=\"End\" />\n" +
" <timerNode id=\"3\" name=\"Timer\" delay=\"800ms\" period=\"200ms\" />\n" +
" <actionNode id=\"4\" name=\"Action\" >\n" +
" <action type=\"expression\" dialect=\"java\" >System.out.println(\"Triggered\");\n" +
"myList.add( new Message() );\n" +
"insert( new Message() );\n" +
"</action>\n" +
" </actionNode>\n" +
" <milestone id=\"5\" name=\"Wait\" >\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >Number( intValue >= 5 ) from accumulate ( m: Message( ), count( m ) )</constraint>\n" +
" </milestone>\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"5\" to=\"2\" />\n" +
" <connection from=\"1\" to=\"3\" />\n" +
" <connection from=\"3\" to=\"4\" />\n" +
" <connection from=\"4\" to=\"5\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
if (!builder.getErrors().isEmpty()) {
for (DroolsError error: builder.getErrors().getErrors()) {
logger.error(error.toString());
}
fail("Could not build process");
}
StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<Message> myList = new ArrayList<Message>();
session.setGlobal("myList", myList);
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer");
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
// test that the delay works
try {
Thread.sleep(400);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(0, myList.size());
// test that the period works
try {
Thread.sleep(1300);
} catch (InterruptedException e) {
// do nothing
}
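// with an 800ms delay and a 200ms period the timer should have fired at roughly 800, 1000, 1200, 1400 and 1600ms
// by now (~1.7s elapsed), i.e. 5 times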
assertEquals(5, myList.size());
try {
Thread.sleep(200);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(ProcessInstance.STATE_COMPLETED, processInstance.getState());
session.dispose();
}
@Test
public void testVariableSimpleProcess() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.jbpm\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <imports>\n" +
" <import name=\"org.jbpm.integrationtests.test.Message\" />\n" +
" </imports>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" <variables>\n" +
" <variable name=\"x\" >\n" +
" <type name=\"org.drools.core.process.core.datatype.impl.type.IntegerDataType\" />\n" +
" </variable>\n" +
" <variable name=\"y\" >\n" +
" <type name=\"org.drools.core.process.core.datatype.impl.type.IntegerDataType\" />\n" +
" </variable>\n" +
" </variables>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <end id=\"2\" name=\"End\" />\n" +
" <timerNode id=\"3\" name=\"Timer\" delay=\"#{x}ms\" period=\"#{y}ms\" />\n" +
" <actionNode id=\"4\" name=\"Action\" >\n" +
" <action type=\"expression\" dialect=\"java\" >System.out.println(\"Triggered\");\n" +
"myList.add( new Message() );\n" +
"insert( new Message() );\n" +
"</action>\n" +
" </actionNode>\n" +
" <milestone id=\"5\" name=\"Wait\" >\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >Number( intValue >= 5 ) from accumulate ( m: Message( ), count( m ) )</constraint>\n" +
" </milestone>\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"5\" to=\"2\" />\n" +
" <connection from=\"1\" to=\"3\" />\n" +
" <connection from=\"3\" to=\"4\" />\n" +
" <connection from=\"4\" to=\"5\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
if (!builder.getErrors().isEmpty()) {
for (DroolsError error: builder.getErrors().getErrors()) {
logger.error(error.toString());
}
fail("Could not build process");
}
StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<Message> myList = new ArrayList<Message>();
session.setGlobal("myList", myList);
Map<String, Object> params = new HashMap<String, Object>();
params.put("x", 800);
params.put("y", 200);
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer", params);
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
// test that the delay works
try {
Thread.sleep(400);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(0, myList.size());
// test that the period works
try {
Thread.sleep(1300);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(5, myList.size());
try {
Thread.sleep(200);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(ProcessInstance.STATE_COMPLETED, processInstance.getState());
session.dispose();
}
@Test
public void testIncorrectTimerNode() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <end id=\"2\" name=\"End\" />\n" +
" <timerNode id=\"3\" name=\"Timer\" delay=\"800msdss\" period=\"200mssds\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"3\" />\n" +
" <connection from=\"3\" to=\"2\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
assertEquals(2, builder.getErrors().size());
for (DroolsError error: builder.getErrors().getErrors()) {
logger.error(error.toString());
}
}
@Test
public void testOnEntryTimerExecuted() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <milestone id=\"2\" name=\"Wait\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"300\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >eval(false)</constraint>\n" +
" </milestone>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<String> myList = new ArrayList<String>();
session.setGlobal("myList", myList);
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer");
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
try {
Thread.sleep(400);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(1, myList.size());
session.dispose();
}
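// Same as the previous test, but the timer delay is resolved at runtime from the
// process variable "x" (passed as a start parameter) via the #{x} expression.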
@Test
public void testOnEntryTimerVariableExecuted() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" <variables>\n" +
" <variable name=\"x\" >\n" +
" <type name=\"org.drools.core.process.core.datatype.impl.type.IntegerDataType\" />\n" +
" </variable>\n" +
" </variables>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <milestone id=\"2\" name=\"Wait\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"#{x}\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >eval(false)</constraint>\n" +
" </milestone>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
final StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<String> myList = new ArrayList<String>();
session.setGlobal("myList", myList);
new Thread(new Runnable() {
public void run() {
session.fireUntilHalt();
}
}).start();
Map<String, Object> params = new HashMap<String, Object>();
params.put("x", 300);
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer", params);
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
try {
Thread.sleep(400);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(1, myList.size());
session.dispose();
}
@Test
public void testOnEntryTimerWorkItemExecuted() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <workItem id=\"2\" name=\"Work\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"300\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <work name=\"Human Task\" >\n" +
" </work>\n" +
" </workItem>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<String> myList = new ArrayList<String>();
session.setGlobal("myList", myList);
session.getWorkItemManager().registerWorkItemHandler("Human Task", new DoNothingWorkItemHandler());
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer");
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
try {
Thread.sleep(400);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(1, myList.size());
session.dispose();
}
@Test
public void testIncorrectOnEntryTimer() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <milestone id=\"2\" name=\"Wait\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"300asdf\" period=\"asfd\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >eval(false)</constraint>\n" +
" </milestone>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
assertEquals(2, builder.getErrors().size());
for (DroolsError error: builder.getErrors().getErrors()) {
logger.error(error.toString());
}
}
@Test
public void testOnEntryTimerExecutedMultipleTimes() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <milestone id=\"2\" name=\"Wait\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"300\" period =\"200\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >eval(false)</constraint>\n" +
" </milestone>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<String> myList = new ArrayList<String>();
session.setGlobal("myList", myList);
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer");
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
session.halt();
try {
Thread.sleep(600);
} catch (InterruptedException e) {
// do nothing
}
assertEquals(2, myList.size());
session.dispose();
}
@Test
public void testMultipleTimers() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <milestone id=\"2\" name=\"Wait\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"600\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer1\");</action>\n" +
" </timer>\n" +
" <timer id=\"2\" delay=\"200\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer2\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >eval(false)</constraint>\n" +
" </milestone>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
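// Unlike the other timer tests, this session uses a pseudo clock so both timers
// can be fired deterministically by advancing the clock instead of sleeping.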
final StatefulKnowledgeSession session;
{
InternalKnowledgePackage pkg = builder.getPackage();
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
kbase.addKnowledgePackages((Collection) Arrays.asList(builder.getPackages()));
SessionConfiguration conf = SessionConfiguration.newInstance();
conf.setOption( ClockTypeOption.get( ClockType.PSEUDO_CLOCK.getId() ) );
session = kbase.newStatefulKnowledgeSession(conf, null);
}
SessionPseudoClock clock = ( SessionPseudoClock) session.getSessionClock();
clock.advanceTime( 300,
TimeUnit.MILLISECONDS );
List<String> myList = new ArrayList<String>();
session.setGlobal("myList", myList);
ProcessInstance processInstance = ( ProcessInstance ) session.startProcess("org.drools.timer");
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(2, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
clock = ( SessionPseudoClock) session.getSessionClock();
clock.advanceTime( 500,
TimeUnit.MILLISECONDS );
assertEquals(1, myList.size());
assertEquals("Executing timer2", myList.get(0));
clock.advanceTime( 500,
TimeUnit.MILLISECONDS );
assertEquals(2, myList.size());
session.dispose();
}
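// Completing the milestone (by inserting a Message that satisfies its constraint)
// should cancel the still-pending on-entry timer before it ever fires.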
@Test
public void testOnEntryTimerCancelled() throws Exception {
Reader source = new StringReader(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
" xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
" type=\"RuleFlow\" name=\"flow\" id=\"org.drools.timer\" package-name=\"org.drools\" version=\"1\" >\n" +
"\n" +
" <header>\n" +
" <globals>\n" +
" <global identifier=\"myList\" type=\"java.util.List\" />\n" +
" </globals>\n" +
" </header>\n" +
"\n" +
" <nodes>\n" +
" <start id=\"1\" name=\"Start\" />\n" +
" <milestone id=\"2\" name=\"Wait\" >\n" +
" <timers>\n" +
" <timer id=\"1\" delay=\"2000\" >\n" +
" <action type=\"expression\" dialect=\"java\" >myList.add(\"Executing timer\");</action>\n" +
" </timer>\n" +
" </timers>\n" +
" <constraint type=\"rule\" dialect=\"mvel\" >org.jbpm.integrationtests.test.Message( )</constraint>\n" +
" </milestone>\n" +
" <end id=\"3\" name=\"End\" />\n" +
" </nodes>\n" +
"\n" +
" <connections>\n" +
" <connection from=\"1\" to=\"2\" />\n" +
" <connection from=\"2\" to=\"3\" />\n" +
" </connections>\n" +
"\n" +
"</process>");
builder.addRuleFlow(source);
StatefulKnowledgeSession session = createKieSession(builder.getPackage());
List<String> myList = new ArrayList<String>();
session.setGlobal("myList", myList);
ProcessInstance processInstance = ( ProcessInstance )
session.startProcess("org.drools.timer");
assertEquals(0, myList.size());
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
assertEquals(1, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
session.insert(new Message());
session.fireAllRules();
assertEquals(0, myList.size());
assertEquals(0, ((InternalProcessRuntime) ((InternalWorkingMemory) session).getProcessRuntime()).getTimerManager().getTimers().size());
session.dispose();
}
}
|
|
/* Copyright (c) 1995-2000, The Hypersonic SQL Group.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the Hypersonic SQL Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE HYPERSONIC SQL GROUP,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* on behalf of the Hypersonic SQL Group.
*
*
* For work added by the HSQL Development Group:
*
* Copyright (c) 2001-2008, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb;
import org.hsqldb.lib.IntLookup;
import org.hsqldb.lib.java.JavaSystem;
import org.hsqldb.persist.CachedObject;
import org.hsqldb.rowio.RowOutputInterface;
// fredt@users 20020221 - patch 513005 by sqlbob@users (RMP)
// fredt@users 20020920 - patch 1.7.1 - refactoring to cut memory footprint
// fredt@users 20021215 - doc 1.7.2 - javadoc comments
/**
* Base class for a database row object implementing rows for
* memory resident tables.<p>
*
* Subclass CachedRow implements rows for CACHED and TEXT tables
*
* @author Thomas Mueller (Hypersonic SQL Group)
* @version 1.8.0
* @since Hypersonic SQL
*/
public class Row implements CachedObject {
int tableId;
int iPos;
protected Object[] oData;
protected Node nPrimaryNode;
/**
* Default constructor used only in subclasses.
*/
protected Row() {}
/**
* Constructor for MEMORY table Row. The result is a Row with Nodes that
* are not yet linked with other Nodes in the AVL indexes.
*/
Row(Table t, Object[] o) throws HsqlException {
int index = t.getIndexCount();
nPrimaryNode = Node.newNode(this, 0, t);
Node n = nPrimaryNode;
for (int i = 1; i < index; i++) {
n.nNext = Node.newNode(this, i, t);
n = n.nNext;
}
tableId = t.getId();
oData = o;
}
/**
* Returns the Node for a given Index, using the ordinal position of the
* Index within the Table Object.
*/
Node getNode(int index) {
Node n = nPrimaryNode;
while (index-- > 0) {
n = n.nNext;
}
return n;
}
/**
* Returns the Node for the next Index on this database row, given the
* Node for any Index.
*/
Node getNextNode(Node n) {
if (n == null) {
n = nPrimaryNode;
} else {
n = n.nNext;
}
return n;
}
/**
* Returns the Row Object that currently represents the same database row.
* In current implementations of Row, this is always the same as this
* Object for MEMORY tables, but could be a different Object for the CachedRow
* or CachedDataRow implementations. For example, the Row Object that
* represents a given database row can be freed from the Cache when other
* rows need to be loaded into the Cache. getUpdatedRow() returns a
* currently valid Row object that is in the Cache.
*/
Row getUpdatedRow() throws HsqlException {
return this;
}
/**
* Returns the array of fields in the database row.
*/
public Object[] getData() {
return oData;
}
/**
* This method is used only when the database row is deleted, not when it is freed
* from the Cache.
*/
void delete() throws HsqlException {
JavaSystem.memoryRecords++;
nPrimaryNode = null;
}
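/**
* Unlinks the chain of index Nodes belonging to this row and clears the
* reference to the primary Node, allowing the Nodes to be garbage collected.
*/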
void clearNodeLinks() {
Node last;
Node temp;
last = nPrimaryNode;
while (last.nNext != null) {
temp = last.nNext;
last.nNext = null;
last = temp;
}
nPrimaryNode = null;
}
boolean isCascadeDeleted() {
return nPrimaryNode == null;
}
public int getRealSize(RowOutputInterface out) {
return 0;
}
public void setStorageSize(int size) {
;
}
public int getStorageSize() {
return 0;
}
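/**
* Returns a composite id for this row: the table id in the upper 32 bits and
* the row position (iPos) in the lower 32 bits.
*/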
public long getId() {
return ((long) tableId << 32) + ((long) iPos);
}
public static long getId(Table table, int pos) {
return ((long) table.getId() << 32) + ((long) pos);
}
public int getPos() {
return iPos;
}
public void setPos(int pos) {
iPos = pos;
}
public boolean hasChanged() {
return false;
}
public boolean isKeepInMemory() {
return true;
}
public void keepInMemory(boolean keep) {}
public boolean isInMemory() {
return true;
}
public void setInMemory(boolean in) {}
public void write(RowOutputInterface out, boolean bFullSave) {}
public void write(RowOutputInterface out, IntLookup lookup) {}
/**
* Lifetime scope of this method depends on the operations performed on
* any cached tables since this row or the parameter was constructed.
* If only deletes or only inserts have been performed, this method
* remains valid. Otherwise it can return invalid results.
*
* @param obj row to compare
* @return boolean
*/
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof Row) {
return ((Row) obj).iPos == iPos;
}
return false;
}
/**
* Hash code is valid only until a modification to the cache
*
* @return file position of row
*/
public int hashCode() {
return iPos;
}
}
|
|
package com.croconaut.ratemebuddy.activities;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.design.widget.NavigationView;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.inputmethod.InputMethodManager;
import android.webkit.MimeTypeMap;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.signature.StringSignature;
import com.croconaut.cpt.data.Communication;
import com.croconaut.ratemebuddy.AppData;
import com.croconaut.ratemebuddy.ExternalUriContract;
import com.croconaut.ratemebuddy.R;
import com.croconaut.ratemebuddy.SharedFilesContract;
import com.croconaut.ratemebuddy.ui.views.transformation.CircleTransform;
import com.croconaut.ratemebuddy.utils.CommonUtils;
import com.croconaut.ratemebuddy.utils.ProfileUtils;
import com.croconaut.ratemebuddy.utils.ThemeManager;
import com.croconaut.ratemebuddy.utils.ThemeUtils;
import com.croconaut.ratemebuddy.utils.pojo.profiles.MyProfile;
import com.croconaut.tictactoe.ui.activities.MenuActivity;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import eu.inloop.viewmodel.base.ViewModelBaseEmptyActivity;
public class ToolbarDrawerActivity extends ViewModelBaseEmptyActivity implements CptProcessor {
private static final String STATE_SELECTED_POSITION = "stateSelectedPosition";
private static final String TAG = ToolbarDrawerActivity.class.getName();
private static final int PENDING_INTENT_TXT_REQUEST_CODE = 55886;
protected DrawerLayout drawerLayout;
/**
* Fonts
*/
protected Typeface tLight;
protected Typeface tRegular;
protected Typeface tSemiBold;
protected Typeface tBold;
/**
* Utils.
*/
protected ThemeManager theme;
protected SharedPreferences prefs;
protected AppData appData;
protected Context mContext;
protected Resources mRes;
protected Toolbar toolbar;
protected ProfileUtils profileUtils;
protected CommonUtils commonUtils;
protected NavigationView navigationView;
private int mCurrentSelectedPosition;
@Override
protected void onDestroy() {
super.onDestroy();
Log.e(TAG, "On DESTROY CALLED");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
initializeViewsAndResources();
this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
initializeAppBackground();
if (savedInstanceState != null) {
mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
}
if (getIntent().getExtras() != null) {
mCurrentSelectedPosition = getIntent().getExtras().getInt(STATE_SELECTED_POSITION);
}
setUpNavigationDrawer();
getWindow().getDecorView().setBackgroundColor(ContextCompat.getColor(mContext,R.color.material_white));
}
@Override
public void onBackPressed() {
if (drawerLayout != null && drawerLayout.isDrawerOpen(GravityCompat.START))
drawerLayout.closeDrawer(GravityCompat.START);
else {
super.onBackPressed();
overridePendingTransition(R.anim.fade_in, R.anim.fade_out);
}
}
@Override
protected void onResume() {
super.onResume();
initializeHeader();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION, 0);
if (navigationView != null) {
Menu menu = navigationView.getMenu();
if (mCurrentSelectedPosition != 0)
menu.getItem(mCurrentSelectedPosition - 1).setChecked(true);
}
}
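// Wires the toolbar navigation icon to open the drawer and hides the soft keyboard
// whenever the drawer is opened or closed.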
protected void setUpNavigationDrawer() {
if (toolbar != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
toolbar.setNavigationIcon(R.drawable.ic_action_drawer);
drawerLayout = (DrawerLayout) findViewById(R.id.drawerLayout);
drawerLayout.setBackgroundColor(ContextCompat.getColor(mContext,R.color.material_white));
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
drawerLayout.openDrawer(GravityCompat.START);
}
});
ActionBarDrawerToggle actionBarDrawerToggle = new ActionBarDrawerToggle(this, drawerLayout, toolbar, R.string.app_name, R.string.app_name) {
@Override
public void onDrawerClosed(View drawerView) {
super.onDrawerClosed(drawerView);
InputMethodManager inputMethodManager = (InputMethodManager)
getSystemService(Context.INPUT_METHOD_SERVICE);
inputMethodManager.hideSoftInputFromWindow(getCurrentFocus().getWindowToken(), 0);
}
@Override
public void onDrawerOpened(View drawerView) {
super.onDrawerOpened(drawerView);
InputMethodManager inputMethodManager = (InputMethodManager)
getSystemService(Context.INPUT_METHOD_SERVICE);
inputMethodManager.hideSoftInputFromWindow(getCurrentFocus().getWindowToken(), 0);
}
};
drawerLayout.setDrawerListener(actionBarDrawerToggle);
}
}
@Override
public boolean process(Intent cptIntent) throws IOException, ClassNotFoundException {
initializeHeader();
return false;
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_MENU) {
if (drawerLayout == null)
return true;
if (!drawerLayout.isDrawerOpen(Gravity.LEFT)) {
drawerLayout.openDrawer(Gravity.LEFT);
} else if (drawerLayout.isDrawerOpen(Gravity.LEFT)) {
drawerLayout.closeDrawer(Gravity.LEFT);
}
return true;
}
return super.onKeyDown(keyCode, event);
}
protected void setUpNavigationView() {
navigationView = (NavigationView) findViewById(R.id.navigationView);
if (navigationView != null) {
navigationView.setNavigationItemSelectedListener(new NavigationView.OnNavigationItemSelectedListener() {
@Override
public boolean onNavigationItemSelected(MenuItem menuItem) {
Bundle bundle = new Bundle();
switch (menuItem.getItemId()) {
case R.id.timeline:
mCurrentSelectedPosition = 1;
bundle.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
startActivity(new Intent(ToolbarDrawerActivity.this, TimelineActivity.class).putExtras(bundle));
return true;
case R.id.people:
mCurrentSelectedPosition = 2;
bundle.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
startActivity(new Intent(mContext, PeopleParentActivityBB.class).putExtras(bundle));
return true;
case R.id.settings:
mCurrentSelectedPosition = 3;
bundle.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
startActivity(new Intent(mContext, SettingsActivity.class).putExtras(bundle));
return true;
case R.id.share:
mCurrentSelectedPosition = 4;
bundle.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
File apkFile = readApk();
if (apkFile == null) {
Toast.makeText(mContext, mRes.getString(R.string.toast_share_problem), Toast.LENGTH_LONG).show();
return true;
}
Intent sharingIntent = new Intent(android.content.Intent.ACTION_SEND);
sharingIntent.setType(MimeTypeMap.getSingleton().getMimeTypeFromExtension("apk"));
sharingIntent.setFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); // needed because android:exported = false for our CP
//sharingIntent.setComponent(new ComponentName("com.android.bluetooth", "com.android.bluetooth.opp.BluetoothOppLauncherActivity"));
sharingIntent.putExtra(Intent.EXTRA_STREAM, Uri.withAppendedPath(SharedFilesContract.getRootUri(ToolbarDrawerActivity.this), apkFile.getPath()));
// ForResult is mandatory even if not used!
startActivityForResult(Intent.createChooser(sharingIntent, mRes.getString(R.string.action_share)), -1);
return true;
case R.id.inviteFriend:
mCurrentSelectedPosition = 5;
String name = MyProfile.getInstance(getApplicationContext()).getName();
String myProfileId = MyProfile.getInstance(getApplicationContext()).getProfileId();
Uri baseUri = ExternalUriContract.PROFILE_URI.buildUpon()
.appendQueryParameter(ExternalUriContract.PARAM_PROFILE_NAME, name)
.build();
String subject = String.format(
getResources().getString(R.string.invite_profile_subject),
getResources().getString(R.string.app_name),
name
);
String text = String.format(
getResources().getString(R.string.invite_profile_text),
getResources().getString(R.string.app_name),
"%1$s", // will be replaced by the complete uri
name.replace("%", "%%") // in case name contains '%'
);
String textHtml = String.format(
getResources().getString(R.string.invite_profile_text_hml),
getResources().getString(R.string.app_name),
"%1$s", // will be replaced by the complete uri
name.replace("%", "%%") // in case name contains '%'
);
Communication.inviteFriend(mContext,
mRes.getString(R.string.action_invite_friend),
subject,
text,
textHtml,
baseUri,
ExternalUriContract.PARAM_PROFILE_CROCO_ID
);
return true;
case R.id.about:
mCurrentSelectedPosition = 6;
bundle.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
startActivity(new Intent(mContext, AboutActivity.class).putExtras(bundle));
return true;
case R.id.writeToCeo:
Intent ceoIntent = new Intent(mContext, CommunicationActivity.class);
ceoIntent.putExtra(CommunicationActivity.EXTRA_TARGET_CROCO_ID, CommonUtils.CEO_CROCO_ID);
startActivity(ceoIntent);
overridePendingTransition(R.anim.fade_in, R.anim.fade_out);
return true;
case R.id.reportBug:
Communication.sendCptLogs(mContext);
return true;
case R.id.gameTicTacToe:
startActivity(MenuActivity.newStartIntent(getApplicationContext()));
return true;
default:
return true;
}
}
});
View header = navigationView.getHeaderView(0);
header.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivity(new Intent(mContext, EditProfileActivity.class));
drawerLayout.closeDrawer(GravityCompat.START);
}
});
}
initializeHeader();
}
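// Returns this application's own installed APK file (matched by package name in
// ApplicationInfo.sourceDir) so it can be shared via the ACTION_SEND intent above.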
private File readApk() {
PackageManager pm = getPackageManager();
List<ApplicationInfo> packages = pm.getInstalledApplications(PackageManager.GET_META_DATA);
for (ApplicationInfo packageInfo : packages) {
if (packageInfo.sourceDir.contains(getApplicationContext().getPackageName())) {
return new File(packageInfo.sourceDir);
}
}
return null;
}
private void initializeAppBackground() {
getWindow().setBackgroundDrawable(new ColorDrawable(ContextCompat.getColor(mContext,R.color.white_background)));
}
private void initializeViewsAndResources() {
theme = new ThemeManager(this);
tLight = Typeface.createFromAsset(getAssets(), "fonts/light.ttf");
tRegular = Typeface.createFromAsset(getAssets(), "fonts/regular.ttf");
tSemiBold = Typeface.createFromAsset(getAssets(), "fonts/semibold.ttf");
tBold = Typeface.createFromAsset(getAssets(), "fonts/bold.ttf");
mRes = getResources();
prefs = PreferenceManager.getDefaultSharedPreferences(this);
commonUtils = new CommonUtils();
appData = (AppData) getApplication();
profileUtils = new ProfileUtils(appData);
mContext = this;
}
protected void initializeHeader() {
if (navigationView == null) return;
View header = navigationView.getHeaderView(0);
ImageView ivCover = (ImageView) header.findViewById(R.id.ivCoverDrawer);
final ImageView ivPhoto = (ImageView) header.findViewById(R.id.ivPhotoDrawer);
TextView tvName = (TextView) header.findViewById(R.id.tvNameDrawer);
TextView tvStatus = (TextView) header.findViewById(R.id.tvStatusDrawer);
TextView tvLikes = (TextView) header.findViewById(R.id.tvLikesDrawer);
TextView tvComments = (TextView) header.findViewById(R.id.tvCommentsDrawer);
MyProfile myProfile = MyProfile.getInstance(mContext);
if (myProfile == null) {
Log.e(TAG, "My profile is null, returning!");
return;
}
Glide.with(this)
.load(ThemeUtils.getBgCoverResId(prefs))
.asBitmap()
.thumbnail(0.2f)
.into(ivCover);
Glide.with(this)
.load(myProfile.getThumbUri())
.asBitmap()
.signature(new StringSignature(
String.valueOf(MyProfile.getInstance(this).getTimeStamp())))
.thumbnail(0.2f)
.transform(new CircleTransform(mContext))
.into(ivPhoto);
tvName.setText(myProfile.getName());
tvName.setTypeface(tSemiBold);
tvName.setShadowLayer(3, 0, 1, Color.BLACK);
tvStatus.setShadowLayer(5, 0, 1, Color.BLACK);
RelativeLayout statusStats = (RelativeLayout) header.findViewById(R.id.rlStatusStatsDrawer);
if (myProfile.getStatus() != null && myProfile.getStatus().getContent() != null) {
tvStatus.setText(myProfile.getStatus().getContent());
tvLikes.setText(String.valueOf(myProfile.getStatus().getVotes().size()));
tvComments.setText(String.valueOf(myProfile.getStatus().getComments().size()));
if (myProfile.getStatus().getContent().isEmpty()) {
statusStats.setVisibility(View.GONE);
tvStatus.setVisibility(View.GONE);
} else {
statusStats.setVisibility(View.VISIBLE);
tvStatus.setVisibility(View.VISIBLE);
statusStats.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(ToolbarDrawerActivity.this, CommentActivity.class);
intent.putExtra(CommentActivity.EXTRA_CROCO_ID, MyProfile.getInstance(mContext).getIdent());
startActivity(intent);
}
});
}
} else {
tvStatus.setVisibility(View.GONE);
statusStats.setVisibility(View.GONE);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.action_bar_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
onBackPressed();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* Force overflow button to show on devices with hardware menu button.
*/
@Override
public boolean onMenuOpened(int featureId, Menu menu) {
if (featureId == Window.FEATURE_ACTION_BAR && menu != null) {
if (menu.getClass().getSimpleName().equals("MenuBuilder")) {
try {
Method m = menu.getClass().getDeclaredMethod(
"setOptionalIconsVisible", Boolean.TYPE);
m.setAccessible(true);
m.invoke(menu, true);
} catch (NoSuchMethodException e) {
Log.e(TAG, "onMenuOpened", e);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
return super.onMenuOpened(featureId, menu);
}
public void setToolbarTitle(@NonNull final String header){
toolbar.setTitleTextColor(Color.WHITE);
toolbar.setTitle(header);
}
public void initializeHeaderWithDrawer(String header, boolean showNavIcon) {
toolbar = (Toolbar) findViewById(R.id.idToolbar);
toolbar.setTitleTextColor(Color.WHITE);
toolbar.setTitle(header);
toolbar.setNavigationIcon(R.drawable.ic_action_drawer);
setSupportActionBar(toolbar);
setUpNavigationDrawer();
setUpNavigationView();
// if we don't want to show the drawer nav icon, show a back arrow that navigates back instead
if (!showNavIcon) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
getSupportActionBar().setHomeAsUpIndicator(R.drawable.ic_action_back);
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onBackPressed();
}
});
}
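// Sync the navigation menu's checked state with the current position: check the
// active item (if any) and clear the check mark on every other item.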
Menu menu = navigationView.getMenu();
MenuItem checkedItem = null;
if (mCurrentSelectedPosition != 0)
checkedItem = menu.getItem(mCurrentSelectedPosition - 1).setChecked(true);
for (int i = 0; i < menu.size(); i++) {
MenuItem item = menu.getItem(i);
if (checkedItem == null)
item.setChecked(false);
else if (item.getItemId() != checkedItem.getItemId()) item.setChecked(false);
}
}
}
|
|
/*
* Copyright 2002-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.jtrac.domain;
import info.jtrac.Jtrac;
import static info.jtrac.domain.ColumnHeading.Name.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.wicket.PageParameters;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Restrictions;
/**
* Object that holds filter criteria when searching for Items
* and also creates a Hibernate Criteria query to pass to the DAO
*/
@SuppressWarnings("serial")
public class ItemSearch implements Serializable {
private Space space; // if null, means aggregate across all spaces
private User user; // this will be set in the case space is null
private int pageSize = 25;
private int currentPage;
private long resultCount;
private String sortFieldName = "id";
private boolean sortDescending = true;
private boolean showHistory;
private boolean batchMode;
private long selectedItemId;
private String relatingItemRefId;
private Collection<Long> itemIds;
private List<ColumnHeading> columnHeadings;
private Map<String, FilterCriteria> filterCriteriaMap = new LinkedHashMap<String, FilterCriteria>();
private String defaultVisibleFlags;
public ItemSearch(User user) {
this.user = user;
this.columnHeadings = ColumnHeading.getColumnHeadings();
this.defaultVisibleFlags = getVisibleFlags();
}
public ItemSearch(Space space) {
this.space = space;
this.columnHeadings = ColumnHeading.getColumnHeadings(space);
this.defaultVisibleFlags = getVisibleFlags();
}
public void initFromPageParameters(PageParameters params, User user, Jtrac jtrac) {
showHistory = params.getBoolean("showHistory");
pageSize = params.getInt("pageSize", 25);
sortDescending = !params.getBoolean("sortAscending");
sortFieldName = params.getString("sortFieldName", "id");
for(Object o : params.keySet()) {
String name = o.toString();
if(ColumnHeading.isValidFieldOrColumnName(name)) {
ColumnHeading ch = getColumnHeading(name);
ch.loadFromQueryString(params.getString(name), user, jtrac);
}
}
relatingItemRefId = params.getString("relatingItemRefId", null);
String visibleFlags = params.getString("cols", null);
if(visibleFlags != null) {
int i = 0;
for(ColumnHeading ch : columnHeadings) {
if(i >= visibleFlags.length()) {
break;
}
char flag = visibleFlags.charAt(i);
if(flag == '1') {
ch.setVisible(true);
} else {
ch.setVisible(false);
}
i++;
}
}
}
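// Encodes column visibility as a string of '1'/'0' flags, one character per column
// heading; this is the format carried by the "cols" query string parameter.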
private String getVisibleFlags() {
StringBuilder visibleFlags = new StringBuilder();
for(ColumnHeading ch : columnHeadings) {
if(ch.isVisible()) {
visibleFlags.append("1");
} else {
visibleFlags.append("0");
}
}
return visibleFlags.toString();
}
public PageParameters getAsQueryString() {
Map<String, String> map = new HashMap<String, String>();
if(space != null) {
map.put("s", space.getId() + "");
}
for(ColumnHeading ch : columnHeadings) {
String s = ch.getAsQueryString();
if(s != null) {
map.put(ch.getNameText(), s);
}
}
String visibleFlags = getVisibleFlags();
if(!visibleFlags.equals(defaultVisibleFlags)) {
map.put("cols", visibleFlags.toString());
}
if(showHistory) {
map.put("showHistory", "true");
}
if(pageSize != 25) {
map.put("pageSize", pageSize + "");
}
if(!sortDescending) {
map.put("sortAscending", "true");
}
if(!sortFieldName.equals("id")) {
map.put("sortFieldName", sortFieldName);
}
if(relatingItemRefId != null) {
map.put("relatingItemRefId", relatingItemRefId);
}
return new PageParameters(map);
}
private DetachedCriteria parent; // temp working variable hack
// have to do this two step process as "order by" clause conflicts with "count (*)" clause
// so the DAO has to use getCriteriaForCount() separately
public DetachedCriteria getCriteria() {
DetachedCriteria criteria = getCriteriaForCount();
if (sortFieldName == null) { // can happen only for multi-space search
sortFieldName = "id"; // effectively is a sort on created date
}
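// Sorting by "id" (or by "space" in a multi-space search) gets special handling:
// order by space name first when aggregating across spaces, then by item id
// (and by history id as well when showing history).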
if(sortFieldName.equals("id") || sortFieldName.equals("space")) {
if(showHistory) {
// if showHistory: sort by item.id and then history.id
if(sortDescending) {
if(space == null) {
DetachedCriteria parentSpace = parent.createCriteria("space");
parentSpace.addOrder(Order.desc("name"));
}
criteria.addOrder(Order.desc("parent.id"));
criteria.addOrder(Order.desc("id"));
} else {
if(space == null) {
DetachedCriteria parentSpace = parent.createCriteria("space");
parentSpace.addOrder(Order.asc("name"));
}
criteria.addOrder(Order.asc("parent.id"));
criteria.addOrder(Order.asc("id"));
}
} else {
if (sortDescending) {
if(space == null) {
DetachedCriteria parentSpace = criteria.createCriteria("space");
parentSpace.addOrder(Order.desc("name"));
}
criteria.addOrder(Order.desc("id"));
} else {
if(space == null) {
DetachedCriteria parentSpace = criteria.createCriteria("space");
parentSpace.addOrder(Order.asc("name"));
}
criteria.addOrder(Order.asc("id"));
}
}
} else {
if (sortDescending) {
criteria.addOrder(Order.desc(sortFieldName));
} else {
criteria.addOrder(Order.asc(sortFieldName));
}
}
return criteria;
}
public DetachedCriteria getCriteriaForCount() {
DetachedCriteria criteria = null;
if (showHistory) {
criteria = DetachedCriteria.forClass(History.class);
// apply restrictions to parent, this is an inner join =============
parent = criteria.createCriteria("parent");
if(space == null) {
parent.add(Restrictions.in("space", getSelectedSpaces()));
} else {
parent.add(Restrictions.eq("space", space));
}
if (itemIds != null) {
parent.add(Restrictions.in("id", itemIds));
}
} else {
criteria = DetachedCriteria.forClass(Item.class);
if(space == null) {
criteria.add(Restrictions.in("space", getSelectedSpaces()));
} else {
criteria.add(Restrictions.eq("space", space));
}
if (itemIds != null) {
criteria.add(Restrictions.in("id", itemIds));
}
}
for(ColumnHeading ch : columnHeadings) {
ch.addRestrictions(criteria);
}
return criteria;
}
public List<Field> getFields() {
if(space == null) {
List<Field> list = new ArrayList<Field>(2);
Field severity = new Field(Field.Name.SEVERITY);
severity.initOptions();
list.add(severity);
Field priority = new Field(Field.Name.PRIORITY);
priority.initOptions();
list.add(priority);
return list;
} else {
return space.getMetadata().getFieldList();
}
}
private ColumnHeading getColumnHeading(ColumnHeading.Name name) {
for(ColumnHeading ch : columnHeadings) {
if(ch.getName() == name) {
return ch;
}
}
return null;
}
private ColumnHeading getColumnHeading(String name) {
for(ColumnHeading ch : columnHeadings) {
if(ch.getNameText().equals(name)) {
return ch;
}
}
return null;
}
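// Returns the heading's single filter value as a String, clearing the filter
// expression (i.e. treating it as "no filter") when the value is blank.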
private String getStringValue(ColumnHeading ch) {
String s = (String) ch.getFilterCriteria().getValue();
if(s == null || s.trim().length() == 0) {
ch.getFilterCriteria().setExpression(null);
return null;
}
return s;
}
public String getRefId() {
ColumnHeading ch = getColumnHeading(ID);
return getStringValue(ch);
}
public String getSearchText() {
ColumnHeading ch = getColumnHeading(DETAIL);
return getStringValue(ch);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
public Collection<Space> getSelectedSpaces() {
ColumnHeading ch = getColumnHeading(SPACE);
List values = ch.getFilterCriteria().getValues();
if(values == null || values.size() == 0) {
ch.getFilterCriteria().setExpression(null);
return user.getSpaces();
}
return values;
}
public void toggleSortDirection() {
sortDescending = !sortDescending;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private List getSingletonList(Object o) {
List list = new ArrayList(1);
list.add(o);
return list;
}
public void setLoggedBy(User loggedBy) {
ColumnHeading ch = getColumnHeading(LOGGED_BY);
ch.getFilterCriteria().setExpression(FilterCriteria.Expression.IN);
ch.getFilterCriteria().setValues(getSingletonList(loggedBy));
}
public void setAssignedTo(User assignedTo) {
ColumnHeading ch = getColumnHeading(ASSIGNED_TO);
ch.getFilterCriteria().setExpression(FilterCriteria.Expression.IN);
ch.getFilterCriteria().setValues(getSingletonList(assignedTo));
}
public void setStatus(int i) {
ColumnHeading ch = getColumnHeading(STATUS);
ch.getFilterCriteria().setExpression(FilterCriteria.Expression.IN);
ch.getFilterCriteria().setValues(getSingletonList(i));
}
public List<ColumnHeading> getColumnHeadingsToRender() {
List<ColumnHeading> list = new ArrayList<ColumnHeading>(columnHeadings.size());
for(ColumnHeading ch : columnHeadings) {
if(ch.isVisible()) {
list.add(ch);
}
}
return list;
}
//==========================================================================
public boolean isBatchMode() {
return batchMode;
}
public void setBatchMode(boolean batchMode) {
this.batchMode = batchMode;
}
public Space getSpace() {
return space;
}
public void setSpace(Space space) {
this.space = space;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public int getPageSize() {
return pageSize;
}
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
}
public int getCurrentPage() {
return currentPage;
}
public void setCurrentPage(int currentPage) {
this.currentPage = currentPage;
}
public long getResultCount() {
return resultCount;
}
public void setResultCount(long resultCount) {
this.resultCount = resultCount;
}
public String getSortFieldName() {
return sortFieldName;
}
public void setSortFieldName(String sortFieldName) {
this.sortFieldName = sortFieldName;
}
public boolean isSortDescending() {
return sortDescending;
}
public void setSortDescending(boolean sortDescending) {
this.sortDescending = sortDescending;
}
public boolean isShowHistory() {
return showHistory;
}
public void setShowHistory(boolean showHistory) {
this.showHistory = showHistory;
}
public long getSelectedItemId() {
return selectedItemId;
}
public void setSelectedItemId(long selectedItemId) {
this.selectedItemId = selectedItemId;
}
public String getRelatingItemRefId() {
return relatingItemRefId;
}
public void setRelatingItemRefId(String relatingItemRefId) {
this.relatingItemRefId = relatingItemRefId;
}
public Collection<Long> getItemIds() {
return itemIds;
}
public void setItemIds(Collection<Long> itemIds) {
this.itemIds = itemIds;
}
public List<ColumnHeading> getColumnHeadings() {
return columnHeadings;
}
public void setColumnHeadings(List<ColumnHeading> columnHeadings) {
this.columnHeadings = columnHeadings;
}
public Map<String, FilterCriteria> getFilterCriteriaMap() {
return filterCriteriaMap;
}
public void setFilterCriteriaMap(Map<String, FilterCriteria> filterCriteriaMap) {
this.filterCriteriaMap = filterCriteriaMap;
}
}
|
|
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.exceptions;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.exception.DL4JException;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import java.util.Map;
import static org.junit.Assert.*;
/**
* A set of tests to ensure that useful exceptions are thrown on invalid input
*/
public class TestInvalidInput extends BaseDL4JTest {
@Test
public void testInputNinMismatchDense() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.feedForward(Nd4j.create(1, 20));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinMismatchDense(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinMismatchOutputLayer() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(20).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.feedForward(Nd4j.create(1, 10));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinMismatchOutputLayer(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testLabelsNOutMismatchOutputLayer() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.fit(Nd4j.create(1, 10), Nd4j.create(1, 20));
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
//From loss function
System.out.println("testLabelsNOutMismatchOutputLayer(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testLabelsNOutMismatchRnnOutputLayer() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new GravesLSTM.Builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.fit(Nd4j.create(1, 5, 8), Nd4j.create(1, 10, 8));
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
//From loss function
System.out.println("testLabelsNOutMismatchRnnOutputLayer(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinMismatchConvolutional() {
//Rank 4 input, but the number of input channels does not match the layer's nIn
int h = 16;
int w = 16;
int d = 3;
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.setInputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.feedForward(Nd4j.create(1, 5, h, w));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinMismatchConvolutional(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinRank2Convolutional() {
//Rank 2 input, instead of rank 4 input. For example, forgetting that convolutional layers expect rank 4 (NCHW) activations
int h = 16;
int w = 16;
int d = 3;
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new ConvolutionLayer.Builder().nIn(d).nOut(5).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.setInputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.feedForward(Nd4j.create(1, 5 * h * w));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinRank2Convolutional(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinRank2Subsampling() {
//Rank 2 input, instead of rank 4 input. For example, using the wrong input type
int h = 16;
int w = 16;
int d = 3;
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new SubsamplingLayer.Builder().kernelSize(2, 2).build())
.layer(1, new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build())
.setInputType(InputType.convolutional(h, w, d)).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.feedForward(Nd4j.create(1, 5 * h * w));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinRank2Subsampling(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinMismatchLSTM() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new GravesLSTM.Builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.fit(Nd4j.create(1, 10, 5), Nd4j.create(1, 5, 5));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinMismatchLSTM(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinMismatchBidirectionalLSTM() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).build())
.layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.fit(Nd4j.create(1, 10, 5), Nd4j.create(1, 5, 5));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinMismatchBidirectionalLSTM(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInputNinMismatchEmbeddingLayer() {
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(0, new EmbeddingLayer.Builder().nIn(10).nOut(10).build())
.layer(1, new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
try {
net.feedForward(Nd4j.create(10, 5));
fail("Expected DL4JException");
} catch (DL4JException e) {
System.out.println("testInputNinMismatchEmbeddingLayer(): " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
fail("Expected DL4JException");
}
}
@Test
public void testInvalidRnnTimeStep() {
//Idea: Using rnnTimeStep with a different number of examples between calls
//(i.e., not calling reset between time steps)
for(String layerType : new String[]{"simple", "lstm", "graves"}) {
Layer l;
switch (layerType){
case "simple":
l = new SimpleRnn.Builder().nIn(5).nOut(5).build();
break;
case "lstm":
l = new LSTM.Builder().nIn(5).nOut(5).build();
break;
case "graves":
l = new GravesLSTM.Builder().nIn(5).nOut(5).build();
break;
default:
throw new RuntimeException();
}
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
.layer(l)
.layer(new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build()).build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
net.rnnTimeStep(Nd4j.create(3, 5, 10));
Map<String, INDArray> m = net.rnnGetPreviousState(0);
assertNotNull(m);
assertFalse(m.isEmpty());
try {
net.rnnTimeStep(Nd4j.create(5, 5, 10));
fail("Expected Exception - " + layerType);
} catch (Exception e) {
// e.printStackTrace();
String msg = e.getMessage();
assertTrue(msg, msg != null && msg.contains("rnn") && msg.contains("batch"));
}
}
}
}
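/*
 * Usage sketch (not part of the original test class): the counterpart to
 * testInvalidRnnTimeStep() above. If the minibatch size has to change between
 * rnnTimeStep() calls, the stored RNN state should be cleared first. The network is
 * assumed to be configured as in the test (nIn = 5 for the first layer).
 */
class RnnTimeStepUsageSketch {
    static void timeStepWithNewBatchSize(MultiLayerNetwork net) {
        net.rnnTimeStep(Nd4j.create(3, 5, 10));  // batch size 3
        net.rnnClearPreviousState();             // clear stored state before changing the batch size
        net.rnnTimeStep(Nd4j.create(5, 5, 10));  // batch size 5 is now accepted
    }
}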
|
|
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.awt.dnd;
import java.awt.event.InputEvent;
/**
* The <code>DragSourceDragEvent</code> is
* delivered from the <code>DragSourceContextPeer</code>,
* via the <code>DragSourceContext</code>, to the <code>DragSourceListener</code>
* registered with that <code>DragSourceContext</code> and with its associated
* <code>DragSource</code>.
* <p>
* The <code>DragSourceDragEvent</code> reports the <i>target drop action</i>
* and the <i>user drop action</i> that reflect the current state of
* the drag operation.
* <p>
* <i>Target drop action</i> is one of <code>DnDConstants</code> that represents
* the drop action selected by the current drop target if this drop action is
* supported by the drag source or <code>DnDConstants.ACTION_NONE</code> if this
* drop action is not supported by the drag source.
* <p>
* <i>User drop action</i> depends on the drop actions supported by the drag
* source and the drop action selected by the user. The user can select a drop
* action by pressing modifier keys during the drag operation:
* <pre>
* Ctrl + Shift -> ACTION_LINK
* Ctrl -> ACTION_COPY
* Shift -> ACTION_MOVE
* </pre>
* If the user selects a drop action, the <i>user drop action</i> is one of
* <code>DnDConstants</code> that represents the selected drop action if this
* drop action is supported by the drag source or
* <code>DnDConstants.ACTION_NONE</code> if this drop action is not supported
* by the drag source.
* <p>
* If the user doesn't select a drop action, the set of
* <code>DnDConstants</code> that represents the set of drop actions supported
* by the drag source is searched for <code>DnDConstants.ACTION_MOVE</code>,
* then for <code>DnDConstants.ACTION_COPY</code>, then for
* <code>DnDConstants.ACTION_LINK</code> and the <i>user drop action</i> is the
* first constant found. If no constant is found the <i>user drop action</i>
* is <code>DnDConstants.ACTION_NONE</code>.
*
* @since 1.2
*
*/
public class DragSourceDragEvent extends DragSourceEvent {
private static final long serialVersionUID = 481346297933902471L;
/**
* Constructs a <code>DragSourceDragEvent</code>.
* This class is typically
* instantiated by the <code>DragSourceContextPeer</code>
* rather than directly
* by client code.
* The coordinates for this <code>DragSourceDragEvent</code>
* are not specified, so <code>getLocation</code> will return
* <code>null</code> for this event.
* <p>
* The arguments <code>dropAction</code> and <code>action</code> should
* be one of <code>DnDConstants</code> that represents a single action.
* The argument <code>modifiers</code> should be either a bitwise mask
* of old <code>java.awt.event.InputEvent.*_MASK</code> constants or a
* bitwise mask of extended <code>java.awt.event.InputEvent.*_DOWN_MASK</code>
* constants.
* This constructor does not throw any exception for invalid <code>dropAction</code>,
* <code>action</code> and <code>modifiers</code>.
*
* @param dsc the <code>DragSourceContext</code> that is to manage
* notifications for this event.
* @param dropAction the user drop action.
* @param action the target drop action.
* @param modifiers the modifier keys down during event (shift, ctrl,
* alt, meta)
* Either extended _DOWN_MASK or old _MASK modifiers
* should be used, but both models should not be mixed
* in one event. Use of the extended modifiers is
* preferred.
*
* @throws IllegalArgumentException if <code>dsc</code> is <code>null</code>.
*
* @see java.awt.event.InputEvent
* @see DragSourceEvent#getLocation
*/
public DragSourceDragEvent(DragSourceContext dsc, int dropAction,
int action, int modifiers) {
super(dsc);
targetActions = action;
gestureModifiers = modifiers;
this.dropAction = dropAction;
if ((modifiers & ~(JDK_1_3_MODIFIERS | JDK_1_4_MODIFIERS)) != 0) {
invalidModifiers = true;
} else if ((getGestureModifiers() != 0) && (getGestureModifiersEx() == 0)) {
setNewModifiers();
} else if ((getGestureModifiers() == 0) && (getGestureModifiersEx() != 0)) {
setOldModifiers();
} else {
invalidModifiers = true;
}
}
/**
* Constructs a <code>DragSourceDragEvent</code> given the specified
* <code>DragSourceContext</code>, user drop action, target drop action,
* modifiers and coordinates.
* <p>
* The arguments <code>dropAction</code> and <code>action</code> should
* be one of <code>DnDConstants</code> that represents a single action.
* The argument <code>modifiers</code> should be either a bitwise mask
* of old <code>java.awt.event.InputEvent.*_MASK</code> constants or a
* bitwise mask of extended <code>java.awt.event.InputEvent.*_DOWN_MASK</code>
* constants.
* This constructor does not throw any exception for invalid <code>dropAction</code>,
* <code>action</code> and <code>modifiers</code>.
*
* @param dsc the <code>DragSourceContext</code> associated with this
* event.
* @param dropAction the user drop action.
* @param action the target drop action.
* @param modifiers the modifier keys down during event (shift, ctrl,
* alt, meta)
* Either extended _DOWN_MASK or old _MASK modifiers
* should be used, but both models should not be mixed
* in one event. Use of the extended modifiers is
* preferred.
* @param x the horizontal coordinate for the cursor location
* @param y the vertical coordinate for the cursor location
*
* @throws IllegalArgumentException if <code>dsc</code> is <code>null</code>.
*
* @see java.awt.event.InputEvent
* @since 1.4
*/
public DragSourceDragEvent(DragSourceContext dsc, int dropAction,
int action, int modifiers, int x, int y) {
super(dsc, x, y);
targetActions = action;
gestureModifiers = modifiers;
this.dropAction = dropAction;
if ((modifiers & ~(JDK_1_3_MODIFIERS | JDK_1_4_MODIFIERS)) != 0) {
invalidModifiers = true;
} else if ((getGestureModifiers() != 0) && (getGestureModifiersEx() == 0)) {
setNewModifiers();
} else if ((getGestureModifiers() == 0) && (getGestureModifiersEx() != 0)) {
setOldModifiers();
} else {
invalidModifiers = true;
}
}
/**
* This method returns the target drop action.
*
* @return the target drop action.
*/
public int getTargetActions() {
return targetActions;
}
private static final int JDK_1_3_MODIFIERS = InputEvent.SHIFT_DOWN_MASK - 1;
private static final int JDK_1_4_MODIFIERS =
((InputEvent.ALT_GRAPH_DOWN_MASK << 1) - 1) & ~JDK_1_3_MODIFIERS;
/**
* This method returns an <code>int</code> representing
* the current state of the input device modifiers
* associated with the user's gesture. Typically these
* would be mouse buttons or keyboard modifiers.
* <P>
* If the <code>modifiers</code> passed to the constructor
* are invalid, this method returns them unchanged.
*
* @return the current state of the input device modifiers
*/
public int getGestureModifiers() {
return invalidModifiers ? gestureModifiers : gestureModifiers & JDK_1_3_MODIFIERS;
}
/**
* This method returns an <code>int</code> representing
* the current state of the input device extended modifiers
* associated with the user's gesture.
* See {@link InputEvent#getModifiersEx}
* <P>
* If the <code>modifiers</code> passed to the constructor
* are invalid, this method returns them unchanged.
*
* @return the current state of the input device extended modifiers
* @since 1.4
*/
public int getGestureModifiersEx() {
return invalidModifiers ? gestureModifiers : gestureModifiers & JDK_1_4_MODIFIERS;
}
/**
* This method returns the user drop action.
*
* @return the user drop action.
*/
public int getUserAction() { return dropAction; }
/**
* This method returns the logical intersection of
* the target drop action and the set of drop actions supported by
* the drag source.
*
* @return the logical intersection of the target drop action and
* the set of drop actions supported by the drag source.
*/
public int getDropAction() {
return targetActions & getDragSourceContext().getSourceActions();
}
/*
* fields
*/
/**
* The target drop action.
*
* @serial
*/
private int targetActions = DnDConstants.ACTION_NONE;
/**
* The user drop action.
*
* @serial
*/
private int dropAction = DnDConstants.ACTION_NONE;
/**
* The state of the input device modifiers associated with the user
* gesture.
*
* @serial
*/
private int gestureModifiers = 0;
/**
* Indicates whether the <code>gestureModifiers</code> are invalid.
*
* @serial
*/
private boolean invalidModifiers;
/**
* Sets new modifiers by the old ones.
* The mouse modifiers have higher priority than overlaying key
* modifiers.
*/
private void setNewModifiers() {
if ((gestureModifiers & InputEvent.BUTTON1_MASK) != 0) {
gestureModifiers |= InputEvent.BUTTON1_DOWN_MASK;
}
if ((gestureModifiers & InputEvent.BUTTON2_MASK) != 0) {
gestureModifiers |= InputEvent.BUTTON2_DOWN_MASK;
}
if ((gestureModifiers & InputEvent.BUTTON3_MASK) != 0) {
gestureModifiers |= InputEvent.BUTTON3_DOWN_MASK;
}
if ((gestureModifiers & InputEvent.SHIFT_MASK) != 0) {
gestureModifiers |= InputEvent.SHIFT_DOWN_MASK;
}
if ((gestureModifiers & InputEvent.CTRL_MASK) != 0) {
gestureModifiers |= InputEvent.CTRL_DOWN_MASK;
}
if ((gestureModifiers & InputEvent.ALT_GRAPH_MASK) != 0) {
gestureModifiers |= InputEvent.ALT_GRAPH_DOWN_MASK;
}
}
/**
* Sets old modifiers by the new ones.
*/
private void setOldModifiers() {
if ((gestureModifiers & InputEvent.BUTTON1_DOWN_MASK) != 0) {
gestureModifiers |= InputEvent.BUTTON1_MASK;
}
if ((gestureModifiers & InputEvent.BUTTON2_DOWN_MASK) != 0) {
gestureModifiers |= InputEvent.BUTTON2_MASK;
}
if ((gestureModifiers & InputEvent.BUTTON3_DOWN_MASK) != 0) {
gestureModifiers |= InputEvent.BUTTON3_MASK;
}
if ((gestureModifiers & InputEvent.SHIFT_DOWN_MASK) != 0) {
gestureModifiers |= InputEvent.SHIFT_MASK;
}
if ((gestureModifiers & InputEvent.CTRL_DOWN_MASK) != 0) {
gestureModifiers |= InputEvent.CTRL_MASK;
}
if ((gestureModifiers & InputEvent.ALT_GRAPH_DOWN_MASK) != 0) {
gestureModifiers |= InputEvent.ALT_GRAPH_MASK;
}
}
}
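/*
 * Illustrative sketch (not part of the JDK source): a listener that reads the three
 * action views exposed by DragSourceDragEvent and adjusts the drag cursor. The class
 * name is made up; the calls used are the public API documented above.
 */
class DragActionCursorListener extends DragSourceAdapter {
    @Override
    public void dragOver(DragSourceDragEvent e) {
        int userAction    = e.getUserAction();     // action chosen by the user via modifier keys
        int targetActions = e.getTargetActions();  // action reported by the current drop target
        int effective     = e.getDropAction();     // targetActions & drag-source-supported actions
        if ((effective & DnDConstants.ACTION_COPY) != 0) {
            e.getDragSourceContext().setCursor(DragSource.DefaultCopyDrop);
        } else if (effective == DnDConstants.ACTION_NONE) {
            e.getDragSourceContext().setCursor(DragSource.DefaultCopyNoDrop);
        }
        // userAction and targetActions could additionally be logged or shown in the UI
    }
}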
|
|
package com.guhanjie.model;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
public class Order {
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Integer id;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.amount
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private BigDecimal amount;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.tip
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private BigDecimal tip;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.from_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Integer fromId;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.waypoints_ids
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private String waypointsIds;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.to_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Integer toId;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.distance
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private BigDecimal distance;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.vehicle
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Short vehicle;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.workers
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
private Integer workers;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.status
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Short status;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.user_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Integer userId;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.contactor
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private String contactor;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.phone
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private String phone;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.remark
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private String remark;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.source
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private String source;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.pay_id
*
* @mbggenerated Sun Oct 16 16:25:00 CST 2016
*/
private String payId;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.pay_type
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
private Short payType;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.pay_status
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
private Short payStatus;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.pay_time
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
private Date payTime;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.start_time
*
* @mbggenerated Sat Sep 10 14:15:30 CST 2016
*/
private Date startTime;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.create_time
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Date createTime;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column order.update_time
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
private Date updateTime;
//------------------------- custom add -----------------------------
private User user;
private Position from;
private Position to;
private List<Position> waypoints;
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public Position getFrom() {
return from;
}
public void setFrom(Position from) {
this.from = from;
}
public Position getTo() {
return to;
}
public void setTo(Position to) {
this.to = to;
}
public List<Position> getWaypoints() {
return waypoints;
}
public void setWaypoints(List<Position> waypoints) {
this.waypoints = waypoints;
}
//--------------------------------------------------------------------
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.id
*
* @return the value of order.id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Integer getId() {
return id;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.id
*
* @param id the value for order.id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setId(Integer id) {
this.id = id;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.amount
*
* @return the value of order.amount
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public BigDecimal getAmount() {
return amount;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.amount
*
* @param amount the value for order.amount
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setAmount(BigDecimal amount) {
this.amount = amount;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.tip
*
* @return the value of order.tip
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public BigDecimal getTip() {
return tip;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.tip
*
* @param tip the value for order.tip
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setTip(BigDecimal tip) {
this.tip = tip;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.from_id
*
* @return the value of order.from_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Integer getFromId() {
return fromId;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.from_id
*
* @param fromId the value for order.from_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setFromId(Integer fromId) {
this.fromId = fromId;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.waypoints_ids
*
* @return the value of order.waypoints_ids
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public String getWaypointsIds() {
return waypointsIds;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.waypoints_ids
*
* @param waypointsIds the value for order.waypoints_ids
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setWaypointsIds(String waypointsIds) {
this.waypointsIds = waypointsIds == null ? null : waypointsIds.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.to_id
*
* @return the value of order.to_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Integer getToId() {
return toId;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.to_id
*
* @param toId the value for order.to_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setToId(Integer toId) {
this.toId = toId;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.distance
*
* @return the value of order.distance
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public BigDecimal getDistance() {
return distance;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.distance
*
* @param distance the value for order.distance
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setDistance(BigDecimal distance) {
this.distance = distance;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.vehicle
*
* @return the value of order.vehicle
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Short getVehicle() {
return vehicle;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.vehicle
*
* @param vehicle the value for order.vehicle
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setVehicle(Short vehicle) {
this.vehicle = vehicle;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.workers
*
* @return the value of order.workers
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public Integer getWorkers() {
return workers;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.workers
*
* @param workers the value for order.workers
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public void setWorkers(Integer workers) {
this.workers = workers;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.status
*
* @return the value of order.status
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Short getStatus() {
return status;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.status
*
* @param status the value for order.status
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setStatus(Short status) {
this.status = status;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.user_id
*
* @return the value of order.user_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Integer getUserId() {
return userId;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.user_id
*
* @param userId the value for order.user_id
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setUserId(Integer userId) {
this.userId = userId;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.contactor
*
* @return the value of order.contactor
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public String getContactor() {
return contactor;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.contactor
*
* @param contactor the value for order.contactor
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setContactor(String contactor) {
this.contactor = contactor == null ? null : contactor.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.phone
*
* @return the value of order.phone
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public String getPhone() {
return phone;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.phone
*
* @param phone the value for order.phone
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setPhone(String phone) {
this.phone = phone == null ? null : phone.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.remark
*
* @return the value of order.remark
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public String getRemark() {
return remark;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.remark
*
* @param remark the value for order.remark
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setRemark(String remark) {
this.remark = remark == null ? null : remark.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.source
*
* @return the value of order.source
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public String getSource() {
return source;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.source
*
* @param source the value for order.source
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setSource(String source) {
this.source = source == null ? null : source.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.pay_id
*
* @return the value of order.pay_id
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public String getPayId() {
return payId;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.pay_id
*
* @param payId the value for order.pay_id
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public void setPayId(String payId) {
this.payId = payId;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.pay_type
*
* @return the value of order.pay_type
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public Short getPayType() {
return payType;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.pay_type
*
* @param payType the value for order.pay_type
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public void setPayType(Short payType) {
this.payType = payType;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.pay_status
*
* @return the value of order.pay_status
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public Short getPayStatus() {
return payStatus;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.pay_status
*
* @param payStatus the value for order.pay_status
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public void setPayStatus(Short payStatus) {
this.payStatus = payStatus;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.pay_time
*
* @return the value of order.pay_time
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public Date getPayTime() {
return payTime;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.pay_time
*
* @param payTime the value for order.pay_time
*
* @mbggenerated Sat Oct 01 20:49:39 CST 2016
*/
public void setPayTime(Date payTime) {
this.payTime = payTime;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.start_time
*
* @return the value of order.start_time
*
* @mbggenerated Sat Sep 10 14:15:30 CST 2016
*/
public Date getStartTime() {
return startTime;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.start_time
*
* @param startTime the value for order.start_time
*
* @mbggenerated Sat Sep 10 14:15:30 CST 2016
*/
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.create_time
*
* @return the value of order.create_time
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Date getCreateTime() {
return createTime;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.create_time
*
* @param createTime the value for order.create_time
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column order.update_time
*
* @return the value of order.update_time
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public Date getUpdateTime() {
return updateTime;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column order.update_time
*
* @param updateTime the value for order.update_time
*
* @mbggenerated Thu Sep 01 14:34:30 CST 2016
*/
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
}
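/*
 * Usage sketch (hypothetical, not generated by MyBatis): populating an Order before
 * handing it to a mapper. All field values are made up for illustration.
 */
class OrderExample {
    static Order sampleOrder() {
        Order order = new Order();
        order.setAmount(new BigDecimal("99.50"));
        order.setTip(new BigDecimal("5.00"));
        order.setContactor(" Zhang San ");   // trimmed to "Zhang San" by the setter
        order.setPhone("13800000000");
        order.setStatus((short) 0);
        order.setStartTime(new Date());
        order.setCreateTime(new Date());
        return order;
    }
}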
|
|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.spider;
import java.io.IOException;
import java.net.ConnectException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import javax.net.ssl.SSLException;
import net.htmlparser.jericho.Source;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.db.DatabaseException;
import org.parosproxy.paros.extension.history.ExtensionHistory;
import org.parosproxy.paros.model.HistoryReference;
import org.parosproxy.paros.network.HttpHeader;
import org.parosproxy.paros.network.HttpMalformedHeaderException;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.network.HttpRequestHeader;
import org.parosproxy.paros.network.HttpResponseHeader;
import org.zaproxy.zap.spider.filters.ParseFilter;
import org.zaproxy.zap.spider.filters.ParseFilter.FilterResult;
import org.zaproxy.zap.spider.parser.SpiderParser;
/**
* A SpiderTask represents a single crawling task performed during the spidering process.
*/
public class SpiderTask implements Runnable {
/** The parent spider. */
private Spider parent;
/**
* The history reference to the database record where the request message has been partially filled in.
* <p>
* Might be {@code null} if failed to create or persist the message, if the task was already executed or if a clean up was
* performed.
*
* @see #cleanup()
* @see #deleteHistoryReference()
* @see #prepareHttpMessage()
*/
private HistoryReference reference;
/** The depth of crawling where the uri was found. */
private int depth;
private ExtensionHistory extHistory = null;
/** The Constant log. */
private static final Logger log = Logger.getLogger(SpiderTask.class);
/**
* Instantiates a new spider task using the target URI. The purpose of this task is to crawl the given
* uri, using the provided method, find any other uris in the fetched resource and create other tasks.
*
*
* @param parent the spider controlling the crawling process
* @param uri the uri that this task should process
* @param depth the depth where this uri is located in the spidering process
* @param method the HTTP method that should be used to fetch the resource
*
*/
public SpiderTask(Spider parent, URI uri, int depth, String method) {
this(parent, null, uri, depth, method, null);
}
/**
* Instantiates a new spider task using the target URI. The purpose of this task is to crawl the given
* uri, using the provided method, find any other uris in the fetched resource and create other tasks.
*
* @param parent the spider controlling the crawling process
* @param sourceUri the URI where the given {@code uri} was found
* @param uri the uri that this task should process
* @param depth the depth where this uri is located in the spidering process
* @param method the HTTP method that should be used to fetch the resource
* @since 2.4.0
*/
public SpiderTask(Spider parent, URI sourceUri, URI uri, int depth, String method) {
this(parent, sourceUri, uri, depth, method, null);
}
/**
* Instantiates a new spider task using the target URI. The purpose of this task is to crawl the given
* uri, using the provided method, find any other uris in the fetched resource and create other tasks.
*
* <p>
* The body of the request message is also provided in the {@literal requestBody} parameter and will be
* used when fetching the resource from the specified uri.
* </p>
*
* @param parent the spider controlling the crawling process
* @param uri the uri that this task should process
* @param depth the depth where this uri is located in the spidering process
* @param method the HTTP method that should be used to fetch the resource
* @param requestBody the body of the request
*/
public SpiderTask(Spider parent, URI uri, int depth, String method, String requestBody) {
this(parent, null, uri, depth, method, requestBody);
}
/**
* Instantiates a new spider task using the target URI. The purpose of this task is to crawl the given
* uri, using the provided method, find any other uris in the fetched resource and create other tasks.
* <p>
* The body of the request message is also provided in the {@literal requestBody} parameter and will be
* used when fetching the resource from the specified uri.
*
* @param parent the spider controlling the crawling process
* @param sourceUri the URI where the given {@code uri} was found
* @param uri the uri that this task should process
* @param depth the depth where this uri is located in the spidering process
* @param method the HTTP method that should be used to fetch the resource
* @param requestBody the body of the request
* @since 2.4.0
*/
public SpiderTask(Spider parent, URI sourceUri, URI uri, int depth, String method, String requestBody) {
super();
this.parent = parent;
this.depth = depth;
// Log the new task
if (log.isDebugEnabled()) {
log.debug("New task submitted for uri: " + uri);
}
// Create a new HttpMessage that will be used for the request and persist it in the database using
// HistoryReference
try {
HttpRequestHeader requestHeader =
new HttpRequestHeader(method, uri, HttpHeader.HTTP11, parent.getConnectionParam());
if (sourceUri != null && parent.getSpiderParam().isSendRefererHeader()) {
requestHeader.setHeader(HttpRequestHeader.REFERER, sourceUri.toString());
}
HttpMessage msg = new HttpMessage(requestHeader);
if (requestBody != null) {
msg.getRequestHeader().setContentLength(requestBody.length());
msg.setRequestBody(requestBody);
}
this.reference = new HistoryReference(parent.getModel().getSession(), HistoryReference.TYPE_SPIDER_TASK,
msg);
} catch (HttpMalformedHeaderException e) {
log.error("Error while building HttpMessage for uri: " + uri, e);
} catch (DatabaseException e) {
log.error("Error while persisting HttpMessage for uri: " + uri, e);
}
}
@Override
public void run() {
if (reference == null) {
log.warn("Null URI. Skipping crawling task: " + this);
parent.postTaskExecution();
return;
}
// Log the task start
if (log.isDebugEnabled()) {
log.debug("Spider Task Started. Processing uri at depth " + depth
+ " using already constructed message: " + reference.getURI());
}
// Check if the spider should stop
if (parent.isStopped()) {
log.debug("Spider process is stopped. Skipping crawling task...");
deleteHistoryReference();
parent.postTaskExecution();
return;
}
// Check if the crawling process is paused and do any "before execution" processing
parent.preTaskExecution();
// Fetch the resource
HttpMessage msg;
try {
msg = prepareHttpMessage();
} catch (Exception e) {
log.error("Failed to prepare HTTP message: ", e);
parent.postTaskExecution();
return;
}
try {
fetchResource(msg);
} catch (Exception e) {
setErrorResponse(msg, e);
parent.notifyListenersSpiderTaskResult(new SpiderTaskResult(msg, getSkippedMessage("ioerror")));
// The exception was already logged in fetchResource, with the URL (which we don't have here)
parent.postTaskExecution();
return;
}
// Check if the spider should stop
if (parent.isStopped()) {
parent.notifyListenersSpiderTaskResult(new SpiderTaskResult(msg, getSkippedMessage("stopped")));
log.debug("Spider process is stopped. Skipping crawling task...");
parent.postTaskExecution();
return;
}
// Check if the crawling process is paused
parent.checkPauseAndWait();
// Check the parse filters to see if the resource should be skipped from parsing
for (ParseFilter filter : parent.getController().getParseFilters()) {
FilterResult filterResult = filter.filtered(msg);
if (filterResult.isFiltered()) {
if (log.isDebugEnabled()) {
log.debug(
"Resource [" + msg.getRequestHeader().getURI()
+ "] fetched, but will not be parsed due to a ParseFilter rule: "
+ filterResult.getReason());
}
parent.notifyListenersSpiderTaskResult(new SpiderTaskResult(msg, filterResult.getReason()));
parent.postTaskExecution();
return;
}
}
// Check if the spider should stop
if (parent.isStopped()) {
parent.notifyListenersSpiderTaskResult(new SpiderTaskResult(msg, getSkippedMessage("stopped")));
log.debug("Spider process is stopped. Skipping crawling task...");
parent.postTaskExecution();
return;
}
// Check if the crawling process is paused
parent.checkPauseAndWait();
if (depth < parent.getSpiderParam().getMaxDepth()) {
parent.notifyListenersSpiderTaskResult(new SpiderTaskResult(msg));
processResource(msg);
} else {
parent.notifyListenersSpiderTaskResult(new SpiderTaskResult(msg, getSkippedMessage("maxdepth")));
}
// Update the progress and check if the spidering process should stop
parent.postTaskExecution();
log.debug("Spider Task finished.");
}
private String getSkippedMessage(String key) {
return parent.getExtensionSpider().getMessages().getString("spider.task.message.skipped." + key);
}
/**
* Prepares the HTTP message to be sent to the target server.
* <p>
* The HTTP message is read from the database and set up with common headers (e.g. User-Agent) and properties (e.g. user).
*
* @return the HTTP message
* @throws HttpMalformedHeaderException if an error occurred while parsing the HTTP message read from the database
* @throws DatabaseException if an error occurred while reading the HTTP message from the database
*/
private HttpMessage prepareHttpMessage() throws HttpMalformedHeaderException, DatabaseException {
// Fetch the request message from the database
HttpMessage msg;
try {
msg = reference.getHttpMessage();
// The HistoryReference is about to be deleted, so there is no point in keeping a reference to it.
msg.setHistoryRef(null);
} finally {
deleteHistoryReference();
}
msg.getRequestHeader().setHeader(HttpHeader.IF_MODIFIED_SINCE, null);
msg.getRequestHeader().setHeader(HttpHeader.IF_NONE_MATCH, null);
// Check if there is a custom user agent
if (parent.getSpiderParam().getUserAgent() != null) {
msg.getRequestHeader().setHeader(HttpHeader.USER_AGENT, parent.getSpiderParam().getUserAgent());
}
// Check if there's a need to send the message from the point of view of a User
if (parent.getScanUser() != null) {
msg.setRequestingUser(parent.getScanUser());
}
return msg;
}
/**
* Deletes the history reference, should be called when no longer needed.
* <p>
* The call to this method has no effect if the history reference no longer exists (i.e. {@code null}).
*
* @see #reference
*/
private void deleteHistoryReference() {
if (reference == null) {
return;
}
if (getExtensionHistory() != null) {
getExtensionHistory().delete(reference);
reference = null;
}
}
private void setErrorResponse(HttpMessage msg, Exception cause) {
StringBuilder strBuilder = new StringBuilder(250);
if (cause instanceof SSLException) {
strBuilder.append(Constant.messages.getString("network.ssl.error.connect"));
strBuilder.append(msg.getRequestHeader().getURI().toString()).append('\n');
strBuilder.append(Constant.messages.getString("network.ssl.error.exception"))
.append(cause.getMessage())
.append('\n');
strBuilder.append(Constant.messages.getString("network.ssl.error.exception.rootcause"))
.append(ExceptionUtils.getRootCauseMessage(cause))
.append('\n');
strBuilder.append(
Constant.messages
.getString("network.ssl.error.help", Constant.messages.getString("network.ssl.error.help.url")));
strBuilder.append("\n\nStack Trace:\n");
for (String stackTraceFrame : ExceptionUtils.getRootCauseStackTrace(cause)) {
strBuilder.append(stackTraceFrame).append('\n');
}
} else {
strBuilder.append(cause.getClass().getName())
.append(": ")
.append(cause.getLocalizedMessage())
.append("\n\nStack Trace:\n");
for (String stackTraceFrame : ExceptionUtils.getRootCauseStackTrace(cause)) {
strBuilder.append(stackTraceFrame).append('\n');
}
}
String message = strBuilder.toString();
HttpResponseHeader responseHeader;
try {
responseHeader = new HttpResponseHeader("HTTP/1.1 400 ZAP IO Error");
responseHeader.setHeader(HttpHeader.CONTENT_TYPE, "text/plain; charset=UTF-8");
responseHeader
.setHeader(HttpHeader.CONTENT_LENGTH, Integer.toString(message.getBytes(StandardCharsets.UTF_8).length));
msg.setResponseHeader(responseHeader);
msg.setResponseBody(message);
} catch (HttpMalformedHeaderException e) {
log.error("Failed to create error response:", e);
}
}
/**
* Process a resource, searching for links (uris) to other resources.
*
* @param message the HTTP Message
*/
private void processResource(HttpMessage message) {
List<SpiderParser> parsers = parent.getController().getParsers();
// Prepare the Jericho source
Source source = new Source(message.getResponseBody().toString());
// Get the full path of the file
String path = null;
try {
path = message.getRequestHeader().getURI().getPath();
} catch (URIException e) {
// Ignore: the path stays null and is defaulted to "" below.
} finally {
// Handle null paths.
if (path == null)
path = "";
}
// Parse the resource
boolean alreadyConsumed = false;
for (SpiderParser parser : parsers) {
if (parser.canParseResource(message, path, alreadyConsumed)) {
if (log.isDebugEnabled()) log.debug("Parser "+ parser +" can parse resource '"+ path + "'");
if (parser.parseResource(message, source, depth))
alreadyConsumed = true;
} else {
if (log.isDebugEnabled()) log.debug("Parser "+ parser +" cannot parse resource '"+ path + "'");
}
}
}
private ExtensionHistory getExtensionHistory() {
if (this.extHistory == null) {
this.extHistory = (ExtensionHistory) Control.getSingleton().getExtensionLoader().getExtension(ExtensionHistory.NAME);
}
return this.extHistory;
}
/**
* Fetches a resource.
*
* @param msg the HTTP message that will be sent to the server
* @throws IOException Signals that an I/O exception has occurred.
*/
private void fetchResource(HttpMessage msg) throws IOException {
if (parent.getHttpSender() == null) {
return;
}
try {
parent.getHttpSender().sendAndReceive(msg);
} catch (ConnectException e) {
log.debug("Failed to connect to: " + msg.getRequestHeader().getURI(), e);
throw e;
} catch (SocketTimeoutException e) {
log.debug("Socket timeout: " + msg.getRequestHeader().getURI(), e);
throw e;
} catch (SocketException e) {
log.debug("Socket exception: " + msg.getRequestHeader().getURI(), e);
throw e;
} catch (UnknownHostException e) {
log.debug("Unknown host: " + msg.getRequestHeader().getURI(), e);
throw e;
} catch (Exception e) {
log.error("An error occurred while fetching the resource [" + msg.getRequestHeader().getURI() + "]: "
+ e.getMessage(), e);
throw e;
}
}
/**
* Cleans up the resources used by the task.
* <p>
* Should be called if the task was not executed.
*
* @since 2.5.0
*/
void cleanup() {
deleteHistoryReference();
}
}
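/*
 * Usage sketch (not part of the ZAP source): how a crawling task for a seed URI might
 * be created and handed to an executor. The Spider instance and the executor are
 * assumed to be fully initialized elsewhere; the class and method names are made up.
 */
class SpiderTaskUsageSketch {
    static void submitSeed(Spider spider, java.util.concurrent.ExecutorService pool) throws URIException {
        URI seed = new URI("http://example.com/", true);
        // Depth 0, plain GET, no request body.
        pool.execute(new SpiderTask(spider, seed, 0, HttpRequestHeader.GET));
    }
}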
|
|
/*
* Copyright (c) 2015 Dialogue Systems Group, University of Bielefeld
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package venice.lib.networkRSB;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.Message;
import rsb.Factory;
import rsb.InitializeException;
import rsb.Listener;
import rsb.RSBException;
import rsb.Informer;
import rsb.converter.DefaultConverterRepository;
import rsb.converter.ProtocolBufferConverter;
import venice.lib.AbstractSlot;
import venice.lib.AbstractSlotListener;
import venice.lib.Configuration;
import venice.lib.parser.XIOMaps;
/**
* Takes care of namespace- and slot-creating for the RSB network.
* <p>
* RSB-specific terminology:<br>
* In-slot is {@link rsb.Listener}.<br>
* Out-slot is {@link rsb.Informer}.<br>
* In this documentation the generic terminology is used, to be symmetric to
* the other NamespaceBuilders.
* <p>
* Scope consists of Namespace and Label. Namespace is the part before the last
* slash of the Scope and Label is the part after the last slash of the Scope.
* <p>
* For method parameters Scopes are handled without leading and trailing
* slashes (they will be added automatically when the RSBNamespaceBuilder
* interacts with the RSB network).
*/
public class RSBNamespaceBuilder{
private static Logger logger;
private static HashMap<String, Listener> inSlotMap = new HashMap<String, Listener>();
@SuppressWarnings("rawtypes")
private static HashMap<String, Informer> outSlotMap = new HashMap<String, Informer>();
private static boolean outSlotsPredefined;
private static AbstractSlotListener masterInSlotListener;
private static String prefix = "/";
private static boolean protobufInitialized = false;
private static String protobufDir = null;
private static String matchFilename = "match.xml";
private static String xiocodesFilename = null;
private static HashMap<String, Class<?>> primitiveTypeMap= new HashMap<>();
private static ArrayList<ClassMatcher> classMatcherList = new ArrayList<>();
private static HashMap<Class<?>, ClassMatcher> matchOtherToProtobufMap = new HashMap<>();
private static HashMap<Class<?>, ClassMatcher> matchProtobufToOtherMap = new HashMap<>();
static {
// setup logger
Configuration.setupLogger();
logger = Logger.getLogger(RSBNamespaceBuilder.class);
// create the map for primitive types
primitiveTypeMap.put("int", int.class);
primitiveTypeMap.put("float", float.class);
primitiveTypeMap.put("boolean", boolean.class);
primitiveTypeMap.put("string", String.class);
}
/**
* Private constructor, so no instances of this class can be made.
*/
private RSBNamespaceBuilder(){
// nothing
}
/**
* Creates in-slots and takes care of setting them up. If an array with
* {@link AbstractSlot}s is given, only those in-slots are created. If the
* array is <code>null</code> (or if the overloaded version without
* parameters is used), an in-slot is created that listens to the topmost
* namespace (so, only the prefix).<br>
* To every in-slot a handler will be added that informs the main
* application of new data (if the main application has registered a
* master-listener).
*
* @param predefinedSlots an ArrayList with predefined in-slots, or
* <code>null</code> if not needed
*/
public static void initializeInSlots(ArrayList<AbstractSlot> predefinedSlots){
if(!protobufInitialized) {
logger.warn("Warning: Protobuf is not initialized.");
}
Factory factory = Factory.getInstance();
if(predefinedSlots == null || predefinedSlots.size() == 0){
// if there is no array with predefined slots, use a listener
// that listens to the top level scope (so, only prefix)
try {
logger.info("creating listener for "+prefix);
Listener listener = factory.createListener(prefix);
listener.activate();
listener.addHandler(new RSBHandler(), true);
inSlotMap.put(prefix, listener);
} catch (RSBException | InterruptedException e) {
e.printStackTrace();
}
}
else{
// if an array with predefined slots is given, create corresponding listener
for(AbstractSlot abstrSlot: predefinedSlots){
try {
String fullLabel = "/" + abstrSlot.getScope();
logger.debug("creating RSB-Listener for "+fullLabel);
Listener listener = factory.createListener(fullLabel);
listener.activate();
listener.addHandler(new RSBHandler(), true);
inSlotMap.put(fullLabel, listener);
} catch (RSBException | InterruptedException e) {
e.printStackTrace();
}
}
}
}
/**
* Initializes in-slots in dynamic mode. See the parameterized version
* for more information. Essentially this method calls the
* parameterized version with <code>null</code> for the parameter.
*/
public static void initializeInSlots(){
initializeInSlots(null);
}
/**
* Creates out-slots (='informer') and takes care of setting them up. If
* an array with {@link AbstractSlot}s is given, only those out-slots are
* created. If the array is <code>null</code> (or if the overloaded
* version without parameters is used), out-slots are created dynamically
* on demand.
* <p>
* If using predefined out-slots, all data that is written to an unknown
* out-slot will be ignored.<p>
* If class matching pairs are given, out-slots will be created for the
* matching class (otherwise for the given type of the abstract slot).
*
* @param predefinedSlots an ArrayList with predefined out-slots, or
* <code>null</code> if not needed
*/
public static void initializeOutSlots(ArrayList<AbstractSlot> predefinedSlots){
if(!protobufInitialized) {
logger.warn("Warning: Protobuf is not initialized.");
return;
}
Factory factory = Factory.getInstance();
if(predefinedSlots == null || predefinedSlots.size() == 0){
// if there is no array with predefined out-slots, use dynamic Informer creation
outSlotsPredefined = false;
}
else{
// if an array with predefined out-slots is given, create corresponding RSB-Informers
outSlotsPredefined = true;
for(AbstractSlot abstrSlot: predefinedSlots){
try {
String rsbLabel = prefix + abstrSlot.getScope();
Class<?> type = abstrSlot.getType();
Class<?> informerType;
logger.debug("creating Informer for "+rsbLabel+" ("+type.getName()+")");
// first check, if this type is matched to a protobuf type
ClassMatcher cm = matchOtherToProtobufMap.get(type);
if(cm != null)
informerType = cm.target; // matched class
else
informerType = type; // original class
Informer<?> informer = factory.createInformer(rsbLabel, informerType);
informer.activate();
outSlotMap.put(abstrSlot.getScope(), informer);
logger.debug(" slot successfully created ("+informerType.getName()+")");
} catch (RSBException e) {
e.printStackTrace();
}
}
}
}
/**
* Initializes dynamic out-slot creation. New out-slots will be created
* on the root namespace (plus the prefix, if given).
* It is the same as calling <code>initializeOutSlots(null)</code>.
* See overloaded methods for more details.
*/
public static void initializeOutSlots(){
initializeOutSlots(null);
}
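/*
 * Usage sketch (hypothetical, not part of this class): an application that predefines
 * one in-slot and then relies on dynamic out-slot creation. The AbstractSlot setters
 * and the scope value are assumptions based on how this builder reads the slots.
 *
 *   ArrayList<AbstractSlot> inSlots = new ArrayList<>();
 *   AbstractSlot gazeSlot = new AbstractSlot();        // assumed no-arg constructor
 *   gazeSlot.setScope("sensors/gaze");                 // scope without leading/trailing slashes
 *   gazeSlot.setType(String.class);
 *   inSlots.add(gazeSlot);
 *
 *   RSBNamespaceBuilder.initializeProtobuf();          // load class matches and protobuf classes first
 *   RSBNamespaceBuilder.initializeInSlots(inSlots);    // listeners for the predefined scopes
 *   RSBNamespaceBuilder.initializeOutSlots();          // informers are created dynamically on demand
 */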
/**
* Parses class match entries from the XML file.
*
* @return NamedPairList with class matches found in XML file
*/
private static NamedPairList parseMatches(){
if(matchFilename == null) return null;
File matchFile = new File(matchFilename);
if(!matchFile.exists()){
logger.error("Error: MatchFile '"+matchFilename+"' doesn't exist.");
return null;
}
if(!matchFile.isFile()){
logger.error("Error: MatchFile '"+matchFilename+"' is not a file.");
return null;
}
DocumentBuilder builder = null;
try {
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
} catch (ParserConfigurationException e) {
e.printStackTrace();
}
if(builder == null) return null; // DocumentBuilder doesn't want to work
Document document=null;
try {
document = builder.parse(matchFile);
} catch (SAXException | IOException e) {
e.printStackTrace();
}
if(document==null) return null; // XML Document can't be parsed
Element primeElement = document.getDocumentElement();
NodeList matchList = primeElement.getElementsByTagName("match");
NamedPairList namedPairList = new NamedPairList();
for(int iMatch=0; iMatch<matchList.getLength(); iMatch++){
NamedPair namedPair = new NamedPair();
Node match = matchList.item(iMatch);
namedPair.setSourceName(findAttr("from", match));
namedPair.setTargetName(findAttr("to", match));
if(findAttr("repeated", match) != null && findAttr("repeated", match).equals("true") )
namedPair.setRepeated(true);
for(Node pair: findChildElements(match, "methodpair")){
namedPair.addMethodPair(
findAttr("getter", pair),
findAttr("setter", pair),
findAttr("type", pair)
);
}
for(Node constr: findChildElements(match, "constructor")){
// first check if there is a single parameter as an attribute
String singleTypeName = findAttr("parameter", constr);
if(singleTypeName != null){
// if there is a single parameter as an attribute
namedPair.setConstructorWithSingleParam(singleTypeName);
}
else{
// if there are parameter nodes (instead of a single parameter-attribute)
for(Node param: findChildElements(constr, "parameter")){
String typeName = findAttr("type", param);
String indexName = findAttr("index", param);
int index = Integer.parseInt(indexName);
namedPair.addConstructorParam(index, typeName);
}
}
}
for(Node gn: findChildElements(match, "getter")){
String getterName = findAttr("name", gn);
String indexName = findAttr("index", gn);
int index = 0;
if(indexName != null) index = Integer.parseInt(indexName);
namedPair.setGetterName(index, getterName);
}
namedPairList.add(namedPair);
}
logger.info("matching class pairs for RSB:");
for(int i=0; i<namedPairList.size(); i++){
NamedPair np = namedPairList.get(i);
logger.info(" - "+np);
if(np.isUsingConstructor()){
String s = "Constructor "+np.getTargetName()+"(";
for(int j=0; j<np.numOfFields(); j++){
if(j>0) s+=", ";
s+=np.getTypeName(j);
}
s += ")";
logger.debug(" > "+s);
for(int j=0; j<np.numOfFields(); j++){
logger.debug(" > getter "+j+" "+np.getGetterName(j)+"("+np.getTypeName(j)+")");
}
}
else{
for(int j=0; j<np.numOfFields(); j++){
logger.debug(" > getter "+np.getGetterName(j)+"("+np.getTypeName(j)+") ->" +
" setter "+np.getSetterName(j)+"("+np.getTypeName(j)+")");
}
}
}
return namedPairList;
}
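/*
 * Example of the match file structure this parser expects (values are hypothetical;
 * the element and attribute names are the ones read by parseMatches(); the name of
 * the root element is not checked and is therefore an assumption):
 *
 *   <matches>
 *     <match from="some.lib.Vector3" to="protobuf.Vector3Msg$Vector3" repeated="false">
 *       <methodpair getter="getX" setter="setX" type="float"/>
 *       <methodpair getter="getY" setter="setY" type="float"/>
 *       <methodpair getter="getZ" setter="setZ" type="float"/>
 *     </match>
 *   </matches>
 *
 * Instead of methodpair elements, a match may declare a constructor (either with a
 * single "parameter" attribute or with nested <parameter type="..." index="..."/>
 * elements) plus <getter name="..." index="..."/> elements, as handled above.
 */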
/**
* Finds an attribute in an XML element.
* @param attr The attribute to find.
* @param element The element where to search.
* @return The value of the attribute, or <code>null</code> if the
* attribute was not found.
*/
private static String findAttr(String attr, Node element){
String name = null;
if(element.getAttributes() != null)
if(element.getAttributes().getNamedItem(attr) != null)
name = element.getAttributes().getNamedItem(attr).getNodeValue();
return name;
}
/**
* Finds child elements with a given name in an XML element.
* @param parent The parent element.
* @param childName The name of the child element to find.
* @return A list with found child elements.
*/
private static ArrayList<Node> findChildElements(Node parent, String childName){
ArrayList<Node> list = new ArrayList<Node>();
for(int i=0; i<parent.getChildNodes().getLength(); i++)
if(parent.getChildNodes().item(i).getNodeType() == Node.ELEMENT_NODE)
if( parent.getChildNodes().item(i).getNodeName().equalsIgnoreCase(childName) )
list.add(parent.getChildNodes().item(i));
return list;
}
/**
* Initializes protobuf classes.<br>
* Parses the class matcher file.<br>
* Loads protobuf classes.<br>
* Creates entries for XIOMaps (class <-> XIO tag)<br>
*/
public static void initializeProtobuf(){
if(protobufDir == null){
protobufInitialized = true;
return;
}
NamedPairList npl = parseMatches();
loadProtobufClasses(npl);
XIOMaps.loadXIOCodes(xiocodesFilename);
protobufInitialized = true;
}
/**
* Loads protobuf classes.
*
* @param namedPairList List with class-to-class matches
*/
private static void loadProtobufClasses(NamedPairList namedPairList){
File protobufPath = new File(protobufDir);
File dirForClassLoader = null;
if(protobufPath.isDirectory()){
dirForClassLoader = protobufPath.getParentFile();
}
else{
logger.error("Error: Given protobuf path doesn't exist or is not a directory: "+protobufPath);
return;
}
URL url;
try {
url = dirForClassLoader.toURI().toURL();
} catch (MalformedURLException e) {
e.printStackTrace();
return;
}
URL[] urls = new URL[]{url};
ClassLoader classLoader = new URLClassLoader(urls);
XIOMaps.setAdditionalClassLoader(classLoader);
File[] fileList = protobufPath.listFiles();
if(fileList == null){
logger.error("Error: Could not list files in protobuf directory: "+protobufPath);
return;
}
for(int i=0; i<fileList.length; i++){
String filename = fileList[i].getName();
String ending = ".class";
if(filename.endsWith(ending) && !filename.contains("$")){
String primeClassname = filename.substring(0, filename.length() - ending.length());
logger.debug("loading "+primeClassname+" from "+filename);
Class<?> primeClass;
try {
primeClass = classLoader.loadClass("protobuf."+primeClassname);
logger.debug(" primeClass: " + primeClass.getName());
} catch (ClassNotFoundException e) {
logger.error("Can not load class protobuf."+primeClassname);
e.printStackTrace();
continue;
}
Method getDescriptor = null;
try {
getDescriptor = primeClass.getMethod("getDescriptor");
} catch (NoSuchMethodException | SecurityException e) {
logger.error("Error: Method getDescriptor not found for "+primeClass.getName());
continue;
}
if(getDescriptor == null){
logger.error("Error: Found no Descriptor for "+primeClass.getName());
continue;
}
FileDescriptor fileDescriptor = null;
try {
fileDescriptor = (FileDescriptor) getDescriptor.invoke(primeClass);
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException e) {
logger.error("Error: Can't invoke "+getDescriptor.getName()+" on "+primeClass.getName());
continue;
}
if(fileDescriptor == null){
logger.error("Error: Found no FileDescriptor for "+primeClass.getName());
continue;
}
// assumes only one message type is defined per proto file, so index 0 is used
String typeName = fileDescriptor.getMessageTypes().get(0).getName();
String typeFullName = "protobuf."+primeClassname+"$"+typeName;
Class<?> typeClass = null;
try {
typeClass = classLoader.loadClass(typeFullName);
} catch (ClassNotFoundException e) {
logger.error("Error: ClassLoader failed to load "+typeFullName);
continue;
}
if(typeClass == null){
logger.error("Error: "+typeFullName+" was not loaded!");
continue;
}
Method newBuilderMethod = null;
try {
newBuilderMethod = typeClass.getMethod("newBuilder");
} catch (NoSuchMethodException | SecurityException e1) {
logger.error("Error: Method newBuilder() not found");
continue;
}
if(newBuilderMethod == null){
logger.error("Error: Method newBuilder() not found");
continue;
}
Class<?> builderClass = null;
Class<?>[] subClasses = typeClass.getClasses();
for(int s=0; s<subClasses.length; s++){
if(subClasses[s].getName().endsWith(typeName+"$Builder")){
builderClass = subClasses[s];
}
}
if(builderClass == null){
logger.error("builderclass not found");
continue;
}
Object builderInstance = null;
try {
builderInstance = newBuilderMethod.invoke(typeClass);
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException e2) {
logger.error("Error: Failed to invoke newBuilder() for "+typeClass.getName());
e2.printStackTrace();
}
if(builderInstance == null){
logger.error("Error: Failed to create an instance of builder for "+typeClass.getName());
continue;
}
Method buildMethod = null;
try {
buildMethod = builderClass.getMethod("build");
} catch (NoSuchMethodException | SecurityException e1) {
logger.error("Error: Method build() not found.");
continue;
}
if(buildMethod == null){
logger.error("Error: Method build() not found.");
continue;
}
// register this class in protobuf message converter
Method getDefaultInstance;
try {
getDefaultInstance = typeClass.getMethod("getDefaultInstance");
} catch (NoSuchMethodException | SecurityException e) {
logger.error("Error: Can not access method from "+typeClass.getName());
e.printStackTrace();
continue;
}
ProtocolBufferConverter<?> conv;
try {
conv = new ProtocolBufferConverter((Message)getDefaultInstance.invoke(typeClass));
DefaultConverterRepository.getDefaultConverterRepository().addConverter(conv);
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException e) {
logger.error("Can not register converter for "+typeClass.getName());
e.printStackTrace();
continue;
}
logger.info("registering "+typeClass.getName()+" for RSB");
// this protobuf class as a
// TARGET
if(namedPairList != null && namedPairList.size() > 0){
NamedPair namedPairT = namedPairList.findNamedPairForTarget(typeClass.getCanonicalName());
if(namedPairT == null){
logger.error("Error: Abstract name pair not found for "+typeClass.getCanonicalName()+" as target");
for(int iP = 0; iP<namedPairList.size(); iP++){
if(iP == 0)
logger.error("Pairs are: - "+namedPairList.get(iP));
else
logger.error(" - "+namedPairList.get(iP));
}
continue;
}
logger.debug("Working on pair '"+namedPairT+"'");
Method[] setterMethodArray = new Method[namedPairT.numOfFields()];
Class<?>[] typeArray = new Class[namedPairT.numOfFields()];
boolean somethingMissing = false;
for( int iField=0; iField<namedPairT.numOfFields(); iField++){
String setterName = namedPairT.getSetterName(iField);
if(setterName == null){
logger.error("Error: Name for a setter-method not found.");
somethingMissing = true;
break;
}
typeArray[iField] = primitiveTypeMap.get(namedPairT.getTypeName(iField));
if(typeArray[iField] == null){
logger.error("Error: Class for type "+namedPairT.getTypeName(iField)+" not found.");
logger.debug("Known Classes are:");
for(Map.Entry<String, Class<?>> me : primitiveTypeMap.entrySet()){
logger.debug(" - "+me.getKey());
}
somethingMissing = true;
break;
}
try {
setterMethodArray[iField] = builderClass.getMethod(setterName, typeArray[iField]);
} catch (NoSuchMethodException | SecurityException e) {
logger.error("Error: Method "+setterName+" for Class "+builderClass+" not found.");
somethingMissing = true;
break;
}
}
if(somethingMissing) continue;
Method resetter = null;
try {
resetter = builderClass.getMethod("clear");
} catch (NoSuchMethodException | SecurityException e1) {
logger.error("Error: Method 'clear' not found for "+builderClass.getName());
}
Class<?> sourceClass = null;
try {
sourceClass = Class.forName(namedPairT.getSourceName());
} catch (ClassNotFoundException e) {
logger.error("Error: Source class "+namedPairT.getSourceName()+" not found.");
continue;
}
Method[] getterMethodArray = new Method[namedPairT.numOfFields()];
somethingMissing = false;
for(int iM = 0; iM<namedPairT.numOfFields(); iM++){
try {
if(namedPairT.isRepeated()){
Class<?> componentClass = sourceClass.getComponentType();
getterMethodArray[iM] = componentClass.getMethod(namedPairT.getGetterName(iM));
}
else{
getterMethodArray[iM] = sourceClass.getMethod(namedPairT.getGetterName(iM));
}
} catch (NoSuchMethodException | SecurityException e) {
logger.error("Error: Can't find getter method "+namedPairT.getGetterName(iM)+" for source class "+sourceClass.getName());
somethingMissing = true;
break;
}
}
if(somethingMissing) continue;
logger.debug("Parameters for "+typeClass.getName()+" as TARGET");
logger.debug(" source class = "+sourceClass.getName());
for(int iM=0; iM<getterMethodArray.length; iM++){
if(iM == 0)
logger.debug(" getters: - "+getterMethodArray[iM].getName());
else
logger.debug(" - "+getterMethodArray[iM].getName());
}
logger.debug(" target class = "+typeClass.getName());
logger.debug(" builder class = "+builderClass.getName());
for(int iS=0; iS<typeArray.length; iS++){
if(iS == 0)
logger.debug(" setters: - "+setterMethodArray[iS].getName());
else
logger.debug(" - "+setterMethodArray[iS].getName());
}
logger.debug(" build method: "+buildMethod.getName());
for(int iT=0; iT<typeArray.length; iT++){
if(iT == 0)
logger.debug(" types: - "+typeArray[iT].getName());
else
logger.debug(" - "+typeArray[iT].getName());
}
ClassMatcher cm = new ClassMatcher(
sourceClass,
typeClass,
builderInstance,
resetter,
getterMethodArray,
setterMethodArray,
buildMethod,
typeArray,
namedPairT.isRepeated());
classMatcherList.add(cm);
matchOtherToProtobufMap.put(sourceClass, cm);
// this protobuf class as a
// SOURCE
NamedPair namedPairS = namedPairList.findNamedPairForSource(typeClass.getCanonicalName());
if(namedPairS == null){
logger.error("Error: Abstract name pair not found for "+typeClass.getCanonicalName()+" as source");
for(int iP = 0; iP<namedPairList.size(); iP++){
if(iP == 0)
logger.debug("Pairs are: - "+namedPairList.get(iP));
else
logger.debug(" - "+namedPairList.get(iP));
}
continue;
}
logger.debug("Working on pair '"+namedPairS+"'");
Class<?> targetClass = null;
try {
targetClass = Class.forName(namedPairS.getTargetName());
} catch (ClassNotFoundException e) {
logger.error("Error: Target class "+namedPairS.getTargetName()+" not found.");
continue;
}
getterMethodArray = new Method[namedPairS.numOfFields()];
somethingMissing = false;
for(int iM = 0; iM<namedPairS.numOfFields(); iM++){
try {
if(namedPairS.isRepeated()){
getterMethodArray[iM] = typeClass.getMethod(namedPairS.getGetterName(iM), int.class);
}
else{
getterMethodArray[iM] = typeClass.getMethod(namedPairS.getGetterName(iM));
}
} catch (NoSuchMethodException | SecurityException e) {
logger.error("Error: Can't find getter method "+namedPairS.getGetterName(iM)+" for "+typeClass.getName());
somethingMissing = true;
break;
}
}
if(somethingMissing) continue;
Constructor<?> constructor = null;
try {
if(namedPairS.isRepeated()){
constructor = targetClass.getComponentType().getConstructor(typeArray);
}
else{
constructor = targetClass.getConstructor(typeArray);
}
} catch (NoSuchMethodException | SecurityException e) {
logger.error("Error: Constructor for "+targetClass.getName()+" not found.");
continue;
}
logger.debug("Parameters for "+typeClass.getName()+" as SOURCE");
logger.debug(" source class = "+typeClass.getName());
for(int iM=0; iM<getterMethodArray.length; iM++){
if(iM == 0)
logger.debug(" getters: - "+getterMethodArray[iM].getName());
else
logger.debug(" - "+getterMethodArray[iM].getName());
}
logger.debug(" target class = "+targetClass.getName());
logger.debug(" constructor = "+constructor.getName());
for(int iT=0; iT<typeArray.length; iT++){
if(iT == 0)
logger.debug(" types: - "+typeArray[iT].getName());
else
logger.debug(" - "+typeArray[iT].getName());
}
cm = new ClassMatcher(
typeClass,
targetClass,
getterMethodArray,
constructor,
typeArray,
namedPairS.isRepeated());
classMatcherList.add(cm);
matchProtobufToOtherMap.put(typeClass, cm);
} // end if namedPairList
} // end if filename
}
protobufInitialized = true;
}
/**
* Removes all in-slots and out-slots.
*/
public static void removeAll(){
// remove all in-slots
for(Map.Entry<String, Listener> lis: inSlotMap.entrySet()){
try {
lis.getValue().deactivate();
} catch (RSBException | InterruptedException e) {
e.printStackTrace();
}
}
// remove all out-slots
for(@SuppressWarnings("rawtypes") Map.Entry<String, Informer> inf: outSlotMap.entrySet()){
try {
inf.getValue().deactivate();
} catch (RSBException | InterruptedException e) {
e.printStackTrace();
}
}
}
/**
* Sends data to the outslot with the given label and namespace.
* If that outslot doesn't exist, it will be created (dynamic mode)
* or ignored (predefined mode).
* <p>
* The concatenation of the Namespace, a slash, and the Label gives
* the Scope.
* <p>
* Example:<br>
* Namespace <code>'Venice/test'</code> and Label <code>'slotA'</code>
* gives the scope <code>'Venice/test/slotA'</code>.
*
* @param label Label of the outslot (w/o leading or trailing slash)
* @param data the data to be sent (class should match the datatype of the
* outslot)
* @param namespace Namespace of the outslot (w/o leading or trailing slash)
* @return <code>true</code> if the data was sent without problems,
* otherwise <code>false</code>
*/
public static boolean write(String label, Object data, String namespace){
return write(namespace + "/" + label, data);
}
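/*
 * Usage sketch: sending a String to the outslot whose scope is the configured
 * prefix followed by 'Venice/test/slotA' (the names reuse the example from the
 * Javadoc above):
 *
 *   write("slotA", "hello", "Venice/test");   // equivalent to write("Venice/test/slotA", "hello")
 */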
/**
* Sends given data to the given outslot (='informer'). If that outslot does
* not exist, it will be created (dynamic mode) or ignored (predefined mode).
* <p>
* A Scope consists of a Namespace and a Label. The part before the last
* slash is the Namespace and the part after the last slash is the Label.
* <p>
* Example:<br>
* The scope <code>'Venice/test/slotA'</code> consists of the Namespace
* <code>'Venice/test'</code> and the Label <code>'slotA'</code>.
*
* @param scope the scope of the outslot (='informer'), w/o leading or trailing slash
* @param data the data to be sent (class should match the datatype of the
* outslot)
* @return <code>true</code> if the data was sent without problems,
* otherwise <code>false</code>
*/
public static boolean write(String scope, Object data){
logger.debug("(write) got data to write, type: "+data.getClass().getName());
if(data.getClass().getName().startsWith("protobuf")){
Message m = (Message) data;
List<FieldDescriptor> fdList = m.getDescriptorForType().getFields();
for(FieldDescriptor fd : fdList){
logger.debug(" field: "+fd.getName());
if(fd.isRepeated()){
int nField = m.getRepeatedFieldCount(fd);
for(int iField=0; iField<nField; iField++){
Object value = m.getRepeatedField(fd, iField);
logger.debug(" value "+iField+": "+value);
}
}
else{
Object value = m.getField(fd);
logger.debug(" value: "+value);
}
}
}
// first, try to find a slot with the given scope in the map
@SuppressWarnings("rawtypes")
Informer informer = outSlotMap.get(scope);
if(informer == null && !outSlotsPredefined){
// if no outslot with the given scope was found
// in dynamic mode, create missing outslot
try {
informer = Factory.getInstance().createInformer(prefix + scope);
informer.activate();
outSlotMap.put(scope, informer);
} catch (InitializeException e) {
e.printStackTrace();
}
}
if(informer == null){
// this will happen, if no outslot with this name exists and
// RSB is used in predefined mode
return false; // do nothing
}
else{
// this will happen, if
// - outslot is found, or
// - outslot is not found, but created (because of dynamic mode)
Class<?> type = data.getClass();
if(type == String.class)
try {
logger.debug("(write) sending: "+data);
informer.send(data);
} catch (RSBException e) {
logger.error("Failed to send "+data.toString()+" ("+data.getClass().getName()+")"+" to "+prefix+scope+" (RSBException)");
return false;
}
else{
logger.debug("processing non-string");
ClassMatcher cm = matchOtherToProtobufMap.get(type);
if(cm == null){
logger.debug("no cm");
// no matching class found, try to send data directly
try {
informer.send(data);
} catch (RSBException e) {
logger.error("Failed to send "+data.toString()+" ("+data.getClass().getName()+")"+" to "+prefix+scope+" (RSBException)");
return false;
}
}
else{
// matching class found, convert data to target class
logger.debug("cm available");
if(cm.isRepeated){
// data is a
// MULTIFIELD
Object[] dataArray = (Object[]) data;
logger.debug("(write) detected multifield with "+dataArray.length+" fields.");
cm.resetBuilder();
for(int iA=0; iA<dataArray.length; iA++){
logger.debug("(write) working on field "+iA);
for(int iV=0; iV<cm.type.length; iV++){
logger.debug("(write) working on value "+iV);
Object value;
try {
value = cm.getter[iV].invoke(dataArray[iA]);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e1) {
logger.error("Error: Failed to get value.");
return false;
}
try {
cm.setter[iV].invoke(cm.builderInstance, value);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
logger.error("Error: Failed to set value.");
e.printStackTrace();
return false;
}
}
}
try {
informer.send(cm.build.invoke(cm.builderInstance));
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | RSBException e) {
logger.error("Error: Failed to send data to informer.");
return false;
}
}
else{
// data is a
// SINGLEFIELD
for(int i=0; i<cm.type.length; i++){
Object value;
try {
value = cm.getter[i].invoke(data);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e1) {
logger.error("Error: Failed to get value.");
return false;
}
//logger.debug(" value["+i+"]="+value+" ("+cm.type[i].getName()+")");
try {
cm.setter[i].invoke(cm.builderInstance, value);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
logger.error("Error: Failed to set value.");
e.printStackTrace();
return false;
}
}
try {
informer.send(cm.build.invoke(cm.builderInstance));
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | RSBException e) {
logger.error("Error: Failed to send data to informer.");
return false;
}
}
}
}
return true;
}
}
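/*
 * A non-String object is converted before sending if a ClassMatcher was
 * registered for its class during initializeProtobuf(); otherwise it is handed
 * to the informer unchanged. Hedged sketch (MyVector is a placeholder for an
 * application class that appears as a source in the match file):
 *
 *   write("Venice/test/slotB", new MyVector(1.0f, 2.0f));
 *   // -> looked up in matchOtherToProtobufMap, copied field by field into the
 *   //    protobuf builder via the matched getters/setters, then built and sent
 */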
/**
* Here the application can register its listener to receive incoming data.
* @param inSlotListener A listener object, which will receive incoming data.
*/
public static void setMasterInSlotListener(AbstractSlotListener inSlotListener){
masterInSlotListener = inSlotListener;
}
/**
* @return The registered listener that receives incoming data.
*/
public static AbstractSlotListener getMasterInSlotListener(){
return masterInSlotListener;
}
/**
* Sets a prefix that is prepended to the namespaces of all slots created by
* this NamespaceBuilder. This is especially useful for dynamic slot creation
* if the slots should not be created on the root, but on a namespace
* given by the application.
* @param prfx The prefix (a leading and a trailing slash are added if missing)
*/
public static void setPrefix(String prfx){
prefix = prfx;
if(!prefix.startsWith("/")) prefix = "/" + prefix;
if(!prefix.endsWith("/")) prefix += "/";
logger.info("prefix set to "+prefix);
}
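/*
 * Example (sketch): the prefix is normalized to exactly one leading and one
 * trailing slash, so
 *
 *   setPrefix("Venice");        // -> prefix == "/Venice/"
 *   setPrefix("/Venice/lab");   // -> prefix == "/Venice/lab/"
 *
 * and every scope passed to write() is then resolved below this prefix.
 */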
/**
* Sets the path to the protobuf folder.
* @param dir A path to the protobuf folder.
*/
public static void setProtobufDir(String dir){
protobufDir = dir;
}
/**
* Sets the name of an XML file with class-to-class matching
* definitions.
* @param filename Filename (can include path)
*/
public static void setMatchFile(String filename){
matchFilename = filename;
}
/**
* Gets the map of matches between protobuf classes and other classes.
* @return Map with class-matching-definitions
*/
public static HashMap<Class<?>, ClassMatcher> getMatchProtobufToOtherMap(){
return matchProtobufToOtherMap;
}
/**
* Sets the name of an XML file with XIO code mapping.
* @param filename A filename (can include path)
*/
public static void setXioCodesFilename(String filename){
xiocodesFilename = filename;
}
/**
* Returns the name of the file with XIO code mapping.
* @return Name of the file with XIO code mapping
*/
public static String getXioCodesFilename(){
return xiocodesFilename;
}
}