gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Copyright 5AM Solutions Inc, ESAC, ScenPro & SAIC * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/caintegrator/LICENSE.txt for details. */ package gov.nih.nci.caintegrator.data; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import gov.nih.nci.caintegrator.application.query.InvalidCriterionException; import gov.nih.nci.caintegrator.application.study.AnnotationTypeEnum; import gov.nih.nci.caintegrator.application.study.ImageDataSourceConfiguration; import gov.nih.nci.caintegrator.application.study.StudyConfiguration; import gov.nih.nci.caintegrator.domain.annotation.AnnotationDefinition; import gov.nih.nci.caintegrator.domain.annotation.NumericAnnotationValue; import gov.nih.nci.caintegrator.domain.annotation.PermissibleValue; import gov.nih.nci.caintegrator.domain.annotation.StringAnnotationValue; import gov.nih.nci.caintegrator.domain.annotation.SubjectAnnotation; import gov.nih.nci.caintegrator.domain.application.CopyNumberAlterationCriterion; import gov.nih.nci.caintegrator.domain.application.CopyNumberCriterionTypeEnum; import gov.nih.nci.caintegrator.domain.application.EntityTypeEnum; import gov.nih.nci.caintegrator.domain.application.GenomicIntervalTypeEnum; import gov.nih.nci.caintegrator.domain.application.NumericComparisonCriterion; import gov.nih.nci.caintegrator.domain.application.NumericComparisonOperatorEnum; import gov.nih.nci.caintegrator.domain.application.SegmentBoundaryTypeEnum; import gov.nih.nci.caintegrator.domain.application.SelectedValueCriterion; import gov.nih.nci.caintegrator.domain.application.StringComparisonCriterion; import gov.nih.nci.caintegrator.domain.application.StudySubscription; import gov.nih.nci.caintegrator.domain.application.UserWorkspace; import gov.nih.nci.caintegrator.domain.application.WildCardTypeEnum; import gov.nih.nci.caintegrator.domain.genomic.ArrayData; import 
gov.nih.nci.caintegrator.domain.genomic.ArrayDataType; import gov.nih.nci.caintegrator.domain.genomic.ChromosomalLocation; import gov.nih.nci.caintegrator.domain.genomic.Gene; import gov.nih.nci.caintegrator.domain.genomic.GeneChromosomalLocation; import gov.nih.nci.caintegrator.domain.genomic.GeneExpressionReporter; import gov.nih.nci.caintegrator.domain.genomic.GeneLocationConfiguration; import gov.nih.nci.caintegrator.domain.genomic.GenomeBuildVersionEnum; import gov.nih.nci.caintegrator.domain.genomic.Platform; import gov.nih.nci.caintegrator.domain.genomic.ReporterList; import gov.nih.nci.caintegrator.domain.genomic.ReporterTypeEnum; import gov.nih.nci.caintegrator.domain.genomic.Sample; import gov.nih.nci.caintegrator.domain.genomic.SampleAcquisition; import gov.nih.nci.caintegrator.domain.genomic.SegmentData; import gov.nih.nci.caintegrator.domain.imaging.ImageSeries; import gov.nih.nci.caintegrator.domain.translational.Study; import gov.nih.nci.caintegrator.domain.translational.StudySubjectAssignment; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.transaction.annotation.Transactional; /** * caIntegrator dao integration tests. * * @author Abraham J. 
Evans-EL <[email protected]> */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration("classpath:integration-test-config.xml") @Transactional public class CaIntegrator2DaoTestIntegration { @Autowired private CaIntegrator2Dao dao; @Test public void testGetWorkspace() { UserWorkspace workspace = new UserWorkspace(); workspace.setUsername("username"); dao.save(workspace); UserWorkspace workspace2 = this.dao.getWorkspace("username"); assertEquals(workspace.getId(), workspace2.getId()); } @Test public void testRetrieveAllSubscribedWorkspaces() { Study study = new Study(); Study study2 = new Study(); UserWorkspace workspace1 = new UserWorkspace(); StudySubscription subscription1 = new StudySubscription(); subscription1.setStudy(study); workspace1.getSubscriptionCollection().add(subscription1); UserWorkspace workspace2 = new UserWorkspace(); StudySubscription subscription2 = new StudySubscription(); subscription2.setStudy(study); workspace2.getSubscriptionCollection().add(subscription2); UserWorkspace workspace3 = new UserWorkspace(); StudySubscription subscription3 = new StudySubscription(); subscription3.setStudy(study2); workspace3.getSubscriptionCollection().add(subscription3); dao.save(study); dao.save(study2); dao.save(workspace1); dao.save(workspace2); dao.save(workspace3); assertEquals(2, dao.retrieveAllSubscribedWorkspaces(study).size()); assertEquals(1, dao.retrieveAllSubscribedWorkspaces(study2).size()); } @Test public void testSave() { StudyConfiguration studyConfiguration1 = new StudyConfiguration(); Study study1 = studyConfiguration1.getStudy(); study1.setLongTitleText("longTitleText"); study1.setShortTitleText("shortTitleText"); assertNull(studyConfiguration1.getId()); assertNull(study1.getId()); dao.save(studyConfiguration1); assertNotNull(studyConfiguration1.getId()); assertNotNull(study1.getId()); dao.refresh(studyConfiguration1); StudyConfiguration studyConfiguration2 = dao.get(studyConfiguration1.getId(), StudyConfiguration.class); Study study2 
= studyConfiguration2.getStudy(); assertEquals(studyConfiguration2, study2.getStudyConfiguration()); assertEquals(study1.getShortTitleText(), study2.getShortTitleText()); assertEquals(study1.getLongTitleText(), study2.getLongTitleText()); assertEquals(study1, study2); assertEquals(studyConfiguration1, studyConfiguration2); } @Test public void testFindMatches() { // First load 2 AnnotationFieldDescriptors. AnnotationDefinition afd = new AnnotationDefinition(); afd.setKeywords("congestive heart failure"); afd.setDisplayName("Congestive Heart Failure"); dao.save(afd); AnnotationDefinition afd2 = new AnnotationDefinition(); afd2.setKeywords("congestive"); afd2.setDisplayName("Congestive"); dao.save(afd2); AnnotationDefinition afd3 = new AnnotationDefinition(); afd3.setKeywords("congestive failure"); afd3.setDisplayName("Congestive Failure"); dao.save(afd3); // Now search for our item on the string "congestive" List<String> searchWords = new ArrayList<String>(); searchWords.add("CoNgeStiVe"); searchWords.add("HearT"); searchWords.add("failure"); List<AnnotationDefinition> afds1 = dao.findMatches(searchWords); assertNotNull(afds1); // Make sure it sorted them properly. assertEquals(afds1.get(0).getDisplayName(), "Congestive Heart Failure"); assertEquals(afds1.get(1).getDisplayName(), "Congestive Failure"); assertEquals(afds1.get(2).getDisplayName(), "Congestive"); List<String> searchWords2 = new ArrayList<String>(); searchWords2.add("afdsefda"); List<AnnotationDefinition> afds2 = dao.findMatches(searchWords2); assertEquals(0, afds2.size()); } @Test public void testFindMatchingSamples() { StudyHelper studyHelper = new StudyHelper(); dao.save(studyHelper.getPlatform()); Study study = studyHelper.populateAndRetrieveStudy().getStudy(); dao.save(study); // Now need to create the criterion items and see if we can retrieve back the proper values. 
NumericComparisonCriterion criterion = new NumericComparisonCriterion(); criterion.setNumericValue(12.0); criterion.setNumericComparisonOperator(NumericComparisonOperatorEnum.GREATEROREQUAL); criterion.setAnnotationFieldDescriptor(studyHelper.getSampleAnnotationFieldDescriptor()); criterion.setEntityType(EntityTypeEnum.SAMPLE); List<SampleAcquisition> matchingSamples = dao.findMatchingSamples(criterion, study); assertEquals(4, matchingSamples.size()); // Try a different number combination to test a different operator NumericComparisonCriterion criterion2 = new NumericComparisonCriterion(); criterion2.setNumericValue(11.0); criterion2.setNumericComparisonOperator(NumericComparisonOperatorEnum.LESSOREQUAL); criterion2.setAnnotationFieldDescriptor(studyHelper.getSampleAnnotationFieldDescriptor()); criterion2.setEntityType(EntityTypeEnum.SAMPLE); List<SampleAcquisition> matchingSamples2 = dao.findMatchingSamples(criterion2, study); assertEquals(3, matchingSamples2.size()); // Try a selectedValueCriterion now (should be size 3) SelectedValueCriterion criterion3 = new SelectedValueCriterion(); Collection<PermissibleValue> permissibleValues1 = new HashSet<PermissibleValue>(); permissibleValues1.add(studyHelper.getPermval1()); criterion3.setValueCollection(permissibleValues1); criterion3.setEntityType(EntityTypeEnum.SAMPLE); criterion3.setAnnotationFieldDescriptor(studyHelper.getSampleAnnotationFieldDescriptor()); List<SampleAcquisition> matchingSamples3 = dao.findMatchingSamples(criterion3, study); assertEquals(1, matchingSamples3.size()); // Try the other permissible values (should be size 2) SelectedValueCriterion criterion4 = new SelectedValueCriterion(); Collection<PermissibleValue> permissibleValues2 = new HashSet<PermissibleValue>(); permissibleValues2.add(studyHelper.getPermval2()); criterion4.setValueCollection(permissibleValues2); criterion4.setEntityType(EntityTypeEnum.SAMPLE); 
criterion4.setAnnotationFieldDescriptor(studyHelper.getSampleAnnotationFieldDescriptor()); List<SampleAcquisition> matchingSamples4 = dao.findMatchingSamples(criterion4, study); assertEquals(0, matchingSamples4.size()); // Try using a different Annotation Definition and verify that it returns 0 from that. NumericComparisonCriterion criterion5 = new NumericComparisonCriterion(); criterion5.setNumericValue(13.0); criterion5.setNumericComparisonOperator(NumericComparisonOperatorEnum.GREATEROREQUAL); criterion5.setAnnotationFieldDescriptor(studyHelper.getImageSeriesAnnotationFieldDescriptor()); criterion5.setEntityType(EntityTypeEnum.SAMPLE); List<SampleAcquisition> matchingSamples5 = dao.findMatchingSamples(criterion5, study); assertEquals(0, matchingSamples5.size()); } @Test public void testFindMatchingImageSeries() { StudyHelper studyHelper = new StudyHelper(); dao.save(studyHelper.getPlatform()); Study study = studyHelper.populateAndRetrieveStudy().getStudy(); dao.save(study); StringComparisonCriterion criterion1 = new StringComparisonCriterion(); criterion1.setStringValue("string1"); criterion1.setEntityType(EntityTypeEnum.IMAGESERIES); criterion1.setAnnotationFieldDescriptor(studyHelper.getImageSeriesAnnotationFieldDescriptor()); List<ImageSeries> matchingImageSeries = dao.findMatchingImageSeries(criterion1, study); assertEquals(1, matchingImageSeries.size()); // Try a wildcard search now. StringComparisonCriterion criterion2 = new StringComparisonCriterion(); criterion2.setStringValue("string"); criterion2.setEntityType(EntityTypeEnum.IMAGESERIES); criterion2.setWildCardType(WildCardTypeEnum.WILDCARD_AFTER_STRING); criterion2.setAnnotationFieldDescriptor(studyHelper.getImageSeriesAnnotationFieldDescriptor()); List<ImageSeries> matchingImageSeries2 = dao.findMatchingImageSeries(criterion2, study); assertEquals(5, matchingImageSeries2.size()); // Change only the annotation definition and see if it returns 0. 
StringComparisonCriterion criterion3 = new StringComparisonCriterion(); criterion3.setStringValue("string1"); criterion3.setEntityType(EntityTypeEnum.IMAGESERIES); criterion3.setAnnotationFieldDescriptor(studyHelper.getSampleAnnotationFieldDescriptor()); List<ImageSeries> matchingImageSeries3 = dao.findMatchingImageSeries(criterion3, study); assertEquals(0, matchingImageSeries3.size()); } @Test public void testFindMatchingSubjects() { StudyHelper studyHelper = new StudyHelper(); dao.save(studyHelper.getPlatform()); Study study = studyHelper.populateAndRetrieveStudy().getStudy(); dao.save(study); NumericComparisonCriterion criterion1 = new NumericComparisonCriterion(); criterion1.setNumericValue(2.0); criterion1.setNumericComparisonOperator(NumericComparisonOperatorEnum.GREATER); criterion1.setEntityType(EntityTypeEnum.SUBJECT); criterion1.setAnnotationFieldDescriptor(studyHelper.getSubjectAnnotationFieldDescriptor()); List<StudySubjectAssignment> matchingStudySubjectAssignments = dao.findMatchingSubjects(criterion1, study); assertEquals(3, matchingStudySubjectAssignments.size()); // Change only the annotation definition and see if it returns 0. 
NumericComparisonCriterion criterion2 = new NumericComparisonCriterion(); criterion2.setNumericValue(2.0); criterion2.setNumericComparisonOperator(NumericComparisonOperatorEnum.GREATER); criterion2.setEntityType(EntityTypeEnum.SUBJECT); criterion2.setAnnotationFieldDescriptor(studyHelper.getSampleAnnotationFieldDescriptor()); List<StudySubjectAssignment> matchingStudySubjectAssignments2 = dao.findMatchingSubjects(criterion2, study); assertEquals(0, matchingStudySubjectAssignments2.size()); } @Test public void testFindGeneExpressionReporters() { Study study = new Study(); Gene gene = new Gene(); gene.setSymbol("TEST"); GeneExpressionReporter reporter = new GeneExpressionReporter(); Platform platform = new Platform(); ReporterList reporterList = platform.addReporterList("reporterList", ReporterTypeEnum.GENE_EXPRESSION_PROBE_SET); reporter.setReporterList(reporterList); reporterList.getReporters().add(reporter); reporter.setIndex(0); reporter.getGenes().add(gene); StudySubjectAssignment studySubjectAssignment = new StudySubjectAssignment(); study.getAssignmentCollection().add(studySubjectAssignment); SampleAcquisition sampleAcquisition = new SampleAcquisition(); studySubjectAssignment.getSampleAcquisitionCollection().add(sampleAcquisition); Sample sample = new Sample(); sampleAcquisition.setSample(sample); ArrayData arrayData = new ArrayData(); arrayData.setStudy(study); sample.getArrayDataCollection().add(arrayData); arrayData.getReporterLists().add(reporterList); reporterList.getArrayDatas().add(arrayData); dao.save(sample); dao.save(study); dao.save(gene); Set<String> geneSymbols = new HashSet<String>(); geneSymbols.add("TEST"); assertEquals(1, dao.findReportersForGenes(geneSymbols, ReporterTypeEnum.GENE_EXPRESSION_PROBE_SET, study, null).size()); } @Test public void testRetrieveUniqueValuesForStudyAnnotation() { Study study = new Study(); StudySubjectAssignment studySubjectAssignment1 = new StudySubjectAssignment(); studySubjectAssignment1.setStudy(study); 
StudySubjectAssignment studySubjectAssignment2 = new StudySubjectAssignment(); studySubjectAssignment2.setStudy(study); StudySubjectAssignment studySubjectAssignment3 = new StudySubjectAssignment(); studySubjectAssignment3.setStudy(study); study.getAssignmentCollection().add(studySubjectAssignment1); study.getAssignmentCollection().add(studySubjectAssignment3); study.getAssignmentCollection().add(studySubjectAssignment3); SubjectAnnotation subjectAnnotation1 = new SubjectAnnotation(); SubjectAnnotation subjectAnnotation2 = new SubjectAnnotation(); SubjectAnnotation subjectAnnotation3 = new SubjectAnnotation(); SubjectAnnotation subjectAnnotation4 = new SubjectAnnotation(); SubjectAnnotation subjectAnnotation5 = new SubjectAnnotation(); SubjectAnnotation subjectAnnotation6 = new SubjectAnnotation(); studySubjectAssignment1.getSubjectAnnotationCollection().add(subjectAnnotation1); studySubjectAssignment2.getSubjectAnnotationCollection().add(subjectAnnotation2); studySubjectAssignment3.getSubjectAnnotationCollection().add(subjectAnnotation3); studySubjectAssignment1.getSubjectAnnotationCollection().add(subjectAnnotation4); studySubjectAssignment2.getSubjectAnnotationCollection().add(subjectAnnotation5); studySubjectAssignment3.getSubjectAnnotationCollection().add(subjectAnnotation6); // First test is for Strings AnnotationDefinition annotationDefinition = new AnnotationDefinition(); annotationDefinition.setDataType(AnnotationTypeEnum.STRING); StringAnnotationValue genderStringValue1 = new StringAnnotationValue(); genderStringValue1.setStringValue("M"); genderStringValue1.setSubjectAnnotation(subjectAnnotation1); subjectAnnotation1.setAnnotationValue(genderStringValue1); genderStringValue1.setAnnotationDefinition(annotationDefinition); StringAnnotationValue genderStringValue2 = new StringAnnotationValue(); genderStringValue2.setStringValue("M"); genderStringValue2.setSubjectAnnotation(subjectAnnotation2); subjectAnnotation2.setAnnotationValue(genderStringValue2); 
genderStringValue2.setAnnotationDefinition(annotationDefinition); StringAnnotationValue genderStringValue3 = new StringAnnotationValue(); genderStringValue3.setStringValue("F"); genderStringValue3.setSubjectAnnotation(subjectAnnotation3); subjectAnnotation3.setAnnotationValue(genderStringValue3); genderStringValue3.setAnnotationDefinition(annotationDefinition); annotationDefinition.getAnnotationValueCollection().add(genderStringValue1); annotationDefinition.getAnnotationValueCollection().add(genderStringValue2); annotationDefinition.getAnnotationValueCollection().add(genderStringValue3); // Next test is for numerics. AnnotationDefinition annotationDefinition2 = new AnnotationDefinition(); annotationDefinition2.setDataType(AnnotationTypeEnum.NUMERIC); NumericAnnotationValue numericValue1 = new NumericAnnotationValue(); numericValue1.setNumericValue(1.0); numericValue1.setSubjectAnnotation(subjectAnnotation4); subjectAnnotation4.setAnnotationValue(numericValue1); numericValue1.setAnnotationDefinition(annotationDefinition2); NumericAnnotationValue numericValue2 = new NumericAnnotationValue(); numericValue2.setNumericValue(1.0); numericValue2.setSubjectAnnotation(subjectAnnotation5); subjectAnnotation5.setAnnotationValue(numericValue2); numericValue2.setAnnotationDefinition(annotationDefinition2); NumericAnnotationValue numericValue3 = new NumericAnnotationValue(); numericValue3.setNumericValue(2.0); numericValue3.setSubjectAnnotation(subjectAnnotation6); subjectAnnotation6.setAnnotationValue(numericValue3); numericValue3.setAnnotationDefinition(annotationDefinition2); annotationDefinition2.getAnnotationValueCollection().add(numericValue1); annotationDefinition2.getAnnotationValueCollection().add(numericValue2); annotationDefinition2.getAnnotationValueCollection().add(numericValue3); dao.save(annotationDefinition); dao.save(annotationDefinition2); dao.save(study); // First test is 3 strings, M, M, and F, and we want just M / F to come out of it. 
List<String> values = dao.retrieveUniqueValuesForStudyAnnotation(study, annotationDefinition, EntityTypeEnum.SUBJECT, String.class); int numberM = 0; int numberF = 0; for(String value : values) { if (value.equals("M")) { numberM++; } if (value.equals("F")) { numberF++; } } assertEquals(1, numberM); assertEquals(1, numberF); assertEquals(2, values.size()); assertEquals(3, annotationDefinition.getAnnotationValueCollection().size()); // Next test is 3 numbers, 1.0, 1.0, and 2.0, and we want just 1.0 / 2.0 to come out of it. List<Double> numericValues = dao.retrieveUniqueValuesForStudyAnnotation(study, annotationDefinition2, EntityTypeEnum.SUBJECT, Double.class); int number1 = 0; int number2 = 0; for(Double value : numericValues) { if (value.equals(1.0)) { number1++; } if (value.equals(2.0)) { number2++; } } assertEquals(1, number1); assertEquals(1, number2); assertEquals(2, numericValues.size()); assertEquals(3, annotationDefinition2.getAnnotationValueCollection().size()); } @Test public void testRetrieveImagingDataSourceForStudy() { StudyConfiguration studyConfiguration = new StudyConfiguration(); Study study = studyConfiguration.getStudy(); ImageDataSourceConfiguration imageDataSource = new ImageDataSourceConfiguration(); studyConfiguration.getImageDataSources().add(imageDataSource); dao.save(studyConfiguration); assertEquals(imageDataSource, dao.retrieveImagingDataSourceForStudy(study)); } @Test public void testRetrieveNumberImage() { StudyHelper studyHelper = new StudyHelper(); dao.save(studyHelper.getPlatform()); Study study = studyHelper.populateAndRetrieveStudyWithSourceConfigurations(); dao.save(study.getStudyConfiguration()); dao.save(study); int numImageSeries = dao.retrieveNumberImages( study.getStudyConfiguration().getImageDataSources().get(0) .getImageSeriesAcquisitions().get(0).getSeriesCollection()); assertEquals(2, numImageSeries); } @Test public void testRetrievePlatformsForGenomicSource() { StudyHelper studyHelper = new StudyHelper(); 
dao.save(studyHelper.getPlatform()); Study study = studyHelper.populateAndRetrieveStudyWithSourceConfigurations(); dao.save(study.getStudyConfiguration()); dao.save(study); List<Platform> platforms = dao.retrievePlatformsForGenomicSource( study.getStudyConfiguration().getGenomicDataSources().get(0)); assertEquals(2, platforms.size()); } @Test public void testFindMatchingSegmentDatas() throws InvalidCriterionException { Platform platform = new Platform(); platform.setName("platform"); ReporterList reporterList = platform.addReporterList("name", ReporterTypeEnum.DNA_ANALYSIS_REPORTER); reporterList.setGenomeVersion("hg18"); dao.save(platform); StudyHelper studyHelper = new StudyHelper(); studyHelper.setArrayDataType(ArrayDataType.COPY_NUMBER); studyHelper.setPlatform(platform); Study study = studyHelper.populateAndRetrieveStudy().getStudy(); dao.save(study); // Segment Data type CopyNumberAlterationCriterion copyNumberCriterion = new CopyNumberAlterationCriterion(); copyNumberCriterion.setCopyNumberCriterionType(CopyNumberCriterionTypeEnum.SEGMENT_VALUE); copyNumberCriterion.setLowerLimit(.02f); copyNumberCriterion.setUpperLimit(50f); copyNumberCriterion.setGenomicIntervalType(GenomicIntervalTypeEnum.CHROMOSOME_COORDINATES); copyNumberCriterion.setChromosomeCoordinateHigh(1800000); copyNumberCriterion.setChromosomeCoordinateLow(20000); copyNumberCriterion.setChromosomeNumber("3"); List<SegmentData> segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(1, segmentDatas.size()); copyNumberCriterion.setUpperLimit(.08f); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(1, segmentDatas.size()); copyNumberCriterion.setChromosomeCoordinateLow(0); copyNumberCriterion.setChromosomeCoordinateHigh(1000000); copyNumberCriterion.setUpperLimit(.12f); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(1, segmentDatas.size()); 
copyNumberCriterion.setGenomicIntervalType(GenomicIntervalTypeEnum.GENE_NAME); copyNumberCriterion.setGeneSymbol("GENE_3, GENE_4"); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(4, segmentDatas.size()); // Try matching the segment datas based on the location of the two previous ones. segmentDatas = dao.findMatchingSegmentDatasByLocation(segmentDatas, study, platform); assertEquals(4, segmentDatas.size()); copyNumberCriterion.setUpperLimit(.03f); copyNumberCriterion.setLowerLimit(null); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(3, segmentDatas.size()); copyNumberCriterion.setGeneSymbol(""); copyNumberCriterion.setLowerLimit(.04f); copyNumberCriterion.setUpperLimit(.02f); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(5, segmentDatas.size()); copyNumberCriterion.setUpperLimit(.15f); copyNumberCriterion.setLowerLimit(.01f); copyNumberCriterion.setChromosomeCoordinateLow(20000); copyNumberCriterion.setChromosomeCoordinateHigh(40000); copyNumberCriterion.setSegmentBoundaryType(SegmentBoundaryTypeEnum.ONE_OR_MORE); copyNumberCriterion.setGenomicIntervalType(GenomicIntervalTypeEnum.CHROMOSOME_COORDINATES); copyNumberCriterion.setChromosomeNumber("2"); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(1, segmentDatas.size()); // Calls type copyNumberCriterion = new CopyNumberAlterationCriterion(); copyNumberCriterion.setCopyNumberCriterionType(CopyNumberCriterionTypeEnum.CALLS_VALUE); copyNumberCriterion.setGenomicIntervalType(GenomicIntervalTypeEnum.CHROMOSOME_COORDINATES); copyNumberCriterion.setChromosomeNumber("4"); copyNumberCriterion.getCallsValues().add(0); segmentDatas = dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(0, segmentDatas.size()); copyNumberCriterion.getCallsValues().add(-1); segmentDatas = 
dao.findMatchingSegmentDatas(copyNumberCriterion, study, platform); assertEquals(1, segmentDatas.size()); } @Test public void testFindGenesByLocation() { Gene gene1 = createGene("gene1"); Gene gene2 = createGene("gene2"); Gene gene3 = createGene("gene3"); Gene gene4 = createGene("gene4"); Gene gene5 = createGene("gene5"); Gene gene6 = createGene("gene6"); Gene gene7 = createGene("gene7"); dao.save(gene1); dao.save(gene2); dao.save(gene3); dao.save(gene4); dao.save(gene5); dao.save(gene6); dao.save(gene7); GeneLocationConfiguration geneLocationConfiguration = new GeneLocationConfiguration(); geneLocationConfiguration.setGenomeBuildVersion(GenomeBuildVersionEnum.HG18); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene1", "1", 1, 3)); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene2", "1", 2, 6)); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene3", "1", 2, 11)); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene4", "1", 8, 9)); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene5", "1", 10, 12)); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene6", "1", 11, 15)); geneLocationConfiguration.getGeneLocations().add(createGeneChromosomalLocation("gene7", "2", 1, 15)); dao.save(geneLocationConfiguration); List<Gene> genes = dao.findGenesByLocation("1", 2, 10, GenomeBuildVersionEnum.HG18); assertEquals(5, genes.size()); genes = dao.findGenesByLocation("1", 6, 8, GenomeBuildVersionEnum.HG18); assertEquals(3, genes.size()); genes = dao.findGenesByLocation("2", 3, 4, GenomeBuildVersionEnum.HG18); assertEquals(1, genes.size()); genes = dao.findGenesByLocation("2", 3, 4, GenomeBuildVersionEnum.HG19); assertEquals(0, genes.size()); genes = dao.findGenesByLocation("3", 3, 4, GenomeBuildVersionEnum.HG18); assertEquals(0, genes.size()); } /** * */ private Gene createGene(String 
symbol) { Gene gene = new Gene(); gene.setSymbol(symbol); return gene; } /** * @return */ private GeneChromosomalLocation createGeneChromosomalLocation(String symbol, String chromosome, Integer start, Integer end) { GeneChromosomalLocation gcl = new GeneChromosomalLocation(); gcl.setGeneSymbol(symbol); ChromosomalLocation location = new ChromosomalLocation(); location.setChromosome(chromosome); location.setStartPosition(start); location.setEndPosition(end); gcl.setLocation(location); return gcl; } }
package org.robotninjas.barge.state;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.collect.Sets;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.NotThreadSafe;
import javax.inject.Inject;
import org.jetlang.fibers.Fiber;
import org.robotninjas.barge.RaftException;
import org.robotninjas.barge.RaftExecutor;
import org.robotninjas.barge.api.AppendEntries;
import org.robotninjas.barge.api.AppendEntriesResponse;
import org.robotninjas.barge.api.RequestVote;
import org.robotninjas.barge.api.RequestVoteResponse;
import org.robotninjas.barge.log.RaftLog;
import org.slf4j.MDC;

/**
 * Context that drives the Raft role state machine: it owns the current
 * {@link State} delegate, dispatches protocol messages to it on the Raft
 * executor fiber, and notifies registered transition/protocol listeners.
 * Not thread-safe; state transitions are serialized via {@code synchronized}.
 */
@NotThreadSafe
class RaftStateContext implements Raft {

    private final StateFactory stateFactory;
    private final Executor executor;
    private final String name;
    private final Set<StateTransitionListener> listeners = Sets.newConcurrentHashSet();
    private final Set<RaftProtocolListener> protocolListeners = Sets.newConcurrentHashSet();

    // volatile: read by callers on other threads via type()/delegates.
    private volatile StateType state;
    private volatile State delegate;
    private boolean stop;

    @Inject
    RaftStateContext(String name, StateFactory stateFactory, @RaftExecutor Fiber executor,
            Set<StateTransitionListener> listeners, Set<RaftProtocolListener> protocolListeners) {
        MDC.put("self", name);
        this.stateFactory = stateFactory;
        this.executor = executor;
        this.name = name;
        this.listeners.add(new LogListener());
        this.listeners.addAll(listeners);
        this.protocolListeners.addAll(protocolListeners);
    }

    RaftStateContext(RaftLog log, StateFactory stateFactory, Fiber executor,
            Set<StateTransitionListener> listeners) {
        this(log.self().toString(), stateFactory, executor, listeners);
    }

    RaftStateContext(String name, StateFactory stateFactory, Fiber executor,
            Set<StateTransitionListener> listeners) {
        this(name, stateFactory, executor, listeners, Collections.<RaftProtocolListener>emptySet());
    }

    /**
     * Transitions into the START state asynchronously on the Raft executor and
     * notifies protocol listeners of initialization.
     *
     * @return future completing with {@link StateType#START}
     */
    @Override
    public CompletableFuture<StateType> init() {
        CompletableFuture<StateType> init = CompletableFuture.supplyAsync(() -> {
            setState(null, StateType.START);
            return StateType.START;
        }, executor);
        notifiesInit();
        return init;
    }

    /**
     * Dispatches a RequestVote RPC to the current state on the Raft executor
     * and blocks for the response. Protocol listeners are notified regardless
     * of outcome.
     */
    @Override
    @Nonnull
    public RequestVoteResponse requestVote(@Nonnull final RequestVote request) {
        checkNotNull(request);
        CompletableFuture<RequestVoteResponse> response = CompletableFuture.supplyAsync(
                () -> delegate.requestVote(RaftStateContext.this, request), executor);
        try {
            return response.get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            notifyRequestVote(request);
        }
    }

    /**
     * Dispatches an AppendEntries RPC to the current state on the Raft
     * executor and blocks for the response. Protocol listeners are notified
     * regardless of outcome.
     */
    @Override
    @Nonnull
    public AppendEntriesResponse appendEntries(@Nonnull final AppendEntries request) {
        checkNotNull(request);
        CompletableFuture<AppendEntriesResponse> response = CompletableFuture.supplyAsync(
                () -> delegate.appendEntries(RaftStateContext.this, request), executor);
        try {
            // Fixed: previously called response.get() twice (a redundant second
            // blocking call) and left an unused local variable.
            return response.get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            notifyAppendEntries(request);
        }
    }

    /**
     * Submits an operation for commit through the current state. RaftException
     * thrown inside the async task is wrapped in a RuntimeException completing
     * the returned future exceptionally.
     */
    @Override
    @Nonnull
    public CompletableFuture<Object> commitOperation(@Nonnull final byte[] op) throws RaftException {
        checkNotNull(op);
        CompletableFuture<Object> response = CompletableFuture.supplyAsync(() -> {
            try {
                return delegate.commitOperation(RaftStateContext.this, op);
            } catch (RaftException e) {
                // Fixed: the cause was previously dropped (new RuntimeException()).
                throw new RuntimeException(e);
            }
        }, executor);
        notifyCommit(op);
        return response;
    }

    /**
     * Transitions from {@code oldState} to {@code state}. Rejects the
     * transition if the caller's view of the current state is stale; forces
     * STOPPED when {@link #stop()} has been requested.
     *
     * @throws IllegalStateException if {@code oldState} is not the current delegate
     */
    public synchronized void setState(State oldState, @Nonnull StateType state) {
        if (this.delegate != oldState) {
            notifiesInvalidTransition(oldState);
            throw new IllegalStateException();
        }
        if (stop) {
            state = StateType.STOPPED;
            notifiesStop();
        }
        if (this.delegate != null) {
            this.delegate.destroy(this);
        }
        this.state = checkNotNull(state);
        delegate = stateFactory.makeState(state);
        MDC.put("state", this.state.toString());
        notifiesChangeState(oldState);
        delegate.init(this);
    }

    @Override
    public void addTransitionListener(@Nonnull StateTransitionListener transitionListener) {
        listeners.add(transitionListener);
    }

    @Override
    public void addRaftProtocolListener(@Nonnull RaftProtocolListener protocolListener) {
        protocolListeners.add(protocolListener);
    }

    @Override
    @Nonnull
    public StateType type() {
        return state;
    }

    /**
     * Requests shutdown: the next transition is forced to STOPPED, and the
     * current delegate (if any) is asked to stop.
     */
    public synchronized void stop() {
        stop = true;
        if (this.delegate != null) {
            this.delegate.doStop(this);
        }
    }

    @Override
    public String toString() {
        return name;
    }

    private void notifiesStop() {
        for (StateTransitionListener listener : listeners) {
            listener.stop(this);
        }
    }

    private void notifiesInvalidTransition(State oldState) {
        for (StateTransitionListener listener : listeners) {
            listener.invalidTransition(this, state, (oldState == null) ? null : oldState.type());
        }
    }

    private void notifiesChangeState(State oldState) {
        for (StateTransitionListener listener : listeners) {
            listener.changeState(this, (oldState == null) ? null : oldState.type(), state);
        }
    }

    private void notifiesInit() {
        for (RaftProtocolListener protocolListener : protocolListeners) {
            protocolListener.init(this);
        }
    }

    private void notifyAppendEntries(AppendEntries request) {
        for (RaftProtocolListener protocolListener : protocolListeners) {
            protocolListener.appendEntries(this, request);
        }
    }

    private void notifyRequestVote(RequestVote vote) {
        for (RaftProtocolListener protocolListener : protocolListeners) {
            protocolListener.requestVote(this, vote);
        }
    }

    private void notifyCommit(byte[] bytes) {
        for (RaftProtocolListener protocolListener : protocolListeners) {
            protocolListener.commit(this, bytes);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.giraffa; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.SortedSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.giraffa.hbase.BlockManagementAgent; import org.apache.giraffa.hbase.NamespaceProcessor; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.namenode.HLMAdapter; import org.apache.hadoop.util.Daemon; /** * Giraffa LeaseManager maintains leases for open files belonging to this * namespace partition. * * Implemented as HDFS.LeaseManager, which is accessed through HLMAdapter. */ public class LeaseManager { static final Log LOG = LogFactory.getLog(LeaseManager.class.getName()); /** * The map is needed in unit tests with MiniCluster. * When multiple RegionServers run in the same JVM they should have * different instances of LeaseManager. 
*/ private static final ConcurrentMap<String, LeaseManager> leaseManagerMap = new ConcurrentHashMap<String, LeaseManager>(); /** * Lease manager is a shared state between {@link NamespaceProcessor} and * {@link BlockManagementAgent}. * Any of them can instantiate LeaseManager if it has not been created yet. * Once created its reference is stored in a shared environment. */ public synchronized static LeaseManager originateSharedLeaseManager( String key) { LeaseManager leaseManager = leaseManagerMap.get(key); if(leaseManager != null) { LOG.info("LeaseManager already exists in shared state for " + key); return leaseManager; } leaseManager = new LeaseManager(); LOG.info("Creating new LeaseManager for " + key); LeaseManager prevLeaseManager = leaseManagerMap.putIfAbsent(key, leaseManager); if(prevLeaseManager != null) { leaseManager = prevLeaseManager; } return leaseManager; } private HLMAdapter hlmAdapter; private Daemon monitor; private volatile boolean shouldRunMonitor = false; private volatile long hardLimit = HdfsConstants.LEASE_HARDLIMIT_PERIOD; public LeaseManager() { this.hlmAdapter = new HLMAdapter(); } public synchronized boolean removeLease(FileLease lease) { return hlmAdapter.removeLease(lease); } public synchronized FileLease addLease(FileLease lease) { return hlmAdapter.addLease(lease); } public synchronized void setHardLimit(long newHardLimit) { this.hardLimit = newHardLimit; } public boolean isLeaseSoftLimitExpired(String holder) { return hlmAdapter.isLeaseSoftLimitExpired(holder); } public synchronized Collection<FileLease> renewLease(String clientName) { hlmAdapter.renewLease(clientName); return hlmAdapter.getLeases(clientName); } public Collection<FileLease> getLeases(String holder) { return hlmAdapter.getLeases(holder); } public synchronized Daemon initializeMonitor(NamespaceProcessor namesystem) { if(monitor != null) return monitor; monitor = new Daemon(new LeaseMonitor(namesystem)); return monitor; } public synchronized void triggerLeaseRecovery() 
{ monitor.interrupt(); } public void startMonitor() { assert monitor != null : "LeaseMonitor was not constructed."; if(shouldRunMonitor) return; shouldRunMonitor = true; monitor.start(); } public void stopMonitor() { if (monitor != null) { shouldRunMonitor = false; try { monitor.interrupt(); monitor.join(3000); } catch (InterruptedException ie) { LOG.warn("Encountered exception ", ie); } monitor = null; } } class LeaseMonitor implements Runnable { private final NamespaceProcessor namesystem; private final String name = getClass().getSimpleName(); public LeaseMonitor(NamespaceProcessor namesystem) { this.namesystem = namesystem; } /** Check leases periodically. */ @Override public void run() { while(shouldRunMonitor) { if(namesystem.isRunning()) checkLeases(); try { Thread.sleep(HdfsServerConstants.NAMENODE_LEASE_RECHECK_INTERVAL); } catch(InterruptedException ie) { if (LOG.isDebugEnabled()) { LOG.debug(name + " is interrupted.", ie); } } } LOG.warn("Exiting LeaseMonitor."); } private void checkLeases() { LOG.debug("Checking leases."); SortedSet<FileLease> sortedLeases = hlmAdapter.getSortedLeases(); if(sortedLeases.size() == 0) return; final FileLease oldest = sortedLeases.first(); if (!oldest.expiredHardLimit(hardLimit)) { return; } LOG.info(oldest + " has expired hard limit."); final List<String> removing = new ArrayList<String>(); // need to create a copy of the oldest lease paths, because // internalReleaseLease() removes paths corresponding to empty files, // i.e. it needs to modify the collection being iterated over // causing ConcurrentModificationException Collection<FileLease> leases = hlmAdapter.getLeases(oldest.getHolder()); if(leases == null) return; for(FileLease lease : leases) { String p = lease.getPath(); try { boolean completed = namesystem.internalReleaseLease(oldest, p); if (LOG.isDebugEnabled()) { if (completed) { LOG.debug("Lease recovery for " + p + " is complete." 
+ " File closed."); } else { LOG.debug("Started block recovery " + p + " lease " + oldest); } } if(!completed) return; // If a lease recovery happened, we need to sync later. } catch (IOException e) { LOG.error("Cannot release the path " + p + " in the lease " + oldest, e); removing.add(p); } } for(String p : removing) { hlmAdapter.removeLease(oldest, p); } } } }
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terracotta.dynamic_config.system_tests.activated;

import com.terracotta.connection.api.TerracottaConnectionService;
import org.junit.Test;
import org.terracotta.dynamic_config.api.model.Cluster;
import org.terracotta.dynamic_config.api.model.Node;
import org.terracotta.dynamic_config.api.model.UID;
import org.terracotta.dynamic_config.entity.topology.client.DynamicTopologyEntity;
import org.terracotta.dynamic_config.entity.topology.client.DynamicTopologyEntityFactory;
import org.terracotta.dynamic_config.test_support.ClusterDefinition;
import org.terracotta.dynamic_config.test_support.DynamicConfigIT;
import org.terracotta.dynamic_config.test_support.InlineServers;

import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;

import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertThat;
import static org.terracotta.angela.client.support.hamcrest.AngelaMatchers.containsOutput;
import static org.terracotta.angela.client.support.hamcrest.AngelaMatchers.successful;

/**
 * @author Mathieu Carbou
 */
// Integration tests for the config-tool "detach" command on an auto-activated
// 1-stripe / 2-node cluster (one active, one passive), covering normal,
// forced, and failure-injected detach scenarios.
@ClusterDefinition(nodesPerStripe = 2, autoActivate = true)
public class DetachCommand1x2IT extends DynamicConfigIT {

  public DetachCommand1x2IT() {
    // Extended timeout: these tests involve failovers and node restarts.
    super(Duration.ofSeconds(180));
  }

  // Forcibly detach the ACTIVE node: the passive must take over and the
  // cluster shrinks to a single node.
  @Test
  public void test_force_detach_active_node() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    assertThat(configTool("detach", "-f", "-d", "localhost:" + getNodePort(1, passiveId), "-s", "localhost:" + getNodePort(1, activeId)), is(successful()));

    // failover - existing passive becomes active
    waitForActive(1, passiveId);
    withTopologyService(1, passiveId, topologyService -> assertTrue(topologyService.isActivated()));

    waitUntil(() -> angela.tsa().getStopped().size(), is(1));
    assertTopologyChanged(passiveId);
  }

  // Forcibly detach the passive node; the active keeps running with a
  // single-node topology.
  @Test
  public void test_force_detach_passive_from_activated_cluster() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    assertThat(configTool("detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)), is(successful()));

    waitUntil(() -> angela.tsa().getStopped().size(), is(1));
    assertTopologyChanged(activeId);
  }

  // Verifies the topology entity fires its onNodeRemoval callback when a
  // node is detached.
  @Test
  public void test_topology_entity_callback_onNodeRemoval() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    try (DynamicTopologyEntity dynamicTopologyEntity = DynamicTopologyEntityFactory.fetch(
        new TerracottaConnectionService(),
        Collections.singletonList(InetSocketAddress.createUnresolved("localhost", getNodePort(1, activeId))),
        "dynamic-config-topology-entity",
        getConnectionTimeout(),
        new DynamicTopologyEntity.Settings().setRequestTimeout(getConnectionTimeout()),
        null)) {
      CountDownLatch called = new CountDownLatch(1);

      dynamicTopologyEntity.setListener(new DynamicTopologyEntity.Listener() {
        @Override
        public void onNodeRemoval(Cluster cluster, UID stripeUID, Node removedNode) {
          called.countDown();
        }
      });

      assertThat(configTool("detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)), is(successful()));

      called.await();
    }
  }

  // A pending restart-required change blocks a normal detach of a stopped
  // passive; forcing the detach succeeds.
  @Test
  public void test_detach_passive_from_activated_cluster_requiring_restart() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    // do a change requiring a restart on the remaining nodes
    assertThat(
        configTool("set", "-s", "localhost:" + getNodePort(1, activeId), "-c", "stripe.1.node." + activeId + ".tc-properties.foo=bar"),
        containsOutput("Restart required for nodes:"));

    stopNode(1, passiveId);

    // try to detach the passive node
    assertThat(
        configTool("detach", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        both(not(successful())).and(containsOutput("Impossible to do any topology change")));

    // try forcing the detach
    assertThat(configTool("detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)), is(successful()));

    assertTopologyChanged(activeId);
  }

  // When the restart-required change targets only the node being detached,
  // a normal (non-forced) detach of the stopped passive succeeds.
  @Test
  public void test_detach_passive_requiring_restart_from_activated_cluster() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    // do a change requiring a restart on the remaining nodes
    assertThat(
        configTool("set", "-s", "localhost:" + getNodePort(1, passiveId), "-c", "stripe.1.node." + passiveId + ".tc-properties.foo=bar"),
        containsOutput("Restart required for nodes:"));

    stopNode(1, passiveId);

    // try to detach the passive node
    assertThat(
        configTool("detach", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        is(successful()));

    assertTopologyChanged(activeId);
  }

  // Detaching a node that is still online must be refused unless forced.
  @Test
  public void test_detach_online_node_in_availability_mode() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    // detaching an online node needs to be forced
    assertThat(
        configTool("detach", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        containsOutput("Nodes must be safely shutdown first. Please refer to the Troubleshooting Guide for more help."));

    assertThat(configTool("detach", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId), "-f"), is(successful()));

    waitUntil(() -> angela.tsa().getStopped().size(), is(1));
    assertTopologyChanged(activeId);
  }

  // Injects a prepare-phase failure on the active: the two-phase commit must
  // fail and Nomad must roll back, leaving the 2-node topology intact.
  @Test
  public void detachNodeFailInActiveAtPrepare() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    String propertySettingString = "stripe.1.node." + activeId + ".tc-properties.detachStatus=prepareDeletion-failure";

    //create prepare failure on active
    assertThat(configTool("set", "-s", "localhost:" + getNodePort(1, 1), "-c", propertySettingString), is(successful()));

    // detach failure (forcing detach otherwise we have to restart cluster)
    assertThat(
        configTool("detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        containsOutput("Two-Phase commit failed"));

    waitUntil(() -> angela.tsa().getStopped().size(), is(1));

    // Nomad rollback happened
    // we end up with a cluster of 2 nodes with 1 of them removed
    withTopologyService(1, activeId, topologyService -> assertTrue(topologyService.isActivated()));
    assertThat(getUpcomingCluster("localhost", getNodePort(1, activeId)).getNodeCount(), is(equalTo(2)));
    assertThat(getRuntimeCluster("localhost", getNodePort(1, activeId)).getNodeCount(), is(equalTo(2)));
  }

  // Kills the active during the Nomad commit phase: after restarting it, the
  // change is left incomplete and the topology still reports both nodes.
  @Test
  @InlineServers(false)
  public void testFailoverDuringNomadCommitForPassiveRemoval() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    String propertySettingString = "stripe.1.node." + activeId + ".tc-properties.failoverDeletion=killDeletion-commit";

    //setup for failover in commit phase on active
    assertThat(configTool("set", "-s", "localhost:" + getNodePort(1, 1), "-c", propertySettingString), is(successful()));

    // Stripe is lost no active
    assertThat(
        configTool("-er", "40s", "detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        containsOutput("Commit failed for node localhost:" + getNodePort(1, activeId) + ". Reason: java.util.concurrent.TimeoutException"));

    waitUntil(() -> angela.tsa().getStopped().size(), is(2));

    startNode(1, activeId, "-r", getNode(1, activeId).getConfigRepo());
    waitForActive(1, activeId);

    // Active has prepare changes for node removal
    withTopologyService(1, activeId, topologyService -> assertTrue(topologyService.isActivated()));
    withTopologyService(1, activeId, topologyService -> assertTrue(topologyService.hasIncompleteChange()));
    assertThat(getUpcomingCluster(1, activeId).getNodeCount(), is(equalTo(2)));
    assertThat(getRuntimeCluster(1, activeId).getNodeCount(), is(equalTo(2)));
  }

  // Prepare-phase failure on the active: rollback completes, no incomplete
  // change remains, topology unchanged.
  @Test
  public void test_detach_passive_prepare_fail_at_active() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    //create prepare failure on active
    String propertySettingString = "stripe.1.node." + activeId + ".tc-properties.detachStatus=prepareDeletion-failure";
    assertThat(configTool("set", "-s", "localhost:" + getNodePort(1, 1), "-c", propertySettingString), is(successful()));

    // detach failure (forcing detach otherwise we have to restart cluster)
    assertThat(
        configTool("detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        containsOutput("Two-Phase commit failed"));

    withTopologyService(1, activeId, topologyService -> assertTrue(topologyService.isActivated()));
    withTopologyService(1, activeId, topologyService -> assertFalse(topologyService.hasIncompleteChange()));
    assertThat(getUpcomingCluster("localhost", getNodePort(1, activeId)).getNodeCount(), is(equalTo(2)));
    assertThat(getRuntimeCluster("localhost", getNodePort(1, activeId)).getNodeCount(), is(equalTo(2)));
  }

  // Commit-phase kill of the active: after restart the change stays
  // incomplete and both nodes remain in the topology.
  @Test
  @InlineServers(false)
  public void test_detach_passive_commit_fail_at_active() throws Exception {
    final int activeId = findActive(1).getAsInt();
    final int passiveId = findPassives(1)[0];

    //create failover while committing
    String propertySettingString = "stripe.1.node." + activeId + ".tc-properties.failoverDeletion=killDeletion-commit";
    assertThat(configTool("set", "-s", "localhost:" + getNodePort(1, 1), "-c", propertySettingString), is(successful()));

    //Both active and passive are down.
    assertThat(
        configTool("-er", "40s", "detach", "-f", "-d", "localhost:" + getNodePort(1, activeId), "-s", "localhost:" + getNodePort(1, passiveId)),
        containsOutput("Two-Phase commit failed"));

    waitUntil(() -> angela.tsa().getStopped().size(), is(2));

    startNode(1, activeId, "-r", getNode(1, activeId).getConfigRepo());
    waitForActive(1, activeId);

    withTopologyService(1, activeId, topologyService -> assertTrue(topologyService.isActivated()));
    withTopologyService(1, activeId, topologyService -> assertTrue(topologyService.hasIncompleteChange()));
    assertThat(getUpcomingCluster("localhost", getNodePort(1, activeId)).getNodeCount(), is(equalTo(2)));
    assertThat(getRuntimeCluster("localhost", getNodePort(1, activeId)).getNodeCount(), is(equalTo(2)));
  }

  // Shared post-condition: the surviving node is activated and both the
  // upcoming and runtime clusters contain exactly one node on its port.
  private void assertTopologyChanged(int nodeId) throws Exception {
    withTopologyService(1, nodeId, topologyService -> assertTrue(topologyService.isActivated()));
    assertThat(getUpcomingCluster("localhost", getNodePort(1, nodeId)).getNodeCount(), is(equalTo(1)));
    assertThat(getRuntimeCluster("localhost", getNodePort(1, nodeId)).getNodeCount(), is(equalTo(1)));
    assertThat(getRuntimeCluster("localhost", getNodePort(1, nodeId)).getSingleNode().get().getPort().orDefault(), is(equalTo(getNodePort(1, nodeId))));
  }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.compute.v2017_12_01; import java.util.Collection; import com.fasterxml.jackson.annotation.JsonCreator; import com.microsoft.rest.ExpandableStringEnum; /** * Defines values for VirtualMachineSizeTypes. */ public final class VirtualMachineSizeTypes extends ExpandableStringEnum<VirtualMachineSizeTypes> { /** Static value Basic_A0 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes BASIC_A0 = fromString("Basic_A0"); /** Static value Basic_A1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes BASIC_A1 = fromString("Basic_A1"); /** Static value Basic_A2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes BASIC_A2 = fromString("Basic_A2"); /** Static value Basic_A3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes BASIC_A3 = fromString("Basic_A3"); /** Static value Basic_A4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes BASIC_A4 = fromString("Basic_A4"); /** Static value Standard_A0 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A0 = fromString("Standard_A0"); /** Static value Standard_A1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A1 = fromString("Standard_A1"); /** Static value Standard_A2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A2 = fromString("Standard_A2"); /** Static value Standard_A3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A3 = fromString("Standard_A3"); /** Static value Standard_A4 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_A4 = fromString("Standard_A4"); /** Static value Standard_A5 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A5 = fromString("Standard_A5"); /** Static value Standard_A6 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A6 = fromString("Standard_A6"); /** Static value Standard_A7 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A7 = fromString("Standard_A7"); /** Static value Standard_A8 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A8 = fromString("Standard_A8"); /** Static value Standard_A9 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A9 = fromString("Standard_A9"); /** Static value Standard_A10 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A10 = fromString("Standard_A10"); /** Static value Standard_A11 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A11 = fromString("Standard_A11"); /** Static value Standard_A1_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A1_V2 = fromString("Standard_A1_v2"); /** Static value Standard_A2_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A2_V2 = fromString("Standard_A2_v2"); /** Static value Standard_A4_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A4_V2 = fromString("Standard_A4_v2"); /** Static value Standard_A8_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A8_V2 = fromString("Standard_A8_v2"); /** Static value Standard_A2m_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A2M_V2 = fromString("Standard_A2m_v2"); /** Static value Standard_A4m_v2 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_A4M_V2 = fromString("Standard_A4m_v2"); /** Static value Standard_A8m_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_A8M_V2 = fromString("Standard_A8m_v2"); /** Static value Standard_B1s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_B1S = fromString("Standard_B1s"); /** Static value Standard_B1ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_B1MS = fromString("Standard_B1ms"); /** Static value Standard_B2s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_B2S = fromString("Standard_B2s"); /** Static value Standard_B2ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_B2MS = fromString("Standard_B2ms"); /** Static value Standard_B4ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_B4MS = fromString("Standard_B4ms"); /** Static value Standard_B8ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_B8MS = fromString("Standard_B8ms"); /** Static value Standard_D1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D1 = fromString("Standard_D1"); /** Static value Standard_D2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D2 = fromString("Standard_D2"); /** Static value Standard_D3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D3 = fromString("Standard_D3"); /** Static value Standard_D4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D4 = fromString("Standard_D4"); /** Static value Standard_D11 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D11 = fromString("Standard_D11"); /** Static value Standard_D12 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_D12 = fromString("Standard_D12"); /** Static value Standard_D13 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D13 = fromString("Standard_D13"); /** Static value Standard_D14 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D14 = fromString("Standard_D14"); /** Static value Standard_D1_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D1_V2 = fromString("Standard_D1_v2"); /** Static value Standard_D2_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D2_V2 = fromString("Standard_D2_v2"); /** Static value Standard_D3_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D3_V2 = fromString("Standard_D3_v2"); /** Static value Standard_D4_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D4_V2 = fromString("Standard_D4_v2"); /** Static value Standard_D5_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D5_V2 = fromString("Standard_D5_v2"); /** Static value Standard_D2_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D2_V3 = fromString("Standard_D2_v3"); /** Static value Standard_D4_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D4_V3 = fromString("Standard_D4_v3"); /** Static value Standard_D8_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D8_V3 = fromString("Standard_D8_v3"); /** Static value Standard_D16_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D16_V3 = fromString("Standard_D16_v3"); /** Static value Standard_D32_v3 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_D32_V3 = fromString("Standard_D32_v3"); /** Static value Standard_D64_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D64_V3 = fromString("Standard_D64_v3"); /** Static value Standard_D2s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D2S_V3 = fromString("Standard_D2s_v3"); /** Static value Standard_D4s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D4S_V3 = fromString("Standard_D4s_v3"); /** Static value Standard_D8s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D8S_V3 = fromString("Standard_D8s_v3"); /** Static value Standard_D16s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D16S_V3 = fromString("Standard_D16s_v3"); /** Static value Standard_D32s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D32S_V3 = fromString("Standard_D32s_v3"); /** Static value Standard_D64s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D64S_V3 = fromString("Standard_D64s_v3"); /** Static value Standard_D11_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D11_V2 = fromString("Standard_D11_v2"); /** Static value Standard_D12_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D12_V2 = fromString("Standard_D12_v2"); /** Static value Standard_D13_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D13_V2 = fromString("Standard_D13_v2"); /** Static value Standard_D14_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_D14_V2 = fromString("Standard_D14_v2"); /** Static value Standard_D15_v2 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_D15_V2 = fromString("Standard_D15_v2"); /** Static value Standard_DS1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS1 = fromString("Standard_DS1"); /** Static value Standard_DS2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS2 = fromString("Standard_DS2"); /** Static value Standard_DS3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS3 = fromString("Standard_DS3"); /** Static value Standard_DS4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS4 = fromString("Standard_DS4"); /** Static value Standard_DS11 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS11 = fromString("Standard_DS11"); /** Static value Standard_DS12 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS12 = fromString("Standard_DS12"); /** Static value Standard_DS13 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS13 = fromString("Standard_DS13"); /** Static value Standard_DS14 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS14 = fromString("Standard_DS14"); /** Static value Standard_DS1_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS1_V2 = fromString("Standard_DS1_v2"); /** Static value Standard_DS2_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS2_V2 = fromString("Standard_DS2_v2"); /** Static value Standard_DS3_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS3_V2 = fromString("Standard_DS3_v2"); /** Static value Standard_DS4_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS4_V2 = fromString("Standard_DS4_v2"); /** Static value Standard_DS5_v2 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_DS5_V2 = fromString("Standard_DS5_v2"); /** Static value Standard_DS11_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS11_V2 = fromString("Standard_DS11_v2"); /** Static value Standard_DS12_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS12_V2 = fromString("Standard_DS12_v2"); /** Static value Standard_DS13_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS13_V2 = fromString("Standard_DS13_v2"); /** Static value Standard_DS14_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS14_V2 = fromString("Standard_DS14_v2"); /** Static value Standard_DS15_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS15_V2 = fromString("Standard_DS15_v2"); /** Static value Standard_DS13-4_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS13_4_V2 = fromString("Standard_DS13-4_v2"); /** Static value Standard_DS13-2_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS13_2_V2 = fromString("Standard_DS13-2_v2"); /** Static value Standard_DS14-8_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS14_8_V2 = fromString("Standard_DS14-8_v2"); /** Static value Standard_DS14-4_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_DS14_4_V2 = fromString("Standard_DS14-4_v2"); /** Static value Standard_E2_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E2_V3 = fromString("Standard_E2_v3"); /** Static value Standard_E4_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E4_V3 = fromString("Standard_E4_v3"); /** Static value Standard_E8_v3 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_E8_V3 = fromString("Standard_E8_v3"); /** Static value Standard_E16_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E16_V3 = fromString("Standard_E16_v3"); /** Static value Standard_E32_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E32_V3 = fromString("Standard_E32_v3"); /** Static value Standard_E64_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E64_V3 = fromString("Standard_E64_v3"); /** Static value Standard_E2s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E2S_V3 = fromString("Standard_E2s_v3"); /** Static value Standard_E4s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E4S_V3 = fromString("Standard_E4s_v3"); /** Static value Standard_E8s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E8S_V3 = fromString("Standard_E8s_v3"); /** Static value Standard_E16s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E16S_V3 = fromString("Standard_E16s_v3"); /** Static value Standard_E32s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E32S_V3 = fromString("Standard_E32s_v3"); /** Static value Standard_E64s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E64S_V3 = fromString("Standard_E64s_v3"); /** Static value Standard_E32-16_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E32_16_V3 = fromString("Standard_E32-16_v3"); /** Static value Standard_E32-8s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E32_8S_V3 = fromString("Standard_E32-8s_v3"); /** Static value Standard_E64-32s_v3 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_E64_32S_V3 = fromString("Standard_E64-32s_v3"); /** Static value Standard_E64-16s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_E64_16S_V3 = fromString("Standard_E64-16s_v3"); /** Static value Standard_F1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F1 = fromString("Standard_F1"); /** Static value Standard_F2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F2 = fromString("Standard_F2"); /** Static value Standard_F4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F4 = fromString("Standard_F4"); /** Static value Standard_F8 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F8 = fromString("Standard_F8"); /** Static value Standard_F16 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F16 = fromString("Standard_F16"); /** Static value Standard_F1s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F1S = fromString("Standard_F1s"); /** Static value Standard_F2s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F2S = fromString("Standard_F2s"); /** Static value Standard_F4s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F4S = fromString("Standard_F4s"); /** Static value Standard_F8s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F8S = fromString("Standard_F8s"); /** Static value Standard_F16s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F16S = fromString("Standard_F16s"); /** Static value Standard_F2s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F2S_V2 = fromString("Standard_F2s_v2"); /** Static value Standard_F4s_v2 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_F4S_V2 = fromString("Standard_F4s_v2"); /** Static value Standard_F8s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F8S_V2 = fromString("Standard_F8s_v2"); /** Static value Standard_F16s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F16S_V2 = fromString("Standard_F16s_v2"); /** Static value Standard_F32s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F32S_V2 = fromString("Standard_F32s_v2"); /** Static value Standard_F64s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F64S_V2 = fromString("Standard_F64s_v2"); /** Static value Standard_F72s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_F72S_V2 = fromString("Standard_F72s_v2"); /** Static value Standard_G1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_G1 = fromString("Standard_G1"); /** Static value Standard_G2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_G2 = fromString("Standard_G2"); /** Static value Standard_G3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_G3 = fromString("Standard_G3"); /** Static value Standard_G4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_G4 = fromString("Standard_G4"); /** Static value Standard_G5 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_G5 = fromString("Standard_G5"); /** Static value Standard_GS1 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS1 = fromString("Standard_GS1"); /** Static value Standard_GS2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS2 = fromString("Standard_GS2"); /** Static value Standard_GS3 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_GS3 = fromString("Standard_GS3"); /** Static value Standard_GS4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS4 = fromString("Standard_GS4"); /** Static value Standard_GS5 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS5 = fromString("Standard_GS5"); /** Static value Standard_GS4-8 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS4_8 = fromString("Standard_GS4-8"); /** Static value Standard_GS4-4 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS4_4 = fromString("Standard_GS4-4"); /** Static value Standard_GS5-16 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS5_16 = fromString("Standard_GS5-16"); /** Static value Standard_GS5-8 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_GS5_8 = fromString("Standard_GS5-8"); /** Static value Standard_H8 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_H8 = fromString("Standard_H8"); /** Static value Standard_H16 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_H16 = fromString("Standard_H16"); /** Static value Standard_H8m for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_H8M = fromString("Standard_H8m"); /** Static value Standard_H16m for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_H16M = fromString("Standard_H16m"); /** Static value Standard_H16r for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_H16R = fromString("Standard_H16r"); /** Static value Standard_H16mr for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_H16MR = fromString("Standard_H16mr"); /** Static value Standard_L4s for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_L4S = fromString("Standard_L4s"); /** Static value Standard_L8s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_L8S = fromString("Standard_L8s"); /** Static value Standard_L16s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_L16S = fromString("Standard_L16s"); /** Static value Standard_L32s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_L32S = fromString("Standard_L32s"); /** Static value Standard_M64s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M64S = fromString("Standard_M64s"); /** Static value Standard_M64ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M64MS = fromString("Standard_M64ms"); /** Static value Standard_M128s for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M128S = fromString("Standard_M128s"); /** Static value Standard_M128ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M128MS = fromString("Standard_M128ms"); /** Static value Standard_M64-32ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M64_32MS = fromString("Standard_M64-32ms"); /** Static value Standard_M64-16ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M64_16MS = fromString("Standard_M64-16ms"); /** Static value Standard_M128-64ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M128_64MS = fromString("Standard_M128-64ms"); /** Static value Standard_M128-32ms for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_M128_32MS = fromString("Standard_M128-32ms"); /** Static value Standard_NC6 for VirtualMachineSizeTypes. 
*/ public static final VirtualMachineSizeTypes STANDARD_NC6 = fromString("Standard_NC6"); /** Static value Standard_NC12 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC12 = fromString("Standard_NC12"); /** Static value Standard_NC24 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC24 = fromString("Standard_NC24"); /** Static value Standard_NC24r for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC24R = fromString("Standard_NC24r"); /** Static value Standard_NC6s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC6S_V2 = fromString("Standard_NC6s_v2"); /** Static value Standard_NC12s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC12S_V2 = fromString("Standard_NC12s_v2"); /** Static value Standard_NC24s_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC24S_V2 = fromString("Standard_NC24s_v2"); /** Static value Standard_NC24rs_v2 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC24RS_V2 = fromString("Standard_NC24rs_v2"); /** Static value Standard_NC6s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC6S_V3 = fromString("Standard_NC6s_v3"); /** Static value Standard_NC12s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC12S_V3 = fromString("Standard_NC12s_v3"); /** Static value Standard_NC24s_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC24S_V3 = fromString("Standard_NC24s_v3"); /** Static value Standard_NC24rs_v3 for VirtualMachineSizeTypes. */ public static final VirtualMachineSizeTypes STANDARD_NC24RS_V3 = fromString("Standard_NC24rs_v3"); /** Static value Standard_ND6s for VirtualMachineSizeTypes. 
*/
    public static final VirtualMachineSizeTypes STANDARD_ND6S = fromString("Standard_ND6s");

    /** Static value Standard_ND12s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND12S = fromString("Standard_ND12s");

    /** Static value Standard_ND24s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND24S = fromString("Standard_ND24s");

    /** Static value Standard_ND24rs for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND24RS = fromString("Standard_ND24rs");

    /** Static value Standard_NV6 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NV6 = fromString("Standard_NV6");

    /** Static value Standard_NV12 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NV12 = fromString("Standard_NV12");

    /** Static value Standard_NV24 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NV24 = fromString("Standard_NV24");

    /**
     * Creates or finds a VirtualMachineSizeTypes from its string representation.
     * Delegates to the inherited {@code fromString(String, Class)} lookup, which is
     * the same call used to define every static constant above.
     *
     * @param name a name to look for
     * @return the corresponding VirtualMachineSizeTypes
     */
    @JsonCreator
    public static VirtualMachineSizeTypes fromString(String name) {
        return fromString(name, VirtualMachineSizeTypes.class);
    }

    /**
     * Lists the values registered so far for this type (delegates to the
     * inherited per-class registry).
     *
     * @return known VirtualMachineSizeTypes values
     */
    public static Collection<VirtualMachineSizeTypes> values() {
        return values(VirtualMachineSizeTypes.class);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.atlas.data;

import java.io.File ;
import java.io.FileNotFoundException ;
import java.util.ArrayList ;
import java.util.Comparator ;
import java.util.Iterator ;
import java.util.List ;
import java.util.NoSuchElementException ;

import org.apache.jena.atlas.AtlasException ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.atlas.lib.Closeable ;
import org.apache.jena.atlas.lib.FileOps ;

/**
 * This class is like {@link DistinctDataBag} except that you are informed if the item you just
 * added was known to be distinct.  This will normally only work until the first spill.  After that,
 * the system may not be able to tell for sure, and will thus return false.  When you are finished
 * adding items, you may call {@link #netIterator()} to get any distinct items that are in the
 * spill files but were not indicated as distinct previously.
 */
public class DistinctDataNet<E> extends DistinctDataBag<E>
{
    // The very first spill file is kept out of the superclass's spill-file list so that
    // netIterator() can use it as a "blacklist" of items already reported via netAdd().
    protected File firstSpillFile;

    public DistinctDataNet(ThresholdPolicy<E> policy, SerializationFactory<E> serializerFactory, Comparator<E> comparator)
    {
        super(policy, serializerFactory, comparator) ;
    }

    /**
     * Adds the item to the bag.
     *
     * @return true if the item added is known to be distinct.  After the first spill this
     *         is no longer reliable and false is returned (the superclass's size only
     *         grows for items it knows to be new, so we compare size before and after).
     */
    public boolean netAdd(E item)
    {
        long s = size ;
        super.add(item) ;
        return !spilled && size > s ;
    }

    @Override
    protected void registerSpillFile(File spillFile)
    {
        // If this is the first time spilling, then keep this spill file separate
        if (!spilled)
        {
            firstSpillFile = spillFile;
        }
        else
        {
            super.registerSpillFile(spillFile);
        }
    }

    @Override
    protected void deleteSpillFiles()
    {
        super.deleteSpillFiles();
        // The first spill file is tracked separately, so delete it here as well.
        if (null != firstSpillFile)
        {
            FileOps.delete(firstSpillFile, false);
            firstSpillFile = null;
        }
    }

    // Used by the .iterator() method: the full set of spill files includes the
    // separately-held first one.
    @Override
    protected List<File> getSpillFiles()
    {
        List<File> toReturn = new ArrayList<>(super.getSpillFiles());
        if (null != firstSpillFile)
        {
            toReturn.add(firstSpillFile);
        }
        return toReturn;
    }

    // TODO: Will be used by the .netIterator() method
    // (spill files excluding the first one, i.e. items not yet reported distinct)
    protected List<File> getNetSpillFiles()
    {
        return super.getSpillFiles();
    }

    /**
     * Returns an iterator to all additional items that are distinct but were
     * not reported to be so at the time {@link #netAdd(Object)} was invoked.
     * <p/>
     * If you do not exhaust the iterator, you should call {@link org.apache.jena.atlas.iterator.Iter#close(Iterator)}
     * to be sure any open file handles are closed.
     */
    public Iterator<E> netIterator()
    {
        // If we haven't spilled, then we have already indicated all distinct values via .netAdd()
        if (!spilled)
        {
            return Iter.nullIterator();
        }

        // Items from the first spill file were already reported distinct by netAdd(),
        // so they form the blacklist to subtract from the full (sorted) iteration.
        Iterator<E> blacklist;
        try
        {
            blacklist = getInputIterator(firstSpillFile);
        }
        catch ( FileNotFoundException e )
        {
            throw new AtlasException("Cannot find the first spill file", e);
        }

        // TODO: Improve performance by making the superclass .iterator() use getNetSpillFiles()
        // instead of getSpillFiles() so it doesn't contain the contents of the first file
        Iterator<E> rest = super.iterator();

        SortedDiffIterator<E> sdi = SortedDiffIterator.create(rest, blacklist, comparator);
        registerCloseableIterator(sdi);
        return sdi;
    }

    /**
     * Produces the set difference of two sorted set sequences.
     */
    protected static class SortedDiffIterator<T> implements Iterator<T>, Closeable
    {
        private final Iterator<T> grayList;              // the source sequence; surviving items are returned
        private final Iterator<T> blackList;             // items to subtract from grayList
        private final Comparator<? super T> comp;

        private boolean finished = false;                // true once grayList is exhausted (or close() called)
        private boolean blackSlotFull = false;           // true while 'black' holds a valid, un-consumed element
        private T white;                                 // next element to hand out via next()
        private T black;                                 // current blacklist element being compared against

        /**
         * Produces the set difference of two sorted set sequences using the natural ordering of the items
         * (null items will always be considered less than any other items).
         *
         * @param first An Iterator&lt;T&gt; whose elements that are not also in second will be returned.
         * @param second An Iterator&lt;T&gt; whose elements that also occur in the first sequence will cause those elements to be removed from the returned sequence.
         */
        public static <S extends Comparable<? super S>> SortedDiffIterator<S> create(Iterator<S> first, Iterator<S> second)
        {
            return create(first, second, new Comparator<S>()
            {
                @Override
                public int compare(S o1, S o2)
                {
                    if (null == o1 && null == o2) return 0;
                    if (null == o1) return -1;
                    if (null == o2) return 1;
                    return o1.compareTo(o2);
                }
            });
        }

        /**
         * Produces the set difference of two sorted set sequences using the specified comparator.
         *
         * @param first An Iterator&lt;T&gt; whose elements that are not also in second will be returned.
         * @param second An Iterator&lt;T&gt; whose elements that also occur in the first sequence will cause those elements to be removed from the returned sequence.
         * @param comparator The comparator used to compare the elements from each iterator.
         */
        public static <S> SortedDiffIterator<S> create(Iterator<S> first, Iterator<S> second, Comparator<? super S> comparator)
        {
            return new SortedDiffIterator<>(first, second, comparator);
        }

        private SortedDiffIterator(Iterator<T> first, Iterator<T> second, Comparator<? super T> comparator)
        {
            this.grayList = first;
            this.blackList = second;
            this.comp = comparator;

            // Prime the white item
            fill();
        }

        // Advances 'white' to the next grayList element that is NOT present in blackList,
        // assuming both sequences are sorted by 'comp'.  Sets 'finished' when grayList runs out.
        private void fill()
        {
            if (finished) return;
            if (!grayList.hasNext())
            {
                close();
                return;
            }
            if (!blackSlotFull)
            {
                // No blacklist element in hand: if the blacklist is exhausted, every
                // remaining gray element survives.
                if (!blackList.hasNext())
                {
                    white = grayList.next();
                    return;
                }
                black = blackList.next();
                blackSlotFull = true;
            }

            // Outer loop advances white
            while (true)
            {
                if (!grayList.hasNext())
                {
                    close();
                    return;
                }
                white = grayList.next();
                int cmp = comp.compare(white, black);
                if (cmp < 0) return;            // white precedes black -> white survives
                // (cmp == 0 falls through both loops: white equals black and is discarded)

                // Inner loop advances black until white is less than or equal to it
                while (cmp > 0)
                {
                    if (!blackList.hasNext())
                    {
                        // Blacklist exhausted: current white (and all later ones) survive.
                        black = null;
                        blackSlotFull = false;
                        return;
                    }
                    black = blackList.next();
                    cmp = comp.compare(white, black);
                    if (cmp < 0) return;
                }
            }
        }

        @Override
        public boolean hasNext()
        {
            return !finished;
        }

        @Override
        public T next()
        {
            if (finished) throw new NoSuchElementException();
            T toReturn = white;
            fill();
            return toReturn;
        }

        @Override
        public void remove()
        {
            throw new UnsupportedOperationException("SortedDiffIterator.remove");
        }

        @Override
        public void close()
        {
            finished = true;
            white = null;
            black = null;
            Iter.close(grayList);
            Iter.close(blackList);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.mnemonic.collections;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.apache.mnemonic.NonVolatileMemAllocator;
import org.apache.mnemonic.RestorableAllocator;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.mnemonic.Durable;
import org.apache.mnemonic.EntityFactoryProxy;
import org.apache.mnemonic.Reclaim;
import org.apache.mnemonic.Utils;
import org.apache.mnemonic.DurableType;
import org.apache.mnemonic.ParameterHolder;
import org.testng.AssertJUnit;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
 * TestNG tests for {@code SinglyLinkedNode} / {@code DurableSinglyLinkedList} created and
 * restored through a {@link ParameterHolder}, backed by a {@link NonVolatileMemAllocator}
 * over the "pmalloc" service.  Most tests are currently disabled (enabled = false); only
 * {@link #testLinkedNodeValueWithLinkedNodeValue()} runs.
 */
public class DurableSinglyLinkedListWithParamHolderNGTest {
  private long cKEYCAPACITY;                 // number of handler slots exposed by the allocator
  private Random m_rand;
  private NonVolatileMemAllocator m_act;     // shared allocator for all tests; closed in tearDown()
  private ParameterHolder ph;                // shared holder reused by several tests

  /**
   * Creates the allocator over ./pobj_NodeValue_WithParamHolder.dat (1 GiB), installs
   * logging buffer/chunk reclaimers that decline reclamation (return false), and zeroes
   * every handler slot.
   */
  @BeforeClass
  public void setUp() {
    m_rand = Utils.createRandom();
    m_act = new NonVolatileMemAllocator(Utils.getNonVolatileMemoryAllocatorService("pmalloc"), 1024 * 1024 * 1024,
        "./pobj_NodeValue_WithParamHolder.dat", true);
    ph = new ParameterHolder();
    cKEYCAPACITY = m_act.handlerCapacity();
    m_act.setBufferReclaimer(new Reclaim<ByteBuffer>() {
      @Override
      public boolean reclaim(ByteBuffer mres, Long sz) {
        System.out.println(String.format("Reclaim Memory Buffer: %X Size: %s", System.identityHashCode(mres),
            null == sz ? "NULL" : sz.toString()));
        return false;  // decline: let the allocator perform the actual reclamation
      }
    });
    m_act.setChunkReclaimer(new Reclaim<Long>() {
      @Override
      public boolean reclaim(Long mres, Long sz) {
        System.out.println(String.format("Reclaim Memory Chunk: %X Size: %s", System.identityHashCode(mres),
            null == sz ? "NULL" : sz.toString()));
        return false;  // decline: let the allocator perform the actual reclamation
      }
    });
    // Reset all key handler slots so stale handles from earlier runs are not picked up.
    for (long i = 0; i < cKEYCAPACITY; ++i) {
      m_act.setHandler(i, 0L);
    }
  }

  /** Closes the allocator (and hence the backing file). */
  @AfterClass
  public void tearDown() {
    m_act.close();
  }

  /** Round-trips a single Integer node: create, take its handler, restore, compare. */
  @Test(enabled = false)
  public void testSingleNodeValueWithInteger() {
    int val = m_rand.nextInt();
    DurableType gtypes[] = {DurableType.INTEGER};
    SinglyLinkedNode<Integer> plln = SinglyLinkedNodeFactory.create(m_act, null, gtypes, false);
    plln.setItem(val, false);
    Long handler = plln.getHandler();
    System.err.println("-------------Start to Restore Integer -----------");
    SinglyLinkedNode<Integer> plln2 = SinglyLinkedNodeFactory.restore(m_act, null, gtypes, handler, false);
    AssertJUnit.assertEquals(val, (int) plln2.getItem());
  }

  /** Round-trips a single String node via its handler. */
  @Test(enabled = false)
  public void testNodeValueWithString() {
    String val = Utils.genRandomString();
    DurableType gtypes[] = {DurableType.STRING};
    SinglyLinkedNode<String> plln = SinglyLinkedNodeFactory.create(m_act, null, gtypes, false);
    plln.setItem(val, false);
    Long handler = plln.getHandler();
    System.err.println("-------------Start to Restore String-----------");
    SinglyLinkedNode<String> plln2 = SinglyLinkedNodeFactory.restore(m_act, null, gtypes, handler, false);
    AssertJUnit.assertEquals(val, plln2.getItem());
  }

  /**
   * Round-trips a node holding a durable Person entity, exercising the ParameterHolder
   * variants of create/restore (the anonymous proxy delegates to PersonFactory).
   */
  @Test(enabled = false)
  public void testNodeValueWithPerson() {
    DurableType gtypes[] = {DurableType.DURABLE};
    EntityFactoryProxy efproxies[] = {new EntityFactoryProxy() {
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> restore(
          A allocator, EntityFactoryProxy[] factoryproxys, DurableType[] gfields, long phandler, boolean autoreclaim) {
        return PersonFactory.restore(allocator, factoryproxys, gfields, phandler, autoreclaim);
      }
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> restore(ParameterHolder<A> ph) {
        return PersonFactory.restore(ph.getAllocator(), ph.getEntityFactoryProxies(), ph.getGenericTypes(),
            ph.getHandler(), ph.getAutoReclaim());
      }
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> create(
          A allocator, EntityFactoryProxy[] factoryproxys, DurableType[] gfields, boolean autoreclaim) {
        return PersonFactory.create(allocator, factoryproxys, gfields, autoreclaim);
      }
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> create(ParameterHolder<A> ph) {
        return PersonFactory.create(ph.getAllocator(), ph.getEntityFactoryProxies(), ph.getGenericTypes(),
            ph.getAutoReclaim());
      }
    } };
    ph.setAllocator(m_act);
    ph.setEntityFactoryProxies(efproxies);
    ph.setGenericTypes(gtypes);
    ph.setAutoReclaim(false);
    @SuppressWarnings("unchecked")
    Person<Long> person = (Person<Long>) efproxies[0].create(m_act, null, null, false);
    person.setAge((short) 31);
    SinglyLinkedNode<Person<Long>> plln = SinglyLinkedNodeFactory.create(ph);
    plln.setItem(person, false);
    long handler = plln.getHandler();
    ph.setHandler(handler);
    SinglyLinkedNode<Person<Long>> plln2 = SinglyLinkedNodeFactory.restore(ph);
    AssertJUnit.assertEquals(31, (int) plln2.getItem().getAge());
  }

  /**
   * Builds a 10-element linked list of Person entities node-by-node, then walks it with
   * reset()/forwardNode() and prints each item.  Verification is visual only.
   */
  @SuppressWarnings("unchecked")
  @Test(enabled = false)
  public void testLinkedNodeValueWithPerson() {
    int elem_count = 10;
    // NOTE(review): raw-typed and never used afterwards — candidate for removal.
    List<Long> referlist = new ArrayList();
    DurableType listgftypes[] = {DurableType.DURABLE};
    EntityFactoryProxy listefproxies[] = {new EntityFactoryProxy() {
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> restore(
          A allocator, EntityFactoryProxy[] factoryproxys, DurableType[] gfields, long phandler, boolean autoreclaim) {
        return PersonFactory.restore(allocator, factoryproxys, gfields, phandler, autoreclaim);
      }
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> restore(ParameterHolder<A> ph) {
        return PersonFactory.restore(ph.getAllocator(), ph.getEntityFactoryProxies(), ph.getGenericTypes(),
            ph.getHandler(), ph.getAutoReclaim());
      }
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> create(
          A allocator, EntityFactoryProxy[] factoryproxys, DurableType[] gfields, boolean autoreclaim) {
        return PersonFactory.create(allocator, factoryproxys, gfields, autoreclaim);
      }
      @Override
      public <A extends RestorableAllocator<A>> Person<Long> create(ParameterHolder<A> ph) {
        return PersonFactory.create(ph.getAllocator(), ph.getEntityFactoryProxies(), ph.getGenericTypes(),
            ph.getAutoReclaim());
      }
    } };
    ph.setAllocator(m_act);
    ph.setEntityFactoryProxies(listefproxies);
    ph.setGenericTypes(listgftypes);
    ph.setAutoReclaim(false);
    DurableSinglyLinkedList<Person<Long>> list = DurableSinglyLinkedListFactory.create(ph);
    SinglyLinkedNode<Person<Long>> firstnv = list.createNode();
    SinglyLinkedNode<Person<Long>> nextnv = firstnv;
    Person<Long> person;
    long val;
    SinglyLinkedNode<Person<Long>> newnv;
    // Build the chain: each iteration fills the current node and links a fresh tail node.
    for (int i = 0; i < elem_count; ++i) {
      person = (Person<Long>) listefproxies[0].create(m_act, null, null, false);
      person.setAge((short) m_rand.nextInt(50));
      person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
      nextnv.setItem(person, false);
      newnv = list.createNode();
      nextnv.setNext(newnv, false);
      nextnv = newnv;
    }
    Person<Long> eval;
    list.setHeadHandler(firstnv.getHandler());
    list.reset();
    while (list.forwardNode()) {
      System.out.printf(" Stage 1 --->\n");
      eval = list.getCurrentNode().getItem();
      if (null != eval) {
        eval.testOutput();
      }
    }
    // TODO(review): no assertion here — list contents are only printed; add a real check.
  }

  /**
   * The only enabled test: builds 10 inner 3-element lists of Doubles, links them into an
   * outer list of lists, persists the head handler in slot 10, restores the whole structure
   * through a second ParameterHolder, and iterates both levels.  Verification is visual only.
   */
  @Test(enabled = true)
  public void testLinkedNodeValueWithLinkedNodeValue() {
    int elem_count = 10;
    long slotKeyId = 10;
    ParameterHolder phe = new ParameterHolder();  // parameters for the inner (Double) nodes
    ParameterHolder phl = new ParameterHolder();  // parameters for the outer (list-of-list) nodes
    DurableType[] elem_gftypes = {DurableType.DOUBLE};
    EntityFactoryProxy[] elem_efproxies = null;
    DurableType linkedgftypes[] = {DurableType.DURABLE, DurableType.DOUBLE};
    EntityFactoryProxy linkedefproxies[] = {new EntityFactoryProxy() {
      // Each method shifts the generic parameters by one so the nested list factory sees
      // only the inner element's type information.
      @Override
      public <A extends RestorableAllocator<A>> Durable restore(A allocator, EntityFactoryProxy[] factoryproxys,
          DurableType[] gfields, long phandler, boolean autoreclaim) {
        Pair<DurableType[], EntityFactoryProxy[]> dpt = Utils.shiftDurableParams(gfields, factoryproxys, 1);
        return DurableSinglyLinkedListFactory.restore(allocator, dpt.getRight(), dpt.getLeft(), phandler, autoreclaim);
      }
      @Override
      public <A extends RestorableAllocator<A>> Durable restore(ParameterHolder<A> ph) {
        Pair<DurableType[], EntityFactoryProxy[]> dpt = Utils.shiftDurableParams(ph.getGenericTypes(),
            ph.getEntityFactoryProxies(), 1);
        return DurableSinglyLinkedListFactory.restore(ph.getAllocator(), dpt.getRight(), dpt.getLeft(),
            ph.getHandler(), ph.getAutoReclaim());
      }
      @Override
      public <A extends RestorableAllocator<A>> Durable create(
          A allocator, EntityFactoryProxy[] factoryproxys, DurableType[] gfields, boolean autoreclaim) {
        Pair<DurableType[], EntityFactoryProxy[]> dpt = Utils.shiftDurableParams(gfields, factoryproxys, 1);
        return DurableSinglyLinkedListFactory.create(allocator, dpt.getRight(), dpt.getLeft(), autoreclaim);
      }
      @Override
      public <A extends RestorableAllocator<A>> Durable create(ParameterHolder<A> ph) {
        Pair<DurableType[], EntityFactoryProxy[]> dpt = Utils.shiftDurableParams(ph.getGenericTypes(),
            ph.getEntityFactoryProxies(), 1);
        return DurableSinglyLinkedListFactory.create(ph.getAllocator(), dpt.getRight(), dpt.getLeft(),
            ph.getAutoReclaim());
      }
    } };
    SinglyLinkedNode<SinglyLinkedNode<Double>> nextnv = null, pre_nextnv = null;
    SinglyLinkedNode<Double> elem = null, pre_elem = null, first_elem = null;
    Long linkhandler = 0L;
    System.out.printf(" Stage 1 -testLinkedNodeValueWithLinkedNodeValue--> \n");
    pre_nextnv = null;
    Double val;
    phe.setAllocator(m_act);
    phe.setEntityFactoryProxies(elem_efproxies);
    phe.setGenericTypes(elem_gftypes);
    phe.setAutoReclaim(false);
    phl.setAllocator(m_act);
    phl.setEntityFactoryProxies(linkedefproxies);
    phl.setGenericTypes(linkedgftypes);
    phl.setAutoReclaim(false);
    for (int i = 0; i < elem_count; ++i) {
      first_elem = null;
      pre_elem = null;
      // Build one inner chain of three Double nodes.
      for (int v = 0; v < 3; ++v) {
        elem = SinglyLinkedNodeFactory.create(phe);
        val = m_rand.nextDouble();
        elem.setItem(val, false);
        if (null == pre_elem) {
          first_elem = elem;
        } else {
          pre_elem.setNext(elem, false);
        }
        pre_elem = elem;
        System.out.printf("%f ", val);
      }
      // Wrap the inner chain's head in an outer node; remember the first outer handler.
      nextnv = SinglyLinkedNodeFactory.create(phl);
      nextnv.setItem(first_elem, false);
      if (null == pre_nextnv) {
        linkhandler = nextnv.getHandler();
      } else {
        pre_nextnv.setNext(nextnv, false);
      }
      pre_nextnv = nextnv;
      System.out.printf(" generated an item... \n");
    }
    // Persist the outer head handler in a well-known slot, then read it back for the restore.
    m_act.setHandler(slotKeyId, linkhandler);
    long handler = m_act.getHandler(slotKeyId);
    phl.setHandler(handler);
    DurableSinglyLinkedList<DurableSinglyLinkedList<Double>> linkedvals = DurableSinglyLinkedListFactory.restore(phl);
    Iterator<DurableSinglyLinkedList<Double>> iter = linkedvals.iterator();
    Iterator<Double> elemiter = null;
    System.out.printf(" Stage 2 -testLinkedNodeValueWithLinkedNodeValue--> \n");
    while (iter.hasNext()) {
      elemiter = iter.next().iterator();
      while (elemiter.hasNext()) {
        System.out.printf("%f ", elemiter.next());
      }
      System.out.printf(" Fetched an item... \n");
    }
    // TODO(review): no assertion here — restored values are only printed; compare against the
    // generated values instead.
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.cache.Cache;
import javax.cache.processor.EntryProcessor;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.IgniteDhtDemandedPartitionsMap;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.mvcc.MvccSnapshot;
import org.apache.ignite.internal.processors.cache.mvcc.MvccVersion;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.cache.persistence.CacheSearchRow;
import org.apache.ignite.internal.processors.cache.persistence.DataRowCacheAware;
import org.apache.ignite.internal.processors.cache.persistence.RootPage;
import org.apache.ignite.internal.processors.cache.persistence.RowStore;
import org.apache.ignite.internal.processors.cache.persistence.freelist.SimpleDataRow;
import org.apache.ignite.internal.processors.cache.persistence.partstate.GroupPartitionId;
import org.apache.ignite.internal.processors.cache.persistence.partstorage.PartitionMetaStorage;
import org.apache.ignite.internal.processors.cache.persistence.tree.reuse.ReuseList;
import org.apache.ignite.internal.processors.cache.tree.PendingEntriesTree;
import org.apache.ignite.internal.processors.cache.tree.mvcc.data.MvccUpdateResult;
import org.apache.ignite.internal.processors.cache.tree.mvcc.search.MvccLinkAwareSearchRow;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.query.GridQueryRowCacheCleaner;
import org.apache.ignite.internal.util.GridAtomicLong;
import org.apache.ignite.internal.util.GridLongList;
import org.apache.ignite.internal.util.IgniteTree;
import org.apache.ignite.internal.util.lang.GridCloseableIterator;
import org.apache.ignite.internal.util.lang.GridCursor;
import org.apache.ignite.internal.util.lang.GridIterator;
import org.apache.ignite.internal.util.lang.IgniteInClosure2X;
import org.apache.ignite.internal.util.lang.IgnitePredicateX;
import org.apache.ignite.lang.IgniteBiTuple;
import org.jetbrains.annotations.Nullable;

/**
 * Manager of cache entries stored off-heap: lifecycle callbacks, per-partition
 * {@link CacheDataStore} access, read/update/remove primitives (plain and MVCC
 * variants), iterators, and counter/index bookkeeping.
 */
@SuppressWarnings("WeakerAccess")
public interface IgniteCacheOffheapManager {
    /**
     * @param ctx Context.
     * @param grp Cache group.
     * @throws IgniteCheckedException If failed.
     */
    public void start(GridCacheSharedContext ctx, CacheGroupContext grp) throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @throws IgniteCheckedException If failed.
     */
    public void onCacheStarted(GridCacheContext cctx) throws IgniteCheckedException;

    /**
     * Callback invoked on kernal stop.
     */
    public void onKernalStop();

    /**
     * @param cacheId Cache ID.
     * @param destroy Destroy data flag. Setting to <code>true</code> will remove all cache data.
     */
    public void stopCache(int cacheId, boolean destroy);

    /**
     * Stops the manager.
     */
    public void stop();

    /**
     * Pre-create partitions that resides in page memory or WAL and restores their state.
     *
     * @param partitionRecoveryStates Partition recovery states.
     * @return Number of processed partitions.
     * @throws IgniteCheckedException If failed.
     */
    long restorePartitionStates(Map<GroupPartitionId, Integer> partitionRecoveryStates) throws IgniteCheckedException;

    /**
     * Partition counter update callback. May be overridden by plugin-provided subclasses.
     *
     * @param part Partition.
     * @param cntr Partition counter.
     */
    public void onPartitionCounterUpdated(int part, long cntr);

    /**
     * Initial counter will be updated on state restore only.
     *
     * @param part Partition.
     * @param start Start.
     * @param delta Delta.
     */
    public void onPartitionInitialCounterUpdated(int part, long start, long delta);

    /**
     * Partition counter provider. May be overridden by plugin-provided subclasses.
     *
     * @param part Partition ID.
     * @return Last updated counter.
     */
    public long lastUpdatedPartitionCounter(int part);

    /**
     * @param entry Cache entry.
     * @return Cached row, if available, null otherwise.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public CacheDataRow read(GridCacheMapEntry entry) throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param key Key.
     * @return Cached row, if available, null otherwise.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public CacheDataRow read(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException;

    /**
     * @param p Partition.
     * @return Data store.
     * @throws IgniteCheckedException If failed.
     */
    public CacheDataStore createCacheDataStore(int p) throws IgniteCheckedException;

    /**
     * @return Iterable over all existing cache data stores.
     */
    public Iterable<CacheDataStore> cacheDataStores();

    /**
     * @param part Partition.
     * @return Data store.
     */
    public CacheDataStore dataStore(GridDhtLocalPartition part);

    /**
     * @param store Data store.
     * @throws IgniteCheckedException If failed.
     */
    public void destroyCacheDataStore(CacheDataStore store) throws IgniteCheckedException;

    /**
     * TODO: GG-10884, used only from initialValue.
     */
    public boolean containsKey(GridCacheMapEntry entry);

    /**
     * @param cctx Cache context.
     * @param c Closure.
     * @param amount Limit of processed entries by single call, {@code -1} for no limit.
     * @return {@code True} if unprocessed expired entries remains.
     * @throws IgniteCheckedException If failed.
     */
    public boolean expire(GridCacheContext cctx, IgniteInClosure2X<GridCacheEntryEx, GridCacheVersion> c, int amount)
        throws IgniteCheckedException;

    /**
     * Gets the number of entries pending expire.
     *
     * @return Number of pending entries.
     * @throws IgniteCheckedException If failed to get number of pending entries.
     */
    public long expiredSize() throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param key Key.
     * @param part Partition.
     * @param c Tree update closure.
     * @throws IgniteCheckedException If failed.
     */
    public void invoke(GridCacheContext cctx, KeyCacheObject key, GridDhtLocalPartition part, OffheapInvokeClosure c)
        throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param key Key.
     * @param mvccSnapshot MVCC snapshot.
     * @return Cached row, if available, null otherwise.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public CacheDataRow mvccRead(GridCacheContext cctx, KeyCacheObject key, MvccSnapshot mvccSnapshot)
        throws IgniteCheckedException;

    /**
     * For testing only.
     *
     * @param cctx Cache context.
     * @param key Key.
     * @return All stored versions for given key.
     * @throws IgniteCheckedException If failed.
     */
    public List<IgniteBiTuple<Object, MvccVersion>> mvccAllVersions(GridCacheContext cctx, KeyCacheObject key)
        throws IgniteCheckedException;

    /**
     * Returns iterator over the all row versions for the given key.
     *
     * @param cctx Cache context.
     * @param key Key.
     * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row.
     * @return Iterator over all versions.
     * @throws IgniteCheckedException If failed.
     */
    GridCursor<CacheDataRow> mvccAllVersionsCursor(GridCacheContext cctx, KeyCacheObject key, Object x)
        throws IgniteCheckedException;

    /**
     * @param entry Entry.
     * @param val Value.
     * @param ver Version.
     * @param expireTime Expire time.
     * @param mvccVer MVCC version.
     * @param newMvccVer New MVCC version.
     * @return {@code True} if value was inserted.
     * @throws IgniteCheckedException If failed.
     */
    public boolean mvccInitialValue(
        GridCacheMapEntry entry,
        @Nullable CacheObject val,
        GridCacheVersion ver,
        long expireTime,
        MvccVersion mvccVer,
        MvccVersion newMvccVer
    ) throws IgniteCheckedException;

    /**
     * Tries to apply entry history.
     * Either applies full entry history or do nothing.
     *
     * @param entry Entry to update.
     * @param hist Full entry history.
     * @return {@code True} if history applied successfully, {@code False} otherwise.
     * @throws IgniteCheckedException If failed.
     */
    boolean mvccApplyHistoryIfAbsent(GridCacheMapEntry entry, List<GridCacheMvccEntryInfo> hist)
        throws IgniteCheckedException;

    /**
     * @param entry Entry.
     * @param val Value.
     * @param ver Cache version.
     * @param expireTime Expire time.
     * @param mvccSnapshot MVCC snapshot.
     * @param primary {@code True} if on primary node.
     * @param needHist Flag to collect history.
     * @param noCreate Flag indicating that row should not be created if absent.
     * @param needOldVal {@code True} if need old value.
     * @param filter Filter.
     * @param retVal Flag to return previous value.
     * @param keepBinary Keep binary flag.
     * @param entryProc Entry processor.
     * @param invokeArgs Entry processor invoke arguments.
     * @return Update result.
     * @throws IgniteCheckedException If failed.
     */
    public MvccUpdateResult mvccUpdate(
        GridCacheMapEntry entry,
        CacheObject val,
        GridCacheVersion ver,
        long expireTime,
        MvccSnapshot mvccSnapshot,
        boolean primary,
        boolean needHist,
        boolean noCreate,
        boolean needOldVal,
        @Nullable CacheEntryPredicate filter,
        boolean retVal,
        boolean keepBinary,
        EntryProcessor entryProc,
        Object[] invokeArgs) throws IgniteCheckedException;

    /**
     * @param entry Entry.
     * @param mvccSnapshot MVCC snapshot.
     * @param primary {@code True} if on primary node.
     * @param needHist Flag to collect history.
     * @param needOldVal {@code True} if need old value.
     * @param filter Filter.
     * @param retVal Flag to return previous value.
     * @return Update result.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public MvccUpdateResult mvccRemove(
        GridCacheMapEntry entry,
        MvccSnapshot mvccSnapshot,
        boolean primary,
        boolean needHist,
        boolean needOldVal,
        @Nullable CacheEntryPredicate filter,
        boolean retVal) throws IgniteCheckedException;

    /**
     * @param entry Entry.
     * @param mvccSnapshot MVCC snapshot.
     * @return Update result.
     * @throws IgniteCheckedException If failed.
     */
    @Nullable public MvccUpdateResult mvccLock(
        GridCacheMapEntry entry,
        MvccSnapshot mvccSnapshot
    ) throws IgniteCheckedException;

    /**
     * Apply update with full history.
     * Note: History version may be skipped if it has already been actualized with previous update operation.
     *
     * @param entry Entry.
     * @param val Value.
     * @param ver Version.
     * @param mvccVer MVCC version.
     * @param newMvccVer New MVCC version.
     * @return {@code True} if value was inserted.
     * @throws IgniteCheckedException If failed.
     */
    public boolean mvccUpdateRowWithPreloadInfo(
        GridCacheMapEntry entry,
        @Nullable CacheObject val,
        GridCacheVersion ver,
        long expireTime,
        MvccVersion mvccVer,
        MvccVersion newMvccVer,
        byte mvccTxState,
        byte newMvccTxState
    ) throws IgniteCheckedException;

    /**
     * @param entry Entry.
     * @throws IgniteCheckedException If failed.
     */
    public void mvccRemoveAll(GridCacheMapEntry entry) throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param key Key.
     * @param val Value.
     * @param ver Version.
     * @param expireTime Expire time.
     * @param oldRow Old row if available.
     * @param part Partition.
     * @throws IgniteCheckedException If failed.
     */
    public void update(
        GridCacheContext cctx,
        KeyCacheObject key,
        CacheObject val,
        GridCacheVersion ver,
        long expireTime,
        GridDhtLocalPartition part,
        @Nullable CacheDataRow oldRow
    ) throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param key Key.
     * @param val Value.
     * @param ver Version.
     * @param expireTime Expire time.
     * @param part Partition.
     * @param mvccVer Mvcc version.
     * @throws IgniteCheckedException If failed.
     */
    void mvccApplyUpdate(
        GridCacheContext cctx,
        KeyCacheObject key,
        CacheObject val,
        GridCacheVersion ver,
        long expireTime,
        GridDhtLocalPartition part,
        MvccVersion mvccVer) throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param key Key.
     * @param partId Partition number.
     * @param part Partition.
     * @throws IgniteCheckedException If failed.
     */
    public void remove(
        GridCacheContext cctx,
        KeyCacheObject key,
        int partId,
        GridDhtLocalPartition part
    ) throws IgniteCheckedException;

    /**
     * @param ldr Class loader.
     * @return Number of undeployed entries.
     */
    public int onUndeploy(ClassLoader ldr);

    /**
     * @param cacheId Cache ID.
     * @param primary Primary entries flag.
     * @param backup Backup entries flag.
     * @param topVer Topology version.
     * @param mvccSnapshot MVCC snapshot.
     * @param dataPageScanEnabled Flag to enable data page scan.
     * @return Rows iterator.
     * @throws IgniteCheckedException If failed.
     */
    public GridIterator<CacheDataRow> cacheIterator(int cacheId,
        boolean primary,
        boolean backup,
        AffinityTopologyVersion topVer,
        @Nullable MvccSnapshot mvccSnapshot,
        Boolean dataPageScanEnabled
    ) throws IgniteCheckedException;

    /**
     * @param cacheId Cache ID.
     * @param part Partition.
     * @param mvccSnapshot MVCC snapshot.
     * @param dataPageScanEnabled Flag to enable data page scan.
     * @return Partition data iterator.
     * @throws IgniteCheckedException If failed.
     */
    public GridIterator<CacheDataRow> cachePartitionIterator(int cacheId, final int part,
        @Nullable MvccSnapshot mvccSnapshot, Boolean dataPageScanEnabled) throws IgniteCheckedException;

    /**
     * @param part Partition number.
     * @return Iterator for given partition.
     * @throws IgniteCheckedException If failed.
     */
    public GridIterator<CacheDataRow> partitionIterator(final int part) throws IgniteCheckedException;

    /**
     * @param part Partition number.
     * @param topVer Topology version.
     * @return Iterator for given partition that will reserve partition state until it is closed.
     * @throws IgniteCheckedException If failed.
     */
    public GridCloseableIterator<CacheDataRow> reservedIterator(final int part, final AffinityTopologyVersion topVer)
        throws IgniteCheckedException;

    /**
     * @param parts Partitions.
     * @return Partition data iterator.
     * @throws IgniteCheckedException If failed.
     */
    // TODO: MVCC>
    public IgniteRebalanceIterator rebalanceIterator(IgniteDhtDemandedPartitionsMap parts,
        AffinityTopologyVersion topVer)
        throws IgniteCheckedException;

    /**
     * @param cctx Cache context.
     * @param primary {@code True} if need to return primary entries.
     * @param backup {@code True} if need to return backup entries.
     * @param topVer Topology version.
     * @param keepBinary Keep binary flag.
     * @param mvccSnapshot MVCC snapshot.
     * @param dataPageScanEnabled Flag to enable data page scan.
     * @return Entries iterator.
     * @throws IgniteCheckedException If failed.
     */
    public <K, V> GridCloseableIterator<Cache.Entry<K, V>> cacheEntriesIterator(
        GridCacheContext cctx,
        final boolean primary,
        final boolean backup,
        final AffinityTopologyVersion topVer,
        final boolean keepBinary,
        @Nullable final MvccSnapshot mvccSnapshot,
        Boolean dataPageScanEnabled
    ) throws IgniteCheckedException;

    /**
     * @param cacheId Cache ID.
     * @param part Partition.
     * @return Iterator.
     * @throws IgniteCheckedException If failed.
     */
    // TODO: MVCC>
    public GridCloseableIterator<KeyCacheObject> cacheKeysIterator(int cacheId, final int part)
        throws IgniteCheckedException;

    /**
     * @param cacheId Cache ID.
     * @param primary Primary entries flag.
     * @param backup Backup entries flag.
     * @param topVer Topology version.
     * @return Entries count.
     * @throws IgniteCheckedException If failed.
     */
    // TODO: MVCC>
    public long cacheEntriesCount(int cacheId, boolean primary, boolean backup, AffinityTopologyVersion topVer)
        throws IgniteCheckedException;

    /**
     * Store entries.
     *
     * @param partId Partition number.
     * @param infos Entry infos.
     * @param initPred Applied to all created rows. Each row that not matches the predicate is removed.
     * @throws IgniteCheckedException If failed.
     */
    public void storeEntries(int partId, Iterator<GridCacheEntryInfo> infos,
        IgnitePredicateX<CacheDataRow> initPred) throws IgniteCheckedException;

    /**
     * Clears offheap entries.
     *
     * @param cctx Cache context.
     * @param readers {@code True} to clear readers.
     */
    public void clearCache(GridCacheContext cctx, boolean readers);

    /**
     * @param cacheId Cache ID.
     * @param part Partition.
     * @return Number of entries in given partition.
     */
    public long cacheEntriesCount(int cacheId, int part);

    /**
     * @return Offheap allocated size.
     */
    public long offHeapAllocatedSize();

    /**
     * @return Global remove ID counter.
     */
    public GridAtomicLong globalRemoveId();

    /**
     * @param cacheId Cache ID.
     * @param idxName Index name.
     * @param segment Segment.
     * @return Root page for index tree.
     * @throws IgniteCheckedException If failed.
     */
    public RootPage rootPageForIndex(int cacheId, String idxName, int segment) throws IgniteCheckedException;

    /**
     * @param cacheId Cache ID.
     * @param idxName Index name.
     * @param segment Segment.
     * @throws IgniteCheckedException If failed.
     */
    public void dropRootPageForIndex(int cacheId, String idxName, int segment) throws IgniteCheckedException;

    /**
     * @param idxName Index name.
     * @return Reuse list for index tree.
     * @throws IgniteCheckedException If failed.
     */
    public ReuseList reuseListForIndex(String idxName) throws IgniteCheckedException;

    /**
     * @param cacheId Cache ID.
     * @return Number of entries.
     */
    public long cacheEntriesCount(int cacheId);

    /**
     * @param part Partition.
     * @return Number of entries.
     */
    public long totalPartitionEntriesCount(int part);

    /**
     * Preload a partition. Must be called under partition reservation for DHT caches.
     *
     * @param part Partition.
     * @throws IgniteCheckedException If failed.
     */
    public void preloadPartition(int part) throws IgniteCheckedException;

    /**
     * Invoke closure applied to an offheap row; exposes the old row state to the caller.
     */
    interface OffheapInvokeClosure extends IgniteTree.InvokeClosure<CacheDataRow> {
        /**
         * @return Old row.
         */
        @Nullable public CacheDataRow oldRow();

        /**
         * Flag that indicates if oldRow was expired during invoke.
         *
         * @return {@code true} if old row was expired, {@code false} otherwise.
         */
        public boolean oldRowExpiredFlag();
    }

    /**
     * Per-partition data store: sizes, update counters, and row-level operations.
     */
    interface CacheDataStore {
        /**
         * Initialize data store if it exists.
         *
         * @return {@code True} if initialized.
         */
        boolean init();

        /**
         * @return Partition ID.
         */
        int partId();

        /**
         * @param cacheId Cache ID.
         * @return Size.
         */
        long cacheSize(int cacheId);

        /**
         * @return Cache sizes if store belongs to group containing multiple caches.
         */
        Map<Integer, Long> cacheSizes();

        /**
         * @return Total size.
         */
        long fullSize();

        /**
         * @return {@code True} if there are no items in the store.
         */
        boolean isEmpty();

        /**
         * Updates size metric for particular cache.
         *
         * @param cacheId Cache ID.
         * @param delta Size delta.
         */
        void updateSize(int cacheId, long delta);

        /**
         * @return Update counter (LWM).
         */
        long updateCounter();

        /**
         * @return Reserved counter (HWM).
         */
        long reservedCounter();

        /**
         * @return Update counter or {@code null} if store is not yet created.
         */
        @Nullable PartitionUpdateCounter partUpdateCounter();

        /**
         * @param delta Delta.
         */
        long reserve(long delta);

        /**
         * @param val Update counter.
         */
        void updateCounter(long val);

        /**
         * Updates counters from start value by delta value.
         *
         * @param start Start.
         * @param delta Delta.
         */
        boolean updateCounter(long start, long delta);

        /**
         * @return Next update counter.
         */
        public long nextUpdateCounter();

        /**
         * Returns current value and updates counter by delta.
         *
         * @param delta Delta.
         * @return Current value.
         */
        public long getAndIncrementUpdateCounter(long delta);

        /**
         * @return Initial update counter.
         */
        public long initialUpdateCounter();

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param val Value.
         * @param ver Version.
         * @param expireTime Expire time.
         * @param oldRow Old row.
         * @return New row.
         * @throws IgniteCheckedException If failed.
         */
        CacheDataRow createRow(
            GridCacheContext cctx,
            KeyCacheObject key,
            CacheObject val,
            GridCacheVersion ver,
            long expireTime,
            @Nullable CacheDataRow oldRow) throws IgniteCheckedException;

        /**
         * Insert rows into page memory.
         *
         * @param rows Rows.
         * @param initPred Applied to all rows. Each row that not matches the predicate is removed.
         * @throws IgniteCheckedException If failed.
         */
        public void insertRows(Collection<DataRowCacheAware> rows,
            IgnitePredicateX<CacheDataRow> initPred) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param cleanupRows Rows to cleanup.
         * @return Cleaned rows count.
         * @throws IgniteCheckedException If failed.
         */
        public int cleanup(GridCacheContext cctx, @Nullable List<MvccLinkAwareSearchRow> cleanupRows)
            throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param row Row.
         * @throws IgniteCheckedException If failed.
         */
        public void updateTxState(GridCacheContext cctx, CacheSearchRow row) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param val Value.
         * @param ver Version.
         * @param expireTime Expire time.
         * @param oldRow Old row if available.
         * @throws IgniteCheckedException If failed.
         */
        void update(
            GridCacheContext cctx,
            KeyCacheObject key,
            CacheObject val,
            GridCacheVersion ver,
            long expireTime,
            @Nullable CacheDataRow oldRow) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param val Value.
         * @param ver Version.
         * @param mvccVer MVCC version.
         * @param newMvccVer New MVCC version.
         * @return {@code True} if new value was inserted.
         * @throws IgniteCheckedException If failed.
         */
        boolean mvccInitialValue(
            GridCacheContext cctx,
            KeyCacheObject key,
            @Nullable CacheObject val,
            GridCacheVersion ver,
            long expireTime,
            MvccVersion mvccVer,
            MvccVersion newMvccVer) throws IgniteCheckedException;

        /**
         * Tries to apply entry history.
         * Either applies full entry history or do nothing.
         *
         * @param cctx Cache context.
         * @param key Key.
         * @param hist Full entry history.
         * @return {@code True} if entry history applied successfully, {@code False} otherwise.
         * @throws IgniteCheckedException If failed.
         */
        boolean mvccApplyHistoryIfAbsent(
            GridCacheContext cctx,
            KeyCacheObject key,
            List<GridCacheMvccEntryInfo> hist) throws IgniteCheckedException;

        /**
         * Apply update with full history.
         * Note: History version may be skipped if it has already been actualized with previous update operation.
         *
         * @param cctx Grid cache context.
         * @param key Key.
         * @param val Value.
         * @param ver Version.
         * @param expireTime Expiration time.
         * @param mvccVer Mvcc version.
         * @param newMvccVer New mvcc version.
         * @return {@code true} on success.
         * @throws IgniteCheckedException If failed.
         */
        boolean mvccUpdateRowWithPreloadInfo(
            GridCacheContext cctx,
            KeyCacheObject key,
            @Nullable CacheObject val,
            GridCacheVersion ver,
            long expireTime,
            MvccVersion mvccVer,
            MvccVersion newMvccVer,
            byte mvccTxState,
            byte newMvccTxState) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param val Value.
         * @param ver Version.
         * @param expireTime Expire time.
         * @param mvccSnapshot MVCC snapshot.
         * @param filter Filter.
         * @param entryProc Entry processor.
         * @param invokeArgs Entry processor invoke arguments.
         * @param primary {@code True} if update is executed on primary node.
         * @param needHist Flag to collect history.
         * @param noCreate Flag indicating that row should not be created if absent.
         * @param needOldVal {@code True} if need old value.
         * @param retVal Flag to return previous value.
         * @param keepBinary Keep binary flag.
         * @return Update result.
         * @throws IgniteCheckedException If failed.
         */
        MvccUpdateResult mvccUpdate(
            GridCacheContext cctx,
            KeyCacheObject key,
            CacheObject val,
            GridCacheVersion ver,
            long expireTime,
            MvccSnapshot mvccSnapshot,
            @Nullable CacheEntryPredicate filter,
            EntryProcessor entryProc,
            Object[] invokeArgs,
            boolean primary,
            boolean needHist,
            boolean noCreate,
            boolean needOldVal,
            boolean retVal,
            boolean keepBinary) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param mvccSnapshot MVCC snapshot.
         * @param filter Filter.
         * @param primary {@code True} if update is executed on primary node.
         * @param needHistory Flag to collect history.
         * @param needOldVal {@code True} if need old value.
         * @param retVal Flag to return previous value.
         * @return Update result.
         * @throws IgniteCheckedException If failed.
         */
        MvccUpdateResult mvccRemove(
            GridCacheContext cctx,
            KeyCacheObject key,
            MvccSnapshot mvccSnapshot,
            @Nullable CacheEntryPredicate filter,
            boolean primary,
            boolean needHistory,
            boolean needOldVal,
            boolean retVal) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param mvccSnapshot MVCC snapshot.
         * @return Update result.
         * @throws IgniteCheckedException If failed.
         */
        MvccUpdateResult mvccLock(
            GridCacheContext cctx,
            KeyCacheObject key,
            MvccSnapshot mvccSnapshot) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @throws IgniteCheckedException If failed.
         */
        void mvccRemoveAll(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param c Closure.
         * @throws IgniteCheckedException If failed.
         */
        public void invoke(GridCacheContext cctx, KeyCacheObject key, OffheapInvokeClosure c)
            throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param val Value.
         * @param ver Version.
         * @param expireTime Expire time.
         * @param mvccVer Mvcc version.
         * @throws IgniteCheckedException If failed.
         */
        void mvccApplyUpdate(GridCacheContext cctx,
            KeyCacheObject key,
            CacheObject val,
            GridCacheVersion ver,
            long expireTime,
            MvccVersion mvccVer
        ) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @param partId Partition number.
         * @throws IgniteCheckedException If failed.
         */
        public void remove(GridCacheContext cctx, KeyCacheObject key, int partId) throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @return Data row.
         * @throws IgniteCheckedException If failed.
         */
        public CacheDataRow find(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException;

        /**
         * Returns iterator over the all row versions for the given key.
         *
         * @param cctx Cache context.
         * @param key Key.
         * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row.
         * @return Iterator over all versions.
         * @throws IgniteCheckedException If failed.
         */
        GridCursor<CacheDataRow> mvccAllVersionsCursor(GridCacheContext cctx, KeyCacheObject key, Object x)
            throws IgniteCheckedException;

        /**
         * @param cctx Cache context.
         * @param key Key.
         * @return Data row.
         * @throws IgniteCheckedException If failed.
         */
        public CacheDataRow mvccFind(GridCacheContext cctx, KeyCacheObject key, MvccSnapshot snapshot)
            throws IgniteCheckedException;

        /**
         * For testing only.
         *
         * @param cctx Cache context.
         * @param key Key.
         * @return All stored versions for given key.
         * @throws IgniteCheckedException If failed.
         */
        List<IgniteBiTuple<Object, MvccVersion>> mvccFindAllVersions(GridCacheContext cctx, KeyCacheObject key)
            throws IgniteCheckedException;

        /**
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor() throws IgniteCheckedException;

        /**
         * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(Object x) throws IgniteCheckedException;

        /**
         * @param mvccSnapshot MVCC snapshot.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(MvccSnapshot mvccSnapshot) throws IgniteCheckedException;

        /**
         * @param cacheId Cache ID.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(int cacheId) throws IgniteCheckedException;

        /**
         * @param cacheId Cache ID.
         * @param mvccSnapshot Mvcc snapshot.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(int cacheId, MvccSnapshot mvccSnapshot)
            throws IgniteCheckedException;

        /**
         * @param cacheId Cache ID.
         * @param lower Lower bound.
         * @param upper Upper bound.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(int cacheId, KeyCacheObject lower,
            KeyCacheObject upper) throws IgniteCheckedException;

        /**
         * @param cacheId Cache ID.
         * @param lower Lower bound.
         * @param upper Upper bound.
         * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(int cacheId, KeyCacheObject lower,
            KeyCacheObject upper, Object x) throws IgniteCheckedException;

        /**
         * @param cacheId Cache ID.
         * @param lower Lower bound.
         * @param upper Upper bound.
         * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row.
         * @param snapshot Mvcc snapshot.
         * @return Data cursor.
         * @throws IgniteCheckedException If failed.
         */
        public GridCursor<? extends CacheDataRow> cursor(int cacheId, KeyCacheObject lower,
            KeyCacheObject upper, Object x, MvccSnapshot snapshot) throws IgniteCheckedException;

        /**
         * Destroys the tree associated with the store.
         *
         * @throws IgniteCheckedException If failed.
         */
        public void destroy() throws IgniteCheckedException;

        /**
         * Mark store as destroyed.
         *
         * @throws IgniteCheckedException If failed.
         */
        public void markDestroyed() throws IgniteCheckedException;

        /**
         * Clears all the records associated with logical cache with given ID.
         *
         * @param cacheId Cache ID.
         * @throws IgniteCheckedException If failed.
         */
        public void clear(int cacheId) throws IgniteCheckedException;

        /**
         * @return Row store.
         */
        public RowStore rowStore();

        /**
         * @param start Counter.
         * @param delta Delta.
         */
        public void updateInitialCounter(long start, long delta);

        /**
         * Inject rows cache cleaner.
         *
         * @param rowCacheCleaner Rows cache cleaner.
         */
        public void setRowCacheCleaner(GridQueryRowCacheCleaner rowCacheCleaner);

        /**
         * Return PendingTree for data store.
         *
         * @return PendingTree instance.
         */
        public PendingEntriesTree pendingTree();

        /**
         * Flushes pending update counters closing all possible gaps.
         *
         * @return Even-length array of pairs [start, end] for each gap.
         */
        GridLongList finalizeUpdateCounters();

        /**
         * Preload a store into page memory.
         *
         * @throws IgniteCheckedException If failed.
         */
        public void preload() throws IgniteCheckedException;

        /**
         * Reset counter for partition.
         */
        void resetUpdateCounter();

        /**
         * Reset the initial value of the partition counter.
         */
        void resetInitialUpdateCounter();

        /**
         * Partition storage.
         */
        public PartitionMetaStorage<SimpleDataRow> partStorage();
    }
}
package mrriegel.limelib.helper;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.stream.Collectors;

import javax.annotation.Nullable;

import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.tuple.Pair;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import mrriegel.limelib.LimeLib;
import mrriegel.limelib.recipe.ShapedRecipeExt;
import mrriegel.limelib.recipe.ShapelessRecipeExt;
import mrriegel.limelib.util.Utils;
import net.minecraft.block.Block;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.IRecipe;
import net.minecraft.item.crafting.Ingredient;
import net.minecraft.item.crafting.ShapedRecipes;
import net.minecraft.item.crafting.ShapelessRecipes;
import net.minecraft.launchwrapper.Launch;
import net.minecraft.util.NonNullList;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StringUtils;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.common.crafting.CraftingHelper;
import net.minecraftforge.common.crafting.CraftingHelper.ShapedPrimer;
import net.minecraftforge.fml.common.Loader;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapedOreRecipe;
import net.minecraftforge.oredict.ShapelessOreRecipe;

/**
 * Registers crafting and smelting recipes for the currently active mod and, when running
 * in a deobfuscated (development) environment, writes the equivalent JSON recipe files
 * into {@code src/main/resources} so they can be shipped inside the mod jar.
 * <p>
 * One helper instance is kept per mod id (see {@link #getHelper()}).
 */
public class RecipeHelper {

    // TODO change location
    /** True when running in a deobfuscated (development) environment. */
    public static final boolean dev = (boolean) Launch.blackboard.get("fml.deobfuscatedEnvironment");

    /** One helper per mod id. */
    private static Map<String, RecipeHelper> helpers = Maps.newHashMap();

    /** Target directory for generated JSON recipe files (used in dev only). */
    private File DIR;
    /** Ore-dictionary names referenced so far; dumped by {@link #generateConstants()}. */
    private final Set<String> USED_OD_NAMES = Sets.newTreeSet();
    /** When true, recipes are registered in code instead of being generated as JSON. */
    private boolean oldway = true;

    /** Returns (creating if necessary) the helper for the currently active mod. */
    private static RecipeHelper getHelper() {
        String modid = Utils.getCurrentModID();
        RecipeHelper rh = helpers.get(modid);
        if (rh != null)
            return rh;
        rh = new RecipeHelper(modid);
        helpers.put(modid, rh);
        return rh;
    }

    private RecipeHelper(String modid) {
        DIR = new File("").toPath().resolve("../src/main/resources/assets/" + modid + "/recipes/").toFile();
        if (!DIR.exists() && dev)
            DIR.mkdirs();
        if (!dev) {
            // Outside the dev environment, check whether the shipped jar already contains
            // JSON recipes; if so, code-based registration is unnecessary.
            File jar = Loader.instance().activeModContainer().getSource();
            // try-with-resources: the original leaked the stream when getNextJarEntry threw
            try (JarInputStream jis = new JarInputStream(new FileInputStream(jar))) {
                JarEntry e = null;
                while ((e = jis.getNextJarEntry()) != null)
                    if (e.getName().equals("assets/" + modid + "/recipes/")) {
                        oldway = false;
                        break;
                    }
            } catch (IOException e) {
                // best effort: fall back to code-based registration (oldway stays true)
            }
        } else
            oldway = false;
    }

    /** @deprecated use {@link #addShapedRecipe(ItemStack, Object...)} instead */
    @Deprecated
    public static void addShapedOreRecipe(ItemStack stack, Object... input) {
        addShapedRecipe(stack, input);
    }

    /**
     * Adds a shaped recipe. Depending on the input (collections, ore-dictionary names,
     * plain items) and on whether JSON generation is available, this either registers an
     * {@link IRecipe} in code or writes a JSON recipe file.
     *
     * @param stack recipe output
     * @param input vanilla-style shaped recipe arguments (pattern strings, then key/value pairs)
     */
    public static void addShapedRecipe(ItemStack stack, Object... input) {
        RecipeHelper rh = getHelper();
        ResourceLocation rl = name(stack, input);
        if (Arrays.stream(input).anyMatch(o -> o instanceof Collection))
            addRecipe(rl, new ShapedRecipeExt(rl, stack, input));
        else if (Arrays.stream(input).anyMatch(o -> o instanceof String && OreDictionary.doesOreNameExist((String) o))) {
            if (rh.oldway)
                addRecipe(rl, new ShapedOreRecipe(rl, stack, input));
            else
                rh.addRecipe(rl, true, true, stack, input);
        } else {
            if (rh.oldway) {
                ShapedPrimer sp = CraftingHelper.parseShaped(input);
                addRecipe(rl, new ShapedRecipes("", sp.width, sp.height, sp.input, stack));
            } else
                rh.addRecipe(rl, true, false, stack, input);
        }
    }

    /** @deprecated use {@link #addShapelessRecipe(ItemStack, Object...)} instead */
    @Deprecated
    public static void addShapelessOreRecipe(ItemStack stack, Object... input) {
        addShapelessRecipe(stack, input);
    }

    /**
     * Adds a shapeless recipe; see {@link #addShapedRecipe(ItemStack, Object...)} for how
     * the registration strategy is chosen.
     *
     * @param stack recipe output
     * @param input recipe ingredients
     */
    public static void addShapelessRecipe(ItemStack stack, Object... input) {
        RecipeHelper rh = getHelper();
        ResourceLocation rl = name(stack, input);
        if (Arrays.stream(input).anyMatch(o -> o instanceof Collection))
            addRecipe(rl, new ShapelessRecipeExt(rl, stack, input));
        else if (Arrays.stream(input).anyMatch(o -> o instanceof String && OreDictionary.doesOreNameExist((String) o))) {
            if (rh.oldway)
                addRecipe(rl, new ShapelessOreRecipe(rl, stack, input));
            else
                rh.addRecipe(rl, false, true, stack, input);
        } else {
            if (rh.oldway) {
                Ingredient[] ings = Arrays.stream(input).map(CraftingHelper::getIngredient).filter(Objects::nonNull).toArray(Ingredient[]::new);
                addRecipe(rl, new ShapelessRecipes("", stack, NonNullList.from(Ingredient.EMPTY, ings)));
            } else
                rh.addRecipe(rl, false, false, stack, input);
        }
    }

    /**
     * Registers a custom (non-vanilla) recipe implementation directly.
     *
     * @param recipe recipe to register; must not be a vanilla recipe class
     */
    public static void add(IRecipe recipe) {
        Validate.isTrue(!recipe.getClass().getName().startsWith("net.minecraft"), "Use JSON instead");
        ResourceLocation rl = name(recipe.getRecipeOutput(), (Object[]) recipe.getIngredients().toArray(new Ingredient[0]));
        addRecipe(rl, recipe);
    }

    /** Assigns the registry name and registers the recipe via {@code RegistryHelper}. */
    private static void addRecipe(ResourceLocation rl, IRecipe recipe) {
        Validate.isTrue(!recipe.getRecipeOutput().isEmpty());
        recipe.setRegistryName(rl);
        RegistryHelper.register(recipe);
    }

    /**
     * Writes a shaped/shapeless crafting recipe as a JSON file into {@link #DIR}
     * (dev environment only; no-op otherwise).
     *
     * @author williewillus (partly)
     */
    private void addRecipe(ResourceLocation rl, boolean shaped, boolean ore, ItemStack stack, Object... input) {
        if (!dev)
            return;
        Map<String, Object> json = Maps.newHashMap();
        if (shaped) {
            // leading Strings form the pattern; the rest are (char, ingredient) pairs
            List<String> pattern = Lists.newArrayList();
            int i = 0;
            while (i < input.length && input[i] instanceof String) {
                pattern.add((String) input[i]);
                i++;
            }
            json.put("pattern", pattern);
            Map<String, Map<String, Object>> key = Maps.newHashMap();
            Character curKey = null;
            for (; i < input.length; i++) {
                Object o = input[i];
                if (o instanceof Character) {
                    if (curKey != null)
                        throw new IllegalArgumentException("Provided two char keys in a row");
                    curKey = (Character) o;
                } else {
                    if (curKey == null)
                        throw new IllegalArgumentException("Providing object without a char key");
                    key.put(Character.toString(curKey), serializeItem(o));
                    curKey = null;
                }
            }
            json.put("key", key);
        } else
            json.put("ingredients", Arrays.stream(input).map(this::serializeItem).collect(Collectors.toList()));
        json.put("type", shaped ? (ore ? "forge:ore_shaped" : "minecraft:crafting_shaped") : (ore ? "forge:ore_shapeless" : "minecraft:crafting_shapeless"));
        json.put("result", serializeItem(stack));
        if (!stack.isEmpty()) {
            File f = new File(DIR, rl.getResourcePath().replace('/', '_') + ".json");
            writeToFile(f, json);
        } else
            LimeLib.log.warn("ItemStack is empty. Can't create a recipe. " + Arrays.toString(input));
    }

    /**
     * Derives a stable recipe name from the output stack plus a short hash of the
     * string form of the inputs, namespaced under the current mod id.
     */
    private static ResourceLocation name(ItemStack stack, Object... input) {
        List<String> lis = Arrays.stream(input).map(o -> {
            if (o instanceof String)
                return o.toString();
            if (o instanceof Item)
                return ((Item) o).getRegistryName().getResourcePath();
            if (o instanceof Block)
                return ((Block) o).getRegistryName().getResourcePath();
            if (o instanceof ItemStack)
                return ((ItemStack) o).getItem().getRegistryName().getResourcePath();
            if (o instanceof Ingredient)
                return Joiner.on(" ").join(Arrays.stream(((Ingredient) o).getMatchingStacks()).map(s -> s.getItem().getRegistryName().getResourcePath()).sorted().collect(Collectors.toList()));
            return "";
        }).collect(Collectors.toList());
        return new ResourceLocation(Utils.getCurrentModID(), stack.getItem().getRegistryName().getResourcePath() + "/" + stack.getItemDamage() + "_" + stack.getCount() + "_" + (Math.abs(lis.hashCode()) % 9999));
    }

    /**
     * Resolves {@code obj} to an {@link Ingredient}, falling back to a compound
     * ingredient when {@code obj} is a collection of ingredient-like objects.
     */
    public static Ingredient getIngredient(Object obj) {
        Ingredient ret = CraftingHelper.getIngredient(obj);
        if (ret != null)
            return ret;
        List<Ingredient> lis = Lists.newArrayList();
        if (obj instanceof Collection)
            for (Object o : (Collection<?>) obj)
                lis.add(CraftingHelper.getIngredient(o));
        return new CompoundIngredient(lis);
    }

    /**
     * Serializes an item/block/stack/ore-dictionary name into the JSON map form used by
     * vanilla/Forge recipe files. Ore-dictionary names are recorded in
     * {@link #USED_OD_NAMES} and referenced as {@code #NAME} constants.
     *
     * @author williewillus
     */
    private Map<String, Object> serializeItem(Object thing) {
        if (thing instanceof Item) {
            return serializeItem(new ItemStack((Item) thing));
        }
        if (thing instanceof Block) {
            return serializeItem(new ItemStack((Block) thing));
        }
        if (thing instanceof ItemStack) {
            ItemStack stack = (ItemStack) thing;
            Map<String, Object> ret = Maps.newHashMap();
            ret.put("item", stack.getItem().getRegistryName().toString());
            if (stack.getItem().getHasSubtypes() || stack.getItemDamage() != 0) {
                ret.put("data", stack.getItemDamage());
            }
            if (stack.getCount() > 1) {
                ret.put("count", stack.getCount());
            }
            if (stack.hasTagCompound()) {
                throw new IllegalArgumentException("nbt not implemented");
            }
            return ret;
        }
        if (thing instanceof String) {
            Map<String, Object> ret = Maps.newHashMap();
            USED_OD_NAMES.add((String) thing);
            ret.put("item", "#" + ((String) thing).toUpperCase());
            return ret;
        }
        throw new IllegalArgumentException("Not a block, item, stack, or od name");
    }

    /**
     * Writes a {@code _constants.json} file per mod listing every ore-dictionary name
     * that was referenced by a generated recipe (dev environment only).
     *
     * @author williewillus
     */
    public static void generateConstants() {
        if (!dev)
            return;
        for (RecipeHelper rh : helpers.values()) {
            List<Map<String, Object>> json = Lists.newArrayList();
            for (String s : rh.USED_OD_NAMES) {
                Map<String, Object> entry = new HashMap<>();
                entry.put("name", s.toUpperCase());
                entry.put("ingredient", ImmutableMap.of("type", "forge:ore_dict", "ore", s));
                json.add(entry);
            }
            if (!rh.USED_OD_NAMES.isEmpty()) {
                File file = new File(rh.DIR, "_constants.json");
                writeToFile(file, json);
            }
        }
    }

    /**
     * Serializes {@code o} as JSON into {@code file}, but only when the content actually
     * changed (avoids needless file churn in the dev workspace).
     */
    private static void writeToFile(File file, Object o) {
        String newJson = Utils.getGSON().toJson(o).trim();
        // Bug fix: oldJson used to start as null, so a read failure below caused an NPE
        // at the equals() comparison; defaulting to "" makes a failed read rewrite the file.
        String oldJson = "";
        try {
            // readAllLines closes the file itself (the previous Files.lines stream was never closed)
            oldJson = !file.exists() ? "" : String.join(Configuration.NEW_LINE, Files.readAllLines(file.toPath())).trim();
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (!oldJson.equals(newJson)) {
            // NOTE(review): FileWriter uses the platform charset; recipe JSON is presumably
            // expected to be UTF-8 — confirm before shipping on non-UTF-8 dev machines.
            try (FileWriter fw = new FileWriter(file)) {
                Utils.getGSON().toJson(o, fw);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Compound ingredient that silently drops null children. */
    private static class CompoundIngredient extends net.minecraftforge.common.crafting.CompoundIngredient {
        protected CompoundIngredient(Collection<Ingredient> children) {
            super(children.stream().filter(i -> i != null).collect(Collectors.toList()));
        }
    }

    // ------------------------------------------------------------------
    // 1.13-style generation (experimental, currently disabled)
    // ------------------------------------------------------------------

    /** Queued 1.13-style recipes per mod id: (file name, JSON text) pairs. */
    private static final Map<String, List<Pair<String, String>>> recipes = new HashMap<>();

    /**
     * Writes all queued 1.13-style recipes into {@code src/main/resources/data/...}.
     * Currently disabled via the {@code if (true) return;} guard.
     */
    public static void generateFiles() {
        if (!dev)
            return;
        if (true) // deliberately disabled for now
            return;
        try {
            for (Entry<String, List<Pair<String, String>>> e : recipes.entrySet()) {
                File jar = Loader.instance().getIndexedModList().get(e.getKey()).getSource();
                if (!jar.getPath().endsWith(".jar")) {
                    // de-duplicate file names by appending a counter
                    List<String> names = new ArrayList<>();
                    for (Pair<String, String> p : e.getValue()) {
                        String name = p.getLeft();
                        int i = 1;
                        while (names.contains(name)) {
                            name = p.getLeft() + i++;
                        }
                        names.add(name);
                    }
                    File dir = new File("").toPath().resolve("../src/main/resources/data/" + e.getKey() + "/recipes/").toFile();
                    if (!dir.exists())
                        dir.mkdirs();
                    for (int i = 0; i < names.size(); i++) {
                        Files.write(new File(dir, names.get(i) + ".json").toPath(), e.getValue().get(i).getRight().getBytes(StandardCharsets.UTF_8));
                    }
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Serializes an ingredient for the 1.13-style JSON format. Strings and
     * {@link ResourceLocation}s become tag references; items/blocks/stacks become item
     * references; collections and arrays become lists of serialized elements.
     *
     * @param o     the ingredient-like object; must not be null
     * @param count whether to emit a {@code count} entry for stacks with count &gt; 1
     */
    private static Object serializeItem2(Object o, boolean count) {
        Objects.requireNonNull(o);
        if (o instanceof String) {
            Map<String, Object> ret = new LinkedHashMap<>();
            ret.put("tag", o);
            return ret;
        }
        if (o instanceof ResourceLocation) {
            Map<String, Object> ret = new LinkedHashMap<>();
            ret.put("tag", o.toString());
            return ret;
        }
        if (o instanceof Item) {
            Map<String, Object> ret = new LinkedHashMap<>();
            ret.put("item", ((Item) o).getRegistryName().toString());
            return ret;
        }
        if (o instanceof Block) {
            Map<String, Object> ret = new LinkedHashMap<>();
            ret.put("item", ((Block) o).getRegistryName().toString());
            return ret;
        }
        if (o instanceof ItemStack) {
            ItemStack s = (ItemStack) o;
            Validate.isTrue(!s.isEmpty(), "ItemStack is empty.");
            Map<String, Object> ret = new LinkedHashMap<>();
            ret.put("item", s.getItem().getRegistryName().toString());
            if (count && s.getCount() > 1)
                ret.put("count", s.getCount());
            return ret;
        }
        if (o instanceof Collection) {
            return ((Collection<?>) o).stream().map(oo -> serializeItem2(oo, count)).collect(Collectors.toList());
        }
        if (o instanceof Object[]) {
            return Arrays.stream((Object[]) o).map(oo -> serializeItem2(oo, count)).collect(Collectors.toList());
        }
        throw new IllegalArgumentException("Argument of type " + o.getClass().getName() + " is invalid.");
    }

    /** Validates a recipe result stack. */
    private static void validate(ItemStack stack) {
        Validate.isTrue(!stack.isEmpty(), "result must not be empty");
    }

    /** True when in dev and the mod is not running from a packaged jar. */
    private static boolean valid() {
        return dev && !Loader.instance().activeModContainer().getSource().getPath().endsWith(".jar");
    }

    /**
     * Queues a 1.13-style crafting recipe (dev environment only; no-op otherwise).
     *
     * @param result recipe output; must not be empty
     * @param group  optional recipe-book group
     * @param shaped whether the recipe is shaped (pattern + keys) or shapeless
     * @param input  pattern strings and (char, ingredient) pairs for shaped recipes,
     *               plain ingredients for shapeless ones
     */
    public static void addCraftingRecipe(ItemStack result, @Nullable String group, boolean shaped, Object... input) {
        if (!dev)
            return;
        validate(result);
        Map<String, Object> json = new LinkedHashMap<>();
        json.put("type", shaped ? "minecraft:crafting_shaped" : "minecraft:crafting_shapeless");
        if (!StringUtils.isNullOrEmpty(group))
            json.put("group", group);
        if (shaped) {
            List<String> pattern = new ArrayList<>();
            int i = 0;
            while (i < input.length && input[i] instanceof String) {
                pattern.add((String) input[i]);
                i++;
            }
            json.put("pattern", pattern);
            Map<String, Object> key = new LinkedHashMap<>();
            Character curKey = null;
            for (; i < input.length; i++) {
                Object o = input[i];
                if (o instanceof Character) {
                    if (curKey != null)
                        throw new IllegalArgumentException("Provided two char keys in a row");
                    curKey = (Character) o;
                } else {
                    if (curKey == null)
                        throw new IllegalArgumentException("Providing object without a char key");
                    key.put(Character.toString(curKey), serializeItem2(o, false));
                    curKey = null;
                }
            }
            json.put("key", key);
        } else {
            json.put("ingredients", Arrays.stream(input).map(o -> serializeItem2(o, false)).collect(Collectors.toList()));
        }
        json.put("result", serializeItem2(result, true));
        addRecipe(result.getItem().getRegistryName().getResourcePath(), json);
    }

    /**
     * Queues a 1.13-style smelting recipe (dev environment only; no-op otherwise).
     *
     * @param result smelting output; must not be empty
     * @param input  smelting input (item/block/stack/tag)
     * @param exp    experience awarded
     * @param time   cooking time in ticks
     */
    public static void addSmeltingRecipe(ItemStack result, Object input, double exp, int time) {
        if (!dev)
            return;
        validate(result);
        Map<String, Object> json = new LinkedHashMap<>();
        json.put("type", "smelting");
        json.put("ingredient", serializeItem2(input, false));
        json.put("result", result.getItem().getRegistryName().toString());
        json.put("experience", exp);
        json.put("cookingtime", time);
        addRecipe(result.getItem().getRegistryName().getResourcePath(), json);
    }

    /**
     * Queues a named JSON recipe for {@link #generateFiles()} under the currently
     * active mod id.
     */
    public static void addRecipe(String name, Map<String, Object> json) {
        String id = Utils.getCurrentModID();
        // Bug fix: the previous computeIfAbsent lambda added the pair only when the list
        // was first created, so every later recipe for the same mod id was silently dropped.
        recipes.computeIfAbsent(id, s -> new ArrayList<>()).add(Pair.of(name, Utils.getGSON().toJson(json)));
    }
}
/*
 * #%L
 * This file is part of eAudit4j, a library for creating pluggable
 * auditing solutions, providing an audit processor that retrieves
 * the system time and appends a timestamp as a field to audit events.
 * %%
 * Copyright (C) 2015 - 2016 Michael Beiter <[email protected]>
 * %%
 * All rights reserved.
 * .
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the copyright holder nor the names of the
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 * .
 * .
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */
package org.beiter.michael.eaudit4j.processors.timestamp;

import org.beiter.michael.eaudit4j.common.*;
import org.beiter.michael.eaudit4j.common.impl.AuditEvent;
import org.beiter.michael.eaudit4j.common.propsbuilder.MapBasedCommonPropsBuilder;
import org.beiter.michael.eaudit4j.processors.timestamp.propsbuilder.MapBasedTimestampPropsBuilder;
import org.junit.Before;
import org.junit.Test;

import java.lang.reflect.InvocationTargetException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;

/**
 * Unit tests for {@link TimestampProcessor}: initialization, event processing in the
 * default and named audit streams, timestamp formatting, and revert/clean-up behavior.
 */
public class TimestampProcessorTest {

    // Reflective handles into TimestampProcessor internals, opened in the @Before method.
    private java.lang.reflect.Field field_commonProperties;
    private java.lang.reflect.Field field_properties;
    private java.lang.reflect.Method method_getTimestamp;

    /** Charset used to decode the timestamp field value from the event. */
    private final String encoding = "UTF-8";

    private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
    private static final String TIMESTAMP_REGEX =
            "[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}-[0-9]{4}";

    /**
     * Make some of the private fields in the TimestampProcessor class accessible.
     * <p>
     * This is executed before every test to ensure consistency even if one of the tests
     * mocks with field accessibility.
     */
    @Before
    public void makePrivateFieldsAccessible() {

        // make private fields accessible as needed
        try {
            field_commonProperties = TimestampProcessor.class.getDeclaredField("commonProperties");
            field_properties = TimestampProcessor.class.getDeclaredField("properties");
            method_getTimestamp = TimestampProcessor.class.getDeclaredMethod("getTimestamp", Date.class);
        } catch (NoSuchFieldException | NoSuchMethodException e) {
            AssertionError ae = new AssertionError("An expected private field or method does not exist");
            ae.initCause(e);
            throw ae;
        }
        field_commonProperties.setAccessible(true);
        field_properties.setAccessible(true);
        method_getTimestamp.setAccessible(true);
    }

    /**
     * Initialize the processor with 'null' properties
     */
    @Test(expected = NullPointerException.class)
    public void initWithNullPropertiesTest() {

        Processor processor = new TimestampProcessor();
        processor.init(null);
    }

    /**
     * Test that the init() method creates a defensive copy of the provided properties
     */
    @Test
    public void initPropertiesInboundDefensiveCopyTest() {

        String key = "some property";
        String value = "some value";
        Map<String, String> propsMap = new HashMap<>();
        propsMap.put(key, value);
        CommonProperties commonProps = MapBasedCommonPropsBuilder.build(propsMap);

        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        String error = "The method does not create an inbound defensive copy";
        try {
            CommonProperties commonPropsInObject = (CommonProperties) field_commonProperties.get(processor);
            assertThat(error, commonPropsInObject, is(not(sameInstance(commonProps))));
        } catch (IllegalAccessException e) {
            AssertionError ae = new AssertionError("Cannot access private field");
            ae.initCause(e);
            throw ae;
        }
    }

    /**
     * Test that the init() method correctly initializes a local copy of the Common Properties
     */
    @Test
    public void initCommonPropertiesCorrectTest() {

        String auditStreamName = "audit stream name";
        String key = "some property";
        String value = "some value";
        Map<String, String> propsMap = new HashMap<>();
        propsMap.put(key, value);

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        commonProps.setDefaultAuditStream(auditStreamName);
        commonProps.setAdditionalProperties(propsMap);

        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        String error = "The method does not retain object keys or values";
        try {
            CommonProperties commonPropsInObject = (CommonProperties) field_commonProperties.get(processor);
            assertThat(error, commonPropsInObject.getDefaultAuditStream(), is(equalTo(auditStreamName)));
            assertThat(error, commonPropsInObject.getAdditionalProperties().containsKey(key), is(true));
            assertThat(error, commonPropsInObject.getAdditionalProperties().get(key), is(equalTo(value)));
        } catch (IllegalAccessException e) {
            AssertionError ae = new AssertionError("Cannot access private field");
            ae.initCause(e);
            throw ae;
        }
    }

    /**
     * Test that the init() method correctly initializes the Timestamp Properties values
     */
    @Test
    public void initPropertiesCorrectTest() {

        String key = MapBasedTimestampPropsBuilder.KEY_TIMEZONE;
        String value = "some value";
        Map<String, String> propsMap = new HashMap<>();
        propsMap.put(key, value);

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        commonProps.setAdditionalProperties(propsMap);

        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        String error = "The method does not retain object keys or values";
        try {
            TimestampProperties propsInObject = (TimestampProperties) field_properties.get(processor);
            assertThat(error, propsInObject.getTimezone(), is(equalTo(value)));
        } catch (IllegalAccessException e) {
            AssertionError ae = new AssertionError("Cannot access private field");
            ae.initCause(e);
            throw ae;
        }
    }

    ///////////////////////////////////////////////////////////////////////////
    // Process events in the default audit stream
    ///////////////////////////////////////////////////////////////////////////

    /**
     * Test processing a null Event
     * <p>
     * (should throw an AuditException if things go well)
     */
    @Test(expected = AuditException.class)
    public void processNullEventInDefaultAuditStreamTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = null;
        processor.process(event);
    }

    ///////////////////////////////////////////////////////////////////////////
    // Process events in a provided audit stream, with default processing objects
    ///////////////////////////////////////////////////////////////////////////

    /**
     * Test processing a null Event
     * <p>
     * (should throw an AuditException if things go well)
     */
    @Test(expected = AuditException.class)
    public void processNullEventInProvidedAuditStreamTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = null;
        String auditStream = "some audit stream";
        processor.process(event, auditStream);
    }

    /**
     * Test processing in a null audit stream
     * <p>
     * (should throw a NullPointerException if things go well)
     */
    @Test(expected = NullPointerException.class)
    public void processEventInProvidedNullAuditStreamTest() throws AuditException {

        String auditStream = null;
        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();
        processor.process(event, auditStream);
    }

    /**
     * Test processing in a blank audit stream
     * <p>
     * (should throw an IllegalArgumentException if things go well)
     */
    @Test(expected = IllegalArgumentException.class)
    public void processEventInProvidedBlankAuditStreamTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();
        String auditStream = "";
        processor.process(event, auditStream);
    }

    ///////////////////////////////////////////////////////////////////////////
    // Process events in a provided audit stream, with provided processing objects
    ///////////////////////////////////////////////////////////////////////////

    /**
     * Test processing a null Event
     * <p>
     * (should throw an AuditException if things go well)
     */
    @Test(expected = AuditException.class)
    public void processNullEventWithProcessingObjectsTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = null;
        String auditStream = "some audit stream";
        ProcessingObjects processingObjects = new ProcessingObjects();
        processor.process(event, auditStream, processingObjects);
    }

    /**
     * Test processing in a null audit stream
     * <p>
     * (should throw a NullPointerException if things go well)
     */
    @Test(expected = NullPointerException.class)
    public void processEventInNullAuditStreamWithProcessingObjectsTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();
        String auditStream = null;
        ProcessingObjects processingObjects = new ProcessingObjects();
        processor.process(event, auditStream, processingObjects);
    }

    /**
     * Test processing in a blank audit stream
     * <p>
     * (should throw an IllegalArgumentException if things go well)
     */
    @Test(expected = IllegalArgumentException.class)
    public void processEventInBlankAuditStreamWithProcessingObjectsTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();
        String auditStream = "";
        ProcessingObjects processingObjects = new ProcessingObjects();
        processor.process(event, auditStream, processingObjects);
    }

    /**
     * Test processing with a null set of processing objects
     * <p>
     * (should throw a NullPointerException if things go well)
     */
    @Test(expected = NullPointerException.class)
    public void processEventWithNullProcessingObjectsTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();
        String auditStream = "some audit stream";
        ProcessingObjects processingObjects = null;
        processor.process(event, auditStream, processingObjects);
    }

    /**
     * Test that the processor adds a timestamp to an event.
     */
    @Test
    public void addTimestampTest() throws AuditException {

        Map<String, String> props = new HashMap<>();
        props.put(MapBasedTimestampPropsBuilder.KEY_FORMAT, TIMESTAMP_FORMAT);
        CommonProperties commonProps = MapBasedCommonPropsBuilder.build(props);

        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();
        processor.process(event);

        String error = "The method does not add a timestamp";
        assertThat(error, event.containsField(MapBasedTimestampPropsBuilder.DEFAULT_EVENT_FIELD_NAME),
                is(equalTo(true)));

        error = "The timestamp field has the wrong format";
        assertThat(
                error,
                String.valueOf(event.getField(MapBasedTimestampPropsBuilder.DEFAULT_EVENT_FIELD_NAME)
                        .getCharValue(encoding)).matches(TIMESTAMP_REGEX),
                is(true));
    }

    /**
     * Test that the processor refuses to overwrite a timestamp if such a field would already been
     * present in an event (i.e. this processor makes sure it is only executed once per audit chain)
     */
    @Test(expected = AuditException.class)
    public void doubleTimestampTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();

        // create a first timestamp, and validate that it is there
        processor.process(event);
        String error = "The method does not add a timestamp";
        assertThat(error, event.containsField(MapBasedTimestampPropsBuilder.DEFAULT_EVENT_FIELD_NAME),
                is(equalTo(true)));

        // create a second timestamp, this should throw an exception
        processor.process(event);
    }

    /**
     * Test that the method computing the timestamp creates the timestamp properly.
     * <p>
     * This test uses a static input timestamp (ie. a static return of Date() at a specific point
     * in time), and then ensures that the String representation of that timestamp is returned in
     * the correct format and timezone.
     */
    @Test
    public void getTimestampTest() throws InvocationTargetException, IllegalAccessException {

        // use an uppercase 'L' suffix - the lowercase 'l' is easily misread as the digit '1'
        Date date = new Date(303866400000L);

        getTimestamp(date, "UTC", "1979-08-18T23:20:00.000+0000");
        getTimestamp(date, "America/Denver", "1979-08-18T17:20:00.000-0600");
        getTimestamp(date, "Europe/Berlin", "1979-08-19T00:20:00.000+0100");
    }

    // The actual test implementation of getTimestampTest()
    private void getTimestamp(Date date, String timezone, String expected)
            throws IllegalAccessException, InvocationTargetException {

        Map<String, String> props = new HashMap<>();
        props.put(MapBasedTimestampPropsBuilder.KEY_TIMEZONE, timezone);
        props.put(MapBasedTimestampPropsBuilder.KEY_FORMAT, TIMESTAMP_FORMAT);
        CommonProperties commonProps = MapBasedCommonPropsBuilder.build(props);

        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        String actualTimestamp = (String) method_getTimestamp.invoke(processor, date);

        String error = "The timestamp is null or empty";
        assertThat(error, actualTimestamp, is(not(nullValue())));
        assertThat(error, actualTimestamp.length(), is(greaterThan(0)));

        error = "The timestamp does not match the expected value";
        assertThat(error, actualTimestamp, is(equalTo(expected)));
    }

    ///////////////////////////////////////////////////////////////////////////
    // Revert & clean up tests
    ///////////////////////////////////////////////////////////////////////////

    /**
     * Test that the revert() method can handle null events
     */
    @Test(expected = NullPointerException.class)
    public void revertNullEventTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = null;

        // revert the operation (remove the timestamp) from a null Event
        ((Reversible) processor).revert(event);
    }

    /**
     * Test that the revert() method removes the timestamp field from the event
     */
    @Test
    public void revertRemovesTimestampTest() throws AuditException {

        CommonProperties commonProps = MapBasedCommonPropsBuilder.buildDefault();
        Processor processor = new TimestampProcessor();
        processor.init(commonProps);

        Event event = new AuditEvent();

        // create a timestamp, and validate that it is there
        processor.process(event);
        String error = "The method does not add a timestamp";
        assertThat(error, event.containsField(MapBasedTimestampPropsBuilder.DEFAULT_EVENT_FIELD_NAME),
                is(equalTo(true)));

        // revert the operation (remove the timestamp), and validate that it is gone
        Event modifiedEvent = ((Reversible) processor).revert(event);
        error = "The method does not remove the timestamp";
        assertThat(error, modifiedEvent.containsField(MapBasedTimestampPropsBuilder.DEFAULT_EVENT_FIELD_NAME),
                is(equalTo(false)));
    }

    /**
     * Clean up test
     * <p>
     * This test does not really do anything, but the cleanUp() method does not do anything
     * either, so I guess that is okay to get test coverage up :)
     */
    @Test
    public void cleanUpTest() {

        Processor processor = new TimestampProcessor();
        processor.cleanUp();
    }
}
/* $Id: WebPageOutputOptions.java 742 2012-05-07 13:09:53Z davemckain $
 *
 * Copyright (c) 2008-2011, The University of Edinburgh.
 * All Rights Reserved
 */
package uk.ac.ed.ph.snuggletex;

import static uk.ac.ed.ph.snuggletex.internal.util.ObjectUtilities.concat;

import uk.ac.ed.ph.snuggletex.definitions.W3CConstants;
import uk.ac.ed.ph.snuggletex.internal.util.ConstraintUtilities;

import javax.xml.transform.Transformer;

/**
 * Builds on {@link XMLStringOutputOptions} to add in options for configuring how to build a
 * web page using the relevant methods in {@link SnuggleSession}
 * (e.g. {@link SnuggleSession#buildWebPage(WebPageOutputOptions)}).
 * <p>
 * As of SnuggleTeX 1.3, you will generally want to use the various static methods in
 * {@link WebPageOutputOptionsBuilder} to create instances of this class that you can tweak
 * slightly.
 * <p>
 * The older {@link WebPageOutputOptionsTemplates} is now deprecated and will be removed in
 * SnuggleTeX 1.4.
 *
 * @see DOMOutputOptions
 * @see XMLStringOutputOptions
 * @see WebPageOutputOptionsBuilder
 * @see WebPageOutputOptionsTemplates
 *
 * @author David McKain
 * @version $Revision: 742 $
 */
@SuppressWarnings("javadoc")
public class WebPageOutputOptions extends XMLStringOutputOptions {

    /** Default content type */
    public static final String DEFAULT_CONTENT_TYPE = "application/xhtml+xml";

    /** Default language */
    public static final String DEFAULT_LANG = "en";

    /**
     * Enumerates the different web page "types" supported. This is used to tweak certain parts of
     * the page generation process. You should avoid setting this explicitly unless you know what
     * you're doing - use {@link WebPageOutputOptionsBuilder} instead.
     *
     * @deprecated Use {@link WebPageOutputOptionsBuilder} to generate different types of web pages.
     *   More properties have been added to this class to control some of the finer aspects of this.
     */
    @Deprecated
    public static enum WebPageType {

        /**
         * Mozilla-compatible output. XHTML + MathML; no XML declaration; no DOCTYPE.
         * <p>
         * This is intended to be served as <tt>application/xhtml+xml</tt> with
         * encoding declared via HTTP header and <tt>meta</tt> element.
         * <p>
         * This is the best option for serving content exclusively on Mozilla-based browsers.
         * <p>
         * This will display as an XML tree on IE, which is not useful.
         *
         * @deprecated Use {@link WebPageOutputOptionsBuilder#createMozillaSpecificOptions()} if
         *   you really need this, otherwise consider some of the MathJax outputs for better
         *   cross-browser impact.
         */
        @Deprecated
        MOZILLA,

        /**
         * "Cross-browser" XHTML + MathML; has XML declaration and DOCTYPE declaration
         * consisting of the Public identifier defined in {@link W3CConstants#XHTML_11_MATHML_20_PUBLIC_IDENTIFIER}
         * and System identifier defined in {@link W3CConstants#XHTML_11_MATHML_20_SYSTEM_IDENTIFIER}.
         * The <tt>charset</tt> is declared only in the <tt>meta</tt> element in order
         * to appease MathPlayer.
         * <p>
         * Intended to be served as <tt>application/xhtml+xml</tt>
         * <p>
         * Works on both Mozilla and IE6/7 (<strong>provided</strong> MathPlayer has been installed).
         * This will display wrongly on IE6/7 if MathPlayer is not installed.
         * <p>
         * The main issue with this is that IE will want to download the relevant DTD, which
         * hinders performance slightly.
         *
         * @deprecated Use {@link WebPageOutputOptionsBuilder#createLegacyCrossBrowserOptions()}
         *   if you really need this, otherwise consider some of the MathJax outputs for better
         *   cross-browser impact.
         */
        @Deprecated
        CROSS_BROWSER_XHTML,

        /**
         * HTML + MathML intended for Internet Explorer 6/7 with the MathPlayer plug-in.
         * <p>
         * Intended to be served as <tt>text/html</tt>.
         * <p>
         * This only works on IE clients with the MathPlayer plug-in preinstalled,
         * but is a good option if that's your target audience.
         * <p>
         * This will display wrongly on IE6/7 if MathPlayer is not installed.
         *
         * @deprecated See {@link WebPageOutputOptionsBuilder#createIEMathPlayerSpecificOptions()}
         *   if you really need this, otherwise consider some of the MathJax outputs for better
         *   cross-browser impact.
         */
        @Deprecated
        MATHPLAYER_HTML,

        //----------------------------------------------------------
        // The following require further configuration

        /**
         * "Cross-browser" XHTML + MathML suitable for Mozilla and Internet Explorer 6/7.
         * Intended to be used in conjunction with the client-side Universal StyleSheet XSLT
         * to accommodate the two cases, prompting
         * for the download of MathPlayer on IE6/7 if it is not already installed.
         * <p>
         * Page is created with an XML declaration but no DOCTYPE declaration.
         * <p>
         * The <strong>pref:renderer</strong> attribute on the <tt>html</tt> element will be set
         * to <tt>mathplayer-dl</tt>.
         * <p>
         * You <strong>MUST</strong> also call
         * {@link #setClientSideXSLTStylesheetURLs(String...)}
         * to indicate where the USS is going to be loaded from. This <strong>MUST</strong>
         * be on a server local to the document you are serving from, because IE enforces
         * a "same origin" policy for loading XSLT stylesheets. If you don't do
         * this, your page will not work on IE.
         *
         * <h2>Notes</h2>
         *
         * The SnuggleTeX source distribution contains a slightly fixed version of the
         * USS that works in IE7 that you can use if you like.
         *
         * @deprecated Use {@link WebPageOutputOptionsBuilder#createUniversalStylesheetOptions(String)}
         *   if you really need this, otherwise consider some of the MathJax outputs for better
         *   cross-browser impact.
         */
        @Deprecated
        UNIVERSAL_STYLESHEET,

        /**
         * XHTML + MathML containing one or more processing instructions designed to invoke
         * client-side XSLT. No XML declaration and no DOCTYPE.
         * <p>
         * Intended to be served as <tt>application/xhtml+xml</tt>.
         * <p>
         * Combining this with the Universal Math Stylesheet or something similar can give
         * good cross-browser results.
         *
         * @deprecated Use {@link WebPageOutputOptionsBuilder#createUniversalStylesheetOptions(String)}
         *   if you really need this, otherwise consider some of the MathJax outputs for better
         *   cross-browser impact.
         */
        @Deprecated
        CLIENT_SIDE_XSLT_STYLESHEET,

        /**
         * HTML deemed suitable for use by any User Agent.
         * <p>
         * Intended to be served as <tt>text/html</tt>.
         * <p>
         * You will have to use a suitable {@link DOMPostProcessor} to convert any MathML islands
         * into other forms. (E.g. replace by an applet, replace by images, ...)
         *
         * @deprecated Use {@link WebPageOutputOptionsBuilder#createHTML4Options()}
         */
        @Deprecated
        PROCESSED_HTML,

        ;
    }

    /**
     * Desired "type" of web page to be constructed.
     *
     * @deprecated As of SnuggleTeX 1.3.0, this property no longer controls anything. This will be
     *   removed in SnuggleTeX 1.4
     *
     * @see WebPageOutputOptionsBuilder
     */
    @SuppressWarnings("unused")
    @Deprecated
    private WebPageType webPageType;

    /**
     * MIME type for the resulting page.
     * <p>
     * Default is {@link #DEFAULT_CONTENT_TYPE}.
     * <p>
     * This must not be null.
     */
    private String contentType;

    /**
     * Language code for the resulting page.
     * <p>
     * Default is {@link #DEFAULT_LANG}.
     * <p>
     * This may be set to null
     */
    private String lang;

    /**
     * Title for the resulting page.
     * Default is null.
     * If null, then a boilerplate title is added.
     */
    private String title;

    /**
     * Indicates whether page title should be inserted at the start of the web page
     * body as an XHTML <tt>h1</tt> element. This has no effect if title is null.
     * <p>
     * Default is false.
     */
    private boolean addingTitleHeading;

    /**
     * Indicates whether to include the necessary processing instruction and <tt>object</tt>
     * element required to explicitly trigger the MathPlayer plugin.
     * <p>
     * The default is false.
     *
     * @since 1.3.0
     */
    private boolean addingMathPlayerImport;

    /**
     * Value of the optional <tt>pref:renderer</tt> attribute
     * (in the {@link W3CConstants#MATHML_PREF_NAMESPACE})
     * that can be added to the HTML root element to control certain aspects of the
     * client-side
     * <a href="http://www.w3.org/Math/XSL/Overview-tech.html">Universal StyleSheets for MathML</a>
     * <p>
     * The default is null, indicating that no attribute will be added
     *
     * @since 1.3.0
     */
    private String mathPrefRenderer;

    /**
     * Set to include SnuggleTeX-related CSS as a <tt>style</tt> element within the resulting
     * page. If you choose not to do this, you probably want to put <tt>snuggletex.css</tt>
     * somewhere accessible and pass its location in {@link #clientSideXSLTStylesheetURLs}.
     * <p>
     * Default is true, as that's the simplest way of getting up to speed quickly.
     */
    private boolean includingStyleElement;

    /**
     * Array of relative URLs specifying client-side CSS stylesheets to be specified in the
     * resulting page.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * The caller can use this to specify the location of <tt>snuggletex.css</tt>, as well
     * as any other required stylesheets.
     */
    private String[] cssStylesheetURLs;

    /**
     * Array of relative URLs specifying client-side XSLT stylesheets to be specified in the
     * resulting page.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * This is ignored for {@link WebPageType#MATHPLAYER_HTML}. Also, if nothing is set
     * here for a {@link WebPageType#CLIENT_SIDE_XSLT_STYLESHEET} then {@link WebPageType#MOZILLA}
     * will be used as a template instead.
     */
    private String[] clientSideXSLTStylesheetURLs;

    /**
     * Optional JAXP {@link Transformer}s representing XSLT stylesheet(s) that
     * will be applied to the resulting web page once it has been built but
     * before it is serialised. This can be useful if you want to add in headers
     * and footers to the resulting XHTML web page.
     * <p>
     * Remember that the XHTML is all in its correct namespace so you will need
     * to write your stylesheet appropriately. Ensure that any further XHTML you
     * generate is also in the correct namespace; it will later be converted to
     * no-namespace HTML if required by the serialisation process.
     * <p>
     * <strong>NOTE:</strong> Source documents may contain Processing
     * Instructions (e.g. to invoke MathPlayer) so these must be handled as
     * appropriate.
     * <p>
     * If null or empty, then no stylesheet is applied.
     */
    private Transformer[] stylesheets;

    /**
     * Determines whether to generate HTML (or XHTML 5) output, which uses a "charset" meta
     * attribute and a different DOCTYPE declaration.
     * <p>
     * Note that if set to true, then this will override whatever is returned by
     * {@link #getDoctypePublic()} and {@link #getDoctypeSystem()}.
     * <p>
     * The default is false.
     *
     * @since 1.3.0
     */
    private boolean html5;

    /**
     * Determines whether to omit character set details in the Content-Type HTTP header when
     * streaming web pages.
     * <p>
     * MathPlayer can only handle application/xhtml+xml without a "charset" clause, so this should
     * be used in those cases.
     *
     * @since 1.3.0
     */
    private boolean noCharsetInContentTypeHeader;

    /**
     * Determines whether to add the required JavaScript to invoke MathJax to render the
     * resulting web page.
     * <p>
     * The default is false.
     *
     * @since 1.3.0
     */
    private boolean mathJax;

    /**
     * Specifies a custom MathJax URL to use when invoking MathJax. Use this if you want to use
     * a local installation of MathJax, or want to specify custom configuration directives.
     *
     * See the MathJax <a href="http://www.mathjax.org/docs/2.0/start.html#mathjax-cdn">Getting Started</a>
     * documentation for more details.
     * <p>
     * The default is null, which will use the MathJax CDN with a reasonable default configuration
     * if {@link #isMathJax()} returns true.
     *
     * @since 1.3.0
     */
    private String customMathJaxUrl;

    public WebPageOutputOptions() {
        super();
        this.webPageType = WebPageType.MOZILLA;
        this.contentType = DEFAULT_CONTENT_TYPE;
        this.lang = DEFAULT_LANG;
        this.title = null;
        this.addingTitleHeading = false;
        this.addingMathPlayerImport = false;
        this.mathPrefRenderer = null;
        this.includingStyleElement = true;
        this.cssStylesheetURLs = null;
        this.clientSideXSLTStylesheetURLs = null;
        this.stylesheets = null;
        this.html5 = false;
        this.noCharsetInContentTypeHeader = false;
        this.mathJax = false;
        this.customMathJaxUrl = null;
    }

    /**
     * Before SnuggleTeX 1.3.0, this returned the desired "type" of web page to be constructed.
     * <p>
     * From SnuggleTeX 1.3.0, this property no longer controls anything.
     *
     * @deprecated Use various methods in {@link WebPageOutputOptionsBuilder} to generate
     *   suitable options for various types of pages, which you can then tweak.
     */
    @Deprecated
    public WebPageType getWebPageType() {
        return WebPageType.MOZILLA;
    }

    /**
     * Before SnuggleTeX 1.3.0, this returned the desired "type" of web page to be constructed.
     * <p>
     * From SnuggleTeX 1.3.0, this property no longer controls anything.
     *
     * @deprecated Use various methods in {@link WebPageOutputOptionsBuilder} to generate
     *   suitable options for various types of pages, which you can then tweak.
     */
    @Deprecated
    public void setWebPageType(WebPageType webPageType) {
        ConstraintUtilities.ensureNotNull(webPageType, "webPageType");
        this.webPageType = webPageType;
    }


    /**
     * Returns the MIME type for the resulting page.
     * <p>
     * Defaults to {@link #DEFAULT_CONTENT_TYPE}.
     */
    public String getContentType() {
        return contentType;
    }

    /**
     * Sets the MIME type for the resulting page.
     *
     * @param contentType desired contentType, which must not be null.
     */
    public void setContentType(String contentType) {
        ConstraintUtilities.ensureNotNull(contentType, "contentType");
        this.contentType = contentType;
    }


    /**
     * Returns the language of the resulting page, null if not set.
     * <p>
     * Defaults to {@link #DEFAULT_LANG}.
     */
    public String getLang() {
        return lang;
    }

    /**
     * Sets the language of the resulting page.
     *
     * @param lang desired language, which may be null.
     */
    public void setLang(String lang) {
        this.lang = lang;
    }


    /**
     * Returns the title for the resulting page, null if not set.
     * <p>
     * Default is null.
     * <p>
     * This is used to generate a <tt>title</tt> and possibly a <tt>h1</tt>
     * header if {@link #isAddingTitleHeading()} returns true.
     */
    public String getTitle() {
        return title;
    }

    /**
     * Sets the title for the resulting page.
     * <p>
     * This is used to generate a <tt>title</tt> and possibly a <tt>h1</tt>
     * header if {@link #isAddingTitleHeading()} returns true.
     *
     * @param title title for the required page, which may be null to indicate
     *   that no title should be included.
     */
    public void setTitle(String title) {
        this.title = title;
    }


    /**
     * Returns whether page title should be inserted at the start of the web page
     * body as an XHTML <tt>h1</tt> element.
     * <p>
     * Default is false.
     * <p>
     * This has no effect if {@link #getTitle()} returns null.
     */
    public boolean isAddingTitleHeading() {
        return addingTitleHeading;
    }

    /**
     * Sets whether page title should be inserted at the start of the web page
     * body as an XHTML <tt>h1</tt> element.
     * <p>
     * This has no effect if {@link #getTitle()} returns null.
     *
     * @param addingTitleHeading true to add a title header if a title has been set, false otherwise.
     */
    public void setAddingTitleHeading(boolean addingTitleHeading) {
        this.addingTitleHeading = addingTitleHeading;
    }


    /**
     * Returns whether to include the necessary processing instruction and <tt>object</tt>
     * element required to explicitly trigger the MathPlayer plugin.
     *
     * @since 1.3.0
     *
     * @return true if adding MathPlayer import gubbins, false otherwise.
     */
    public boolean isAddingMathPlayerImport() {
        return addingMathPlayerImport;
    }

    /**
     * Sets whether to include the necessary processing instruction and <tt>object</tt>
     * element required to explicitly trigger the MathPlayer plugin.
     *
     * @since 1.3.0
     *
     * @param addingMathPlayerImport true to add gubbins for importing MathPlayer, false otherwise.
     */
    public void setAddingMathPlayerImport(boolean addingMathPlayerImport) {
        this.addingMathPlayerImport = addingMathPlayerImport;
    }


    /**
     * Gets the value of the optional <tt>pref:renderer</tt> attribute
     * (in the {@link W3CConstants#MATHML_PREF_NAMESPACE})
     * that can be added to the HTML root element to control certain aspects of the
     * client-side
     * <a href="http://www.w3.org/Math/XSL/Overview-tech.html">Universal StyleSheets for MathML</a>
     * <p>
     * The default is null, indicating that no attribute will be added
     *
     * @since 1.3.0
     */
    public String getMathPrefRenderer() {
        return mathPrefRenderer;
    }

    /**
     * Sets the value of the optional <tt>pref:renderer</tt> attribute
     * (in the {@link W3CConstants#MATHML_PREF_NAMESPACE})
     * that can be added to the HTML root element to control certain aspects of the
     * client-side
     * <a href="http://www.w3.org/Math/XSL/Overview-tech.html">Universal StyleSheets for MathML</a>
     * <p>
     * A null value is allowed, which prevents such an attribute being added.
     *
     * @since 1.3.0
     */
    public void setMathPrefRenderer(String mathPrefRenderer) {
        this.mathPrefRenderer = mathPrefRenderer;
    }


    /**
     * Returns whether to include SnuggleTeX-related CSS as a <tt>style</tt> element within the
     * resulting page. If you choose not to do this, you probably want to put <tt>snuggletex.css</tt>
     * somewhere accessible and pass its location in via {@link #setClientSideXSLTStylesheetURLs(String...)}.
     * <p>
     * As of SnuggleTeX 1.2.3, this option is ignored if {@link #isInliningCSS()} returns true as its
     * effect is clearly redundant in this case.
     * <p>
     * Default is true, as that's the simplest way of getting up to speed quickly.
     */
    public boolean isIncludingStyleElement() {
        return includingStyleElement;
    }

    /**
     * Sets whether to include SnuggleTeX-related CSS as a <tt>style</tt> element within the
     * resulting page. If you choose not to do this, you probably want to put <tt>snuggletex.css</tt>
     * somewhere accessible and pass its location in via {@link #setClientSideXSLTStylesheetURLs(String...)}.
     * <p>
     * As of SnuggleTeX 1.2.3, this option is ignored if {@link #isInliningCSS()} returns true as its
     * effect is clearly redundant in this case.
     *
     * @param includingStyleElement set to true to include a <tt>style</tt> element, false otherwise.
     */
    public void setIncludingStyleElement(boolean includingStyleElement) {
        this.includingStyleElement = includingStyleElement;
    }


    /**
     * Returns specified array of relative URLs specifying client-side CSS stylesheets to be
     * referenced in the resulting page.
     * <p>
     * Default is null.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * The caller can use this to specify the location of <tt>snuggletex.css</tt>, as well
     * as any other required stylesheets.
     */
    public String[] getCSSStylesheetURLs() {
        return cssStylesheetURLs;
    }

    /**
     * Specifies an array of relative URLs specifying client-side CSS stylesheets to be
     * referenced in the resulting page.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * The caller can use this to specify the location of <tt>snuggletex.css</tt>, as well
     * as any other required stylesheets.
     *
     * @param cssStylesheetURLs array of CSS stylesheet URLs, which may be empty
     */
    public void setCSSStylesheetURLs(String... cssStylesheetURLs) {
        this.cssStylesheetURLs = cssStylesheetURLs;
    }

    /**
     * Appends to existing array of relative URLs specifying client-side CSS stylesheets to be
     * referenced in the resulting page.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * The caller can use this to specify the location of <tt>snuggletex.css</tt>, as well
     * as any other required stylesheets.
     *
     * @param cssStylesheetURLs array of CSS stylesheet URLs to add, which may be empty
     */
    public void addCSSStylesheetURLs(String... cssStylesheetURLs) {
        this.cssStylesheetURLs = concat(this.cssStylesheetURLs, cssStylesheetURLs, String.class);
    }


    /**
     * Returns specified array of relative URLs specifying client-side XSLT stylesheets to be
     * referenced in the resulting page.
     * <p>
     * Default is null
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * This is ignored for {@link WebPageType#MATHPLAYER_HTML}. Also, if nothing is set
     * here for a {@link WebPageType#CLIENT_SIDE_XSLT_STYLESHEET} then {@link WebPageType#MOZILLA}
     * will be used as a template instead.
     */
    public String[] getClientSideXSLTStylesheetURLs() {
        return clientSideXSLTStylesheetURLs;
    }

    /**
     * Sets an array of relative URLs specifying client-side XSLT stylesheets to be
     * referenced in the resulting page.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * This is ignored for {@link WebPageType#MATHPLAYER_HTML}. Also, if nothing is set
     * here for a {@link WebPageType#CLIENT_SIDE_XSLT_STYLESHEET} then {@link WebPageType#MOZILLA}
     * will be used as a template instead.
     *
     * @param clientSideXSLTStylesheetURLs array of URLs to use, which may be empty.
     */
    public void setClientSideXSLTStylesheetURLs(String... clientSideXSLTStylesheetURLs) {
        this.clientSideXSLTStylesheetURLs = clientSideXSLTStylesheetURLs;
    }

    /**
     * Appends to existing array of relative URLs specifying client-side XSLT stylesheets to be
     * referenced in the resulting page.
     * <p>
     * The URLs are used as-is; the caller should have ensured they make sense in advance!
     * <p>
     * This is ignored for {@link WebPageType#MATHPLAYER_HTML}. Also, if nothing is set
     * here for a {@link WebPageType#CLIENT_SIDE_XSLT_STYLESHEET} then {@link WebPageType#MOZILLA}
     * will be used as a template instead.
     *
     * @param clientSideXSLTStylesheetURLs array of URLs to append, which may be empty.
     */
    public void addClientSideXSLTStylesheetURLs(String... clientSideXSLTStylesheetURLs) {
        this.clientSideXSLTStylesheetURLs = concat(this.clientSideXSLTStylesheetURLs, clientSideXSLTStylesheetURLs, String.class);
    }


    /**
     * Returns an array of specified JAXP {@link Transformer}s representing XSLT stylesheet(s)
     * that will be applied to the resulting web page once it has been built but
     * before it is serialised. This can be useful if you want to add in headers
     * and footers to the resulting XHTML web page.
     * <p>
     * Default is null.
     * <p>
     * Remember that the XHTML is all in its correct namespace so you will need
     * to write your stylesheet appropriately. Ensure that any further XHTML you
     * generate is also in the correct namespace; it will later be converted to
     * no-namespace HTML if required by the serialisation process.
     * <p>
     * <strong>NOTE:</strong> Source documents may contain Processing
     * Instructions (e.g. to invoke MathPlayer) so these must be handled as
     * appropriate.
     */
    public Transformer[] getStylesheets() {
        return stylesheets;
    }

    /**
     * Sets an array of JAXP {@link Transformer}s representing XSLT stylesheet(s)
     * that will be applied to the resulting web page once it has been built but
     * before it is serialised. This can be useful if you want to add in headers
     * and footers to the resulting XHTML web page.
     * <p>
     * Remember that the XHTML is all in its correct namespace so you will need
     * to write your stylesheet appropriately. Ensure that any further XHTML you
     * generate is also in the correct namespace; it will later be converted to
     * no-namespace HTML if required by the serialisation process.
     * <p>
     * <strong>NOTE:</strong> Source documents may contain Processing
     * Instructions (e.g. to invoke MathPlayer) so these must be handled as
     * appropriate.
     *
     * @param stylesheets array of XSLT stylesheets to apply, which may be null. They
     *   are applied in the order specified.
     */
    public void setStylesheets(Transformer... stylesheets) {
        this.stylesheets = stylesheets;
    }

    /**
     * Appends to existing array of JAXP {@link Transformer}s representing XSLT stylesheet(s)
     * that will be applied to the resulting web page once it has been built but
     * before it is serialised. This can be useful if you want to add in headers
     * and footers to the resulting XHTML web page.
     * <p>
     * Remember that the XHTML is all in its correct namespace so you will need
     * to write your stylesheet appropriately. Ensure that any further XHTML you
     * generate is also in the correct namespace; it will later be converted to
     * no-namespace HTML if required by the serialisation process.
     * <p>
     * <strong>NOTE:</strong> Source documents may contain Processing
     * Instructions (e.g. to invoke MathPlayer) so these must be handled as
     * appropriate.
     *
     * @param stylesheets array of additional XSLT stylesheets to apply, which may be null. They
     *   are applied in the order specified.
     */
    public void addStylesheets(Transformer... stylesheets) {
        this.stylesheets = concat(this.stylesheets, stylesheets, Transformer.class);
    }


    public boolean isHtml5() {
        return html5;
    }

    public void setHtml5(boolean html5) {
        this.html5 = html5;
    }


    public boolean isNoCharsetInContentTypeHeader() {
        return noCharsetInContentTypeHeader;
    }

    public void setNoCharsetInContentTypeHeader(boolean noCharsetInContentTypeHeader) {
        this.noCharsetInContentTypeHeader = noCharsetInContentTypeHeader;
    }


    public boolean isMathJax() {
        return mathJax;
    }

    public void setMathJax(boolean mathJax) {
        this.mathJax = mathJax;
    }


    public String getCustomMathJaxUrl() {
        return customMathJaxUrl;
    }

    public void setCustomMathJaxUrl(String mathJaxPath) {
        this.customMathJaxUrl = mathJaxPath;
    }
}
/**
 * This class is generated by jOOQ
 */
package io.cattle.platform.core.model;

// NOTE(review): jOOQ-generated interface (jOOQ 3.3.0, see @Generated below).
// Do not edit by hand - changes will be lost on the next codegen run; modify the
// database schema / generator configuration and regenerate instead.

/**
 * This class is generated by jOOQ.
 */
@javax.annotation.Generated(value    = { "http://www.jooq.org", "3.3.0" },
                            comments = "This class is generated by jOOQ")
@java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" })
@javax.persistence.Entity
@javax.persistence.Table(name = "volume", schema = "cattle")
public interface Volume extends java.io.Serializable {

    /**
     * Setter for <code>cattle.volume.id</code>.
     */
    public void setId(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.id</code>.
     */
    @javax.persistence.Id
    @javax.persistence.Column(name = "id", unique = true, nullable = false, precision = 19)
    public java.lang.Long getId();

    /**
     * Setter for <code>cattle.volume.name</code>.
     */
    public void setName(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.name</code>.
     */
    @javax.persistence.Column(name = "name", length = 255)
    public java.lang.String getName();

    /**
     * Setter for <code>cattle.volume.account_id</code>.
     */
    public void setAccountId(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.account_id</code>.
     */
    @javax.persistence.Column(name = "account_id", precision = 19)
    public java.lang.Long getAccountId();

    /**
     * Setter for <code>cattle.volume.kind</code>.
     */
    public void setKind(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.kind</code>.
     */
    @javax.persistence.Column(name = "kind", nullable = false, length = 255)
    public java.lang.String getKind();

    /**
     * Setter for <code>cattle.volume.uuid</code>.
     */
    public void setUuid(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.uuid</code>.
     */
    @javax.persistence.Column(name = "uuid", unique = true, nullable = false, length = 128)
    public java.lang.String getUuid();

    /**
     * Setter for <code>cattle.volume.description</code>.
     */
    public void setDescription(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.description</code>.
     */
    @javax.persistence.Column(name = "description", length = 1024)
    public java.lang.String getDescription();

    /**
     * Setter for <code>cattle.volume.state</code>.
     */
    public void setState(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.state</code>.
     */
    @javax.persistence.Column(name = "state", nullable = false, length = 128)
    public java.lang.String getState();

    /**
     * Setter for <code>cattle.volume.created</code>.
     */
    public void setCreated(java.util.Date value);

    /**
     * Getter for <code>cattle.volume.created</code>.
     */
    @javax.persistence.Column(name = "created")
    public java.util.Date getCreated();

    /**
     * Setter for <code>cattle.volume.removed</code>.
     */
    public void setRemoved(java.util.Date value);

    /**
     * Getter for <code>cattle.volume.removed</code>.
     */
    @javax.persistence.Column(name = "removed")
    public java.util.Date getRemoved();

    /**
     * Setter for <code>cattle.volume.remove_time</code>.
     */
    public void setRemoveTime(java.util.Date value);

    /**
     * Getter for <code>cattle.volume.remove_time</code>.
     */
    @javax.persistence.Column(name = "remove_time")
    public java.util.Date getRemoveTime();

    /**
     * Setter for <code>cattle.volume.data</code>.
     */
    public void setData(java.util.Map<String,Object> value);

    /**
     * Getter for <code>cattle.volume.data</code>.
     */
    @javax.persistence.Column(name = "data", length = 16777215)
    public java.util.Map<String,Object> getData();

    /**
     * Setter for <code>cattle.volume.physical_size_mb</code>.
     */
    public void setPhysicalSizeMb(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.physical_size_mb</code>.
     */
    @javax.persistence.Column(name = "physical_size_mb", precision = 19)
    public java.lang.Long getPhysicalSizeMb();

    /**
     * Setter for <code>cattle.volume.virtual_size_mb</code>.
     */
    public void setVirtualSizeMb(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.virtual_size_mb</code>.
     */
    @javax.persistence.Column(name = "virtual_size_mb", precision = 19)
    public java.lang.Long getVirtualSizeMb();

    /**
     * Setter for <code>cattle.volume.device_number</code>.
     */
    public void setDeviceNumber(java.lang.Integer value);

    /**
     * Getter for <code>cattle.volume.device_number</code>.
     */
    @javax.persistence.Column(name = "device_number", precision = 10)
    public java.lang.Integer getDeviceNumber();

    /**
     * Setter for <code>cattle.volume.format</code>.
     */
    public void setFormat(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.format</code>.
     */
    @javax.persistence.Column(name = "format", length = 255)
    public java.lang.String getFormat();

    /**
     * Setter for <code>cattle.volume.allocation_state</code>.
     */
    public void setAllocationState(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.allocation_state</code>.
     */
    @javax.persistence.Column(name = "allocation_state", length = 255)
    public java.lang.String getAllocationState();

    /**
     * Setter for <code>cattle.volume.attached_state</code>.
     */
    public void setAttachedState(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.attached_state</code>.
     */
    @javax.persistence.Column(name = "attached_state", length = 255)
    public java.lang.String getAttachedState();

    /**
     * Setter for <code>cattle.volume.instance_id</code>.
     */
    public void setInstanceId(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.instance_id</code>.
     */
    @javax.persistence.Column(name = "instance_id", precision = 19)
    public java.lang.Long getInstanceId();

    /**
     * Setter for <code>cattle.volume.image_id</code>.
     */
    public void setImageId(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.image_id</code>.
     */
    @javax.persistence.Column(name = "image_id", precision = 19)
    public java.lang.Long getImageId();

    /**
     * Setter for <code>cattle.volume.offering_id</code>.
     */
    public void setOfferingId(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.offering_id</code>.
     */
    @javax.persistence.Column(name = "offering_id", precision = 19)
    public java.lang.Long getOfferingId();

    /**
     * Setter for <code>cattle.volume.zone_id</code>.
     */
    public void setZoneId(java.lang.Long value);

    /**
     * Getter for <code>cattle.volume.zone_id</code>.
     */
    @javax.persistence.Column(name = "zone_id", precision = 19)
    public java.lang.Long getZoneId();

    /**
     * Setter for <code>cattle.volume.uri</code>.
     */
    public void setUri(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.uri</code>.
     */
    @javax.persistence.Column(name = "uri", length = 512)
    public java.lang.String getUri();

    /**
     * Setter for <code>cattle.volume.external_id</code>.
     */
    public void setExternalId(java.lang.String value);

    /**
     * Getter for <code>cattle.volume.external_id</code>.
     */
    @javax.persistence.Column(name = "external_id", length = 128)
    public java.lang.String getExternalId();

    // -------------------------------------------------------------------------
    // FROM and INTO
    // -------------------------------------------------------------------------

    /**
     * Load data from another generated Record/POJO implementing the common interface Volume
     */
    public void from(io.cattle.platform.core.model.Volume from);

    /**
     * Copy data into another generated Record/POJO implementing the common interface Volume
     */
    public <E extends io.cattle.platform.core.model.Volume> E into(E into);
}
package org.efidroid.efidroidmanager.fragments;

import android.content.Context;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.AppBarLayout;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.design.widget.CoordinatorLayout;
import android.support.v4.app.Fragment;
import android.support.v4.content.res.ResourcesCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.TextView;

import com.melnykov.fab.FloatingActionButton;

import org.efidroid.efidroidmanager.R;
import org.efidroid.efidroidmanager.Util;
import org.efidroid.efidroidmanager.models.DeviceInfo;
import org.efidroid.efidroidmanager.services.GenericProgressIntentService;
import org.efidroid.efidroidmanager.tasks.EFIDroidInstallServiceTask;
import org.efidroid.efidroidmanager.tasks.EFIDroidUninstallServiceTask;
import org.efidroid.efidroidmanager.types.InstallationEntry;
import org.efidroid.efidroidmanager.types.InstallationStatus;
import org.efidroid.efidroidmanager.types.ProgressReceiver;
import org.efidroid.efidroidmanager.view.ProgressCircle;

import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;

/**
 * Fragment that shows the EFIDroid installation status and drives the
 * install / uninstall / repair flows.
 *
 * The hosting activity must implement {@link OnInstallFragmentInteractionListener};
 * this fragment borrows the activity's FAB, toolbar, app bar, and drawer through
 * that interface. Progress of the background (un)install service is delivered via
 * {@link ProgressReceiver.OnStatusChangeListener} callbacks, and app-bar scrolling
 * is tracked via {@link AppBarLayout.OnOffsetChangedListener} to reposition the FAB.
 */
public class InstallFragment extends Fragment implements AppBarLayout.OnOffsetChangedListener, ProgressReceiver.OnStatusChangeListener {
    // Hosting activity; set in onAttach(), cleared in onDetach().
    private OnInstallFragmentInteractionListener mListener;

    // UI state
    private ProgressReceiver mProgressReceiver;   // bridges the background service to this fragment
    private ProgressCircle mProgressCircle;       // big status circle in the collapsing toolbar
    private TextView mProgressDescription;        // text line under the circle
    private ArrayList<InstallStatusRecyclerViewAdapter.Item> mListData = new ArrayList<>();
    private InstallStatusRecyclerViewAdapter mListAdapter;

    /** Convenience wrapper resolving a color resource against the current theme. */
    private int getColor(int id) {
        return ResourcesCompat.getColor(getResources(), id, getContext().getTheme());
    }

    /** Outcome handler for the asynchronous install-status reload requested in startReload(). */
    private InstallStatusLoadCallback mInstallStatusLoadCallback = new InstallStatusLoadCallback() {
        @Override
        public void onStatusLoaded() {
            // Status refreshed successfully; repaint the whole UI (animated).
            loadUiData(true);
        }

        @Override
        public void onStatusLoadError() {
            // Show the error state but keep the circle clickable so the user can retry.
            mProgressCircle.setFillColor(getColor(R.color.colorCircleBgError), false, 0);
            mProgressCircle.setContentText(R.string.error);
            mProgressDescription.setText(R.string.cant_reload_install_status);
            mProgressCircle.setClickable(true);
        }
    };

    /** Puts the UI into the "reloading" state and asks the activity to re-query the install status. */
    private void startReload() {
        mProgressCircle.setProgressHidden(true, true, 200);
        mProgressCircle.setFillColor(getColor(R.color.colorCircleBgLoading), true, 200);
        mProgressCircle.setClickable(false);
        mProgressCircle.setContentText(R.string.reloading);
        mProgressDescription.setText(R.string.reloading_info);
        mListener.reloadInstallStatus(mInstallStatusLoadCallback);
    }

    /** Required empty public constructor (fragments are re-instantiated reflectively). */
    public InstallFragment() {
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.fragment_install, menu);
    }

    /** Packs device info + install status into service extras and starts the install task. */
    void startInstallService() {
        Bundle extras = new Bundle();
        extras.putParcelable(EFIDroidInstallServiceTask.ARG_DEVICE_INFO, mListener.getDeviceInfo());
        extras.putParcelable(EFIDroidInstallServiceTask.ARG_INSTALL_STATUS, mListener.getInstallStatus());
        mProgressReceiver.setServiceBundle(extras);
        mProgressReceiver.setServiceHandler(EFIDroidInstallServiceTask.class);
        mProgressReceiver.startService();
    }

    /**
     * Same as startInstallService() but dispatches the uninstall task.
     * Note: extras are still keyed by the install task's ARG_* constants — the
     * uninstall task evidently reads the same keys.
     */
    void startUnInstallService() {
        Bundle extras = new Bundle();
        extras.putParcelable(EFIDroidInstallServiceTask.ARG_DEVICE_INFO, mListener.getDeviceInfo());
        extras.putParcelable(EFIDroidInstallServiceTask.ARG_INSTALL_STATUS, mListener.getInstallStatus());
        mProgressReceiver.setServiceBundle(extras);
        mProgressReceiver.setServiceHandler(EFIDroidUninstallServiceTask.class);
        mProgressReceiver.startService();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_refresh:
                startReload();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /** Rebuilds the detail list (version / build time / EFI spec) from the working installation entry. */
    private void loadListData() {
        mListData.clear();
        InstallationEntry entry = mListener.getInstallStatus().getWorkingEntry();
        if (entry != null) {
            // Timestamp is stored in seconds; Date wants milliseconds.
            Date date = new Date(entry.getTimeStamp() * 1000l);
            DateFormat format = DateFormat.getDateTimeInstance();
            mListData.add(new InstallStatusRecyclerViewAdapter.Item(getString(R.string.efidroid_version), entry.getEFIDroidReleaseVersionString()));
            mListData.add(new InstallStatusRecyclerViewAdapter.Item(getString(R.string.build_time), format.format(date)));
            mListData.add(new InstallStatusRecyclerViewAdapter.Item(getString(R.string.efi_spec), entry.getEfiSpecVersionMajor() + "." + entry.getEfiSpecVersionMinor()));
        }
        mListAdapter.notifyDataSetChanged();
    }

    /**
     * Click handler on the progress circle that kicks off installation.
     * Locks the drawer, switches the FAB into a "cancel" button, then starts the service.
     * NOTE(review): the guard is {@code !mProgressReceiver.isFinished()} — the semantics of
     * isFinished() are not visible from this file; confirm against ProgressReceiver that this
     * really means "no task currently running" and is not inverted.
     */
    private View.OnClickListener mInstallClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (!mProgressReceiver.isFinished()) {
                mListener.getDrawerLayout().setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
                mProgressCircle.setClickable(false);
                mProgressCircle.setValue(0, false, 0);
                mProgressCircle.setProgressHidden(false, true, 200);
                mProgressCircle.setFillColor(getColor(R.color.colorCircleBgLoading), true, 200);
                Util.animateVisibility(mListener.getFAB(), View.VISIBLE, 200);
                mListener.getFAB().setImageDrawable(ResourcesCompat.getDrawable(getResources(), R.drawable.ic_action_cancel, getActivity().getTheme()));
                mListener.getFAB().setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // FAB now cancels the running task.
                        GenericProgressIntentService.stopCurrentTask(getContext(), GenericProgressIntentService.class);
                    }
                });
                startInstallService();
            }
        }
    };

    /**
     * Click handler (used by the FAB) that kicks off uninstallation.
     * Mirrors mInstallClickListener; same NOTE(review) about the isFinished() guard applies.
     * NOTE(review): field name has a typo — "mUinstallClickListener" (missing 'n').
     */
    private View.OnClickListener mUinstallClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (!mProgressReceiver.isFinished()) {
                mListener.getDrawerLayout().setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
                mProgressCircle.setClickable(false);
                mProgressCircle.setValue(0, false, 0);
                mProgressCircle.setProgressHidden(false, true, 200);
                mProgressCircle.setFillColor(getColor(R.color.colorCircleBgLoading), true, 200);
                mProgressCircle.setContentText(R.string.uninstall);
                mProgressDescription.setText("");
                Util.animateVisibility(mListener.getFAB(), View.VISIBLE, 200);
                mListener.getFAB().setImageDrawable(ResourcesCompat.getDrawable(getResources(), R.drawable.ic_action_cancel, getActivity().getTheme()));
                mListener.getFAB().setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        GenericProgressIntentService.stopCurrentTask(getContext(), GenericProgressIntentService.class);
                    }
                });
                startUnInstallService();
            }
        }
    };

    /**
     * Repaints the whole UI from the current InstallationStatus.
     * Four states: not installed, broken (repair), update available, installed-and-current.
     *
     * @param animate whether color transitions on the progress circle should animate
     */
    private void loadUiData(boolean animate) {
        // progress circle
        InstallationStatus installStatus = mListener.getInstallStatus();
        mProgressCircle.setOnClickListener(mInstallClickListener);
        mProgressCircle.setClickable(true);
        if (!installStatus.isInstalled()) {
            // not installed — circle acts as the "Install" button, no FAB
            mProgressCircle.setFillColor(getColor(R.color.colorCircleBgError), animate, 200);
            mProgressCircle.setContentText(R.string.install);
            mProgressDescription.setText("");
            mListener.getFAB().setVisibility(View.GONE);
        } else if (installStatus.isBroken()) {
            // hide FAB
            FloatingActionButton fab = mListener.getFAB();
            fab.setVisibility(View.GONE);
            // broken — circle becomes "Repair"; describe every non-OK entry below it
            mProgressCircle.setFillColor(getColor(R.color.colorCircleBgError), animate, 200);
            mProgressCircle.setContentText(R.string.repair);
            ArrayList<InstallationEntry> installEntries = new ArrayList<>();
            installEntries.addAll(installStatus.getInstallationEntries());
            // remove 'OK' entries
            Iterator<InstallationEntry> iterator = installEntries.iterator();
            while (iterator.hasNext()) {
                InstallationEntry entry = iterator.next();
                int status = entry.getStatus();
                if (status == InstallationEntry.STATUS_OK)
                    iterator.remove();
            }
            // Build a human-readable, comma-and-"and"-joined summary of broken entries.
            String text = "";
            for (int i = 0; i < installEntries.size(); i++) {
                InstallationEntry entry = installEntries.get(i);
                int status = entry.getStatus();
                if (status == InstallationEntry.STATUS_OK)
                    continue;
                if (i != 0 && i == installEntries.size() - 1)
                    text += " " + getString(R.string.and) + " ";
                else if (i != 0) {
                    text += ", ";
                }
                // substring(1) strips the leading '/' from the mount point
                text += entry.getFsTabEntry().getMountPoint().substring(1) + " ";
                if (status == InstallationEntry.STATUS_ESP_ONLY)
                    text += getString(R.string.status_esp_only);
                else if (status == InstallationEntry.STATUS_ESP_MISSING)
                    text += getString(R.string.status_esp_missing);
                else if (status == InstallationEntry.STATUS_WRONG_DEVICE)
                    text += getString(R.string.status_wrong_device);
                else if (status == InstallationEntry.STATUS_NOT_INSTALLED)
                    text += getString(R.string.status_not_installed);
            }
            mProgressDescription.setText(text);
        } else if (installStatus.isUpdateAvailable()) {
            DateFormat format = DateFormat.getDateTimeInstance();
            // show FAB (acts as "uninstall")
            FloatingActionButton fab = mListener.getFAB();
            fab.setImageDrawable(ResourcesCompat.getDrawable(getResources(), R.drawable.ic_action_delete, getActivity().getTheme()));
            fab.setOnClickListener(mUinstallClickListener);
            fab.setVisibility(View.VISIBLE);
            // installed, update available — circle becomes "Update"
            mProgressCircle.setFillColor(getColor(R.color.colorCircleBgWarning), animate, 200);
            mProgressCircle.setContentText(R.string.update);
            mProgressDescription.setText(format.format(installStatus.getUpdateDate()));
        } else {
            // show FAB (acts as "uninstall")
            FloatingActionButton fab = mListener.getFAB();
            fab.setImageDrawable(ResourcesCompat.getDrawable(getResources(), R.drawable.ic_action_delete, getActivity().getTheme()));
            fab.setOnClickListener(mUinstallClickListener);
            fab.setVisibility(View.VISIBLE);
            // installed and up to date — circle offers "Reinstall"
            mProgressCircle.setFillColor(getColor(R.color.colorCircleBgSuccess), animate, 200);
            mProgressCircle.setContentText(R.string.reinstall);
            mProgressDescription.setText(R.string.installed_and_updated);
        }
        loadListData();
    }

    /**
     * Styles the activity's collapsing toolbar / app bar for this fragment and
     * inflates the custom toolbar layout containing the progress circle.
     */
    private void initToolbar() {
        // get colors
        int colorToolBarGrey = ResourcesCompat.getColor(getResources(), R.color.colorToolBarGrey, getActivity().getTheme());
        int colorToolBarGreyDark = ResourcesCompat.getColor(getResources(), R.color.colorToolBarGreyDark, getActivity().getTheme());

        // toolbar
        CollapsingToolbarLayout collapsingToolbarLayout = mListener.getCollapsingToolbarLayout();
        collapsingToolbarLayout.setContentScrimColor(colorToolBarGrey);
        collapsingToolbarLayout.setBackgroundColor(colorToolBarGrey);
        collapsingToolbarLayout.setStatusBarScrimColor(colorToolBarGreyDark);
        collapsingToolbarLayout.setScrimVisibleHeightTrigger(-1);

        // appbar — fixed extra height of 300px; onOffsetChanged() uses the same constant
        AppBarLayout appBarLayout = mListener.getAppBarLayout();
        Util.setToolBarHeight(appBarLayout, 300, true);
        appBarLayout.addOnOffsetChangedListener(this);

        // inflate toolbar layout
        FrameLayout toolbarFrameLayout = mListener.getToolbarFrameLayout();
        LayoutInflater inflater = LayoutInflater.from(toolbarFrameLayout.getContext());
        toolbarFrameLayout.removeAllViews();
        View toolbarView = inflater.inflate(R.layout.toolbar_layout_install, toolbarFrameLayout, true);
        mProgressCircle = (ProgressCircle) toolbarView.findViewById(R.id.progressCircle);
        mProgressDescription = (TextView) toolbarView.findViewById(R.id.description);
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_install, container, false);

        // Set the adapter (fragment_install's root is expected to be a RecyclerView —
        // the instanceof guard silently skips setup otherwise)
        if (view instanceof RecyclerView) {
            Context context = view.getContext();
            RecyclerView recyclerView = (RecyclerView) view;
            recyclerView.setLayoutManager(new LinearLayoutManager(context));
            mListData.clear();
            mListAdapter = new InstallStatusRecyclerViewAdapter(mListData);
            recyclerView.setAdapter(mListAdapter);
            recyclerView.setNestedScrollingEnabled(true);
        }

        // menu
        setHasOptionsMenu(true);

        // toolbar
        initToolbar();

        // create progress receiver
        mProgressReceiver = new ProgressReceiver(getContext(), this, null, EFIDroidInstallServiceTask.class, null);

        // restore status across configuration changes
        if (savedInstanceState != null) {
            mProgressReceiver.onRestoreInstanceState(savedInstanceState);
        }

        loadUiData(false);
        return view;
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        if (context instanceof OnInstallFragmentInteractionListener) {
            mListener = (OnInstallFragmentInteractionListener) context;
        } else {
            throw new RuntimeException(context.toString() + " must implement OnInstallFragmentInteractionListener");
        }
    }

    @Override
    public void onPause() {
        mProgressReceiver.notifyPause();
        super.onPause();
    }

    @Override
    public void onResume() {
        super.onResume();
        mProgressReceiver.notifyResume();
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Unregister from the app bar before dropping the activity reference.
        mListener.getAppBarLayout().removeOnOffsetChangedListener(this);
        mListener = null;
    }

    @Override
    public void onDestroy() {
        mProgressReceiver.notifyDestroy();
        super.onDestroy();
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        mProgressReceiver.onSaveInstanceState(outState);
        super.onSaveInstanceState(outState);
    }

    /**
     * Keeps the FAB glued to the bottom edge of the (collapsing) toolbar while the
     * app bar scrolls. The 300 matches the extra toolbar height set in initToolbar().
     */
    @Override
    public void onOffsetChanged(AppBarLayout appBarLayout, int verticalOffset) {
        FloatingActionButton fab = mListener.getFAB();
        CoordinatorLayout.LayoutParams layoutParams = (CoordinatorLayout.LayoutParams) fab.getLayoutParams();
        layoutParams.gravity = Gravity.END | Gravity.TOP;
        layoutParams.setMargins(layoutParams.leftMargin, Util.getStatusBarHeight(getContext()) + Util.getToolBarHeight(getContext()) + 300 + 32 + fab.getHeight() / 2 + verticalOffset, layoutParams.rightMargin, layoutParams.bottomMargin);
        fab.setLayoutParams(layoutParams);
    }

    /** Service progress callback: update circle percentage and description text. */
    @Override
    public void onStatusUpdate(int progress, String text) {
        mProgressCircle.setValue(progress, true, 100);
        mProgressCircle.setContentText(progress + "%");
        mProgressDescription.setText(text);
    }

    /** Service completion callback: unlock the drawer and either reload status or show the error state. */
    @Override
    public void onCompleted(boolean success) {
        mListener.getDrawerLayout().setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED);
        Util.animateVisibility(mListener.getFAB(), View.GONE, 200);
        mProgressReceiver.reset();

        if (success) {
            startReload();
        } else {
            mProgressCircle.setProgressHidden(true, true, 1000);
            mProgressCircle.setFillColor(getColor(R.color.colorCircleBgError), true, 1000);
        }
    }

    /** Result callback for OnInstallFragmentInteractionListener#reloadInstallStatus. */
    public interface InstallStatusLoadCallback {
        void onStatusLoaded();
        void onStatusLoadError();
    }

    /** Contract the hosting activity must implement to share chrome and data with this fragment. */
    public interface OnInstallFragmentInteractionListener {
        DeviceInfo getDeviceInfo();
        InstallationStatus getInstallStatus();
        void reloadInstallStatus(InstallStatusLoadCallback callback);
        FloatingActionButton getFAB();
        Toolbar getToolbar();
        CollapsingToolbarLayout getCollapsingToolbarLayout();
        AppBarLayout getAppBarLayout();
        FrameLayout getToolbarFrameLayout();
        DrawerLayout getDrawerLayout();
    }
}
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.web;

import javax.servlet.Servlet;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.junit.Test;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.boot.context.embedded.AnnotationConfigEmbeddedWebApplicationContext;
import org.springframework.boot.context.embedded.ConfigurableEmbeddedServletContainer;
import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer;
import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.MockEmbeddedServletContainerFactory;
import org.springframework.boot.test.util.EnvironmentTestUtils;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.FrameworkServlet;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;

/**
 * Tests for {@link EmbeddedServletContainerAutoConfiguration}.
 *
 * @author Dave Syer
 */
public class EmbeddedServletContainerAutoConfigurationTests {

	private AnnotationConfigEmbeddedWebApplicationContext context;

	/** Auto-configuration registers the default dispatcher servlet from a plain config class. */
	@Test
	public void createFromConfigClass() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				BaseConfiguration.class);
		verifyContext();
	}

	/** A user-defined bean named "dispatcherServlet" suppresses the auto-configured one. */
	@Test
	public void contextAlreadyHasDispatcherServletWithDefaultName() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				DispatcherServletConfiguration.class, BaseConfiguration.class);
		verifyContext();
	}

	/** A DispatcherServlet under a different bean name coexists with the auto-configured one. */
	@Test
	public void contextAlreadyHasDispatcherServlet() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				SpringServletConfiguration.class, BaseConfiguration.class);
		verifyContext();
		assertThat(this.context.getBeanNamesForType(DispatcherServlet.class).length)
				.isEqualTo(2);
	}

	/** A non-DispatcherServlet FrameworkServlet named "dispatcherServlet" blocks auto-configuration. */
	@Test
	public void contextAlreadyHasNonDispatcherServlet() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				NonSpringServletConfiguration.class, BaseConfiguration.class);
		verifyContext(); // the non default servlet is still registered
		assertThat(this.context.getBeanNamesForType(DispatcherServlet.class).length)
				.isEqualTo(0);
	}

	/** A non-Servlet bean named "dispatcherServlet" prevents any servlet from being registered. */
	@Test
	public void contextAlreadyHasNonServlet() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				NonServletConfiguration.class, BaseConfiguration.class);
		assertThat(this.context.getBeanNamesForType(DispatcherServlet.class).length)
				.isEqualTo(0);
		assertThat(this.context.getBeanNamesForType(Servlet.class).length).isEqualTo(0);
	}

	/** User-supplied servlet + registration pair: exactly one DispatcherServlet bean survives. */
	@Test
	public void contextAlreadyHasDispatcherServletAndRegistration() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				DispatcherServletWithRegistrationConfiguration.class,
				BaseConfiguration.class);
		verifyContext();
		assertThat(this.context.getBeanNamesForType(DispatcherServlet.class).length)
				.isEqualTo(1);
	}

	/** The container factory must not see a ServletContext before initialization completes. */
	@Test
	public void containerHasNoServletContext() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				EnsureContainerHasNoServletContext.class, BaseConfiguration.class);
		verifyContext();
	}

	/** EmbeddedServletContainerCustomizer beans are applied (port set to 9000 below). */
	@Test
	public void customizeContainerThroughCallback() throws Exception {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext(
				CallbackEmbeddedContainerCustomizer.class, BaseConfiguration.class);
		verifyContext();
		assertThat(getContainerFactory().getPort()).isEqualTo(9000);
	}

	/** server.servlet.context-parameters.* properties surface as ServletContext init parameters. */
	@Test
	public void initParametersAreConfiguredOnTheServletContext() {
		this.context = new AnnotationConfigEmbeddedWebApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context,
				"server.servlet.context-parameters.a:alpha",
				"server.servlet.context-parameters.b:bravo");
		this.context.register(BaseConfiguration.class);
		this.context.refresh();
		ServletContext servletContext = this.context.getServletContext();
		assertThat(servletContext.getInitParameter("a")).isEqualTo("alpha");
		assertThat(servletContext.getInitParameter("b")).isEqualTo("bravo");
	}

	/** Asserts the default dispatcher servlet was added to the mock container's ServletContext. */
	private void verifyContext() {
		MockEmbeddedServletContainerFactory containerFactory = getContainerFactory();
		Servlet servlet = this.context.getBean(
				DispatcherServletAutoConfiguration.DEFAULT_DISPATCHER_SERVLET_BEAN_NAME,
				Servlet.class);
		verify(containerFactory.getServletContext()).addServlet("dispatcherServlet",
				servlet);
	}

	private MockEmbeddedServletContainerFactory getContainerFactory() {
		return this.context.getBean(MockEmbeddedServletContainerFactory.class);
	}

	// Shared base: mock container + the two auto-configurations under test.
	@Configuration
	@Import({ EmbeddedContainerConfiguration.class,
			EmbeddedServletContainerAutoConfiguration.class,
			DispatcherServletAutoConfiguration.class })
	protected static class BaseConfiguration {

	}

	@Configuration
	@ConditionalOnExpression("true")
	public static class EmbeddedContainerConfiguration {

		@Bean
		public EmbeddedServletContainerFactory containerFactory() {
			return new MockEmbeddedServletContainerFactory();
		}

	}

	// Bean named "dispatcherServlet" — the auto-configuration's default name.
	@Configuration
	public static class DispatcherServletConfiguration {

		@Bean
		public DispatcherServlet dispatcherServlet() {
			return new DispatcherServlet();
		}

	}

	// DispatcherServlet under a non-default bean name.
	@Configuration
	public static class SpringServletConfiguration {

		@Bean
		public DispatcherServlet springServlet() {
			return new DispatcherServlet();
		}

	}

	// FrameworkServlet (not a DispatcherServlet) claiming the default bean name.
	@Configuration
	public static class NonSpringServletConfiguration {

		@Bean
		public FrameworkServlet dispatcherServlet() {
			return new FrameworkServlet() {
				@Override
				protected void doService(HttpServletRequest request,
						HttpServletResponse response) throws Exception {
				}
			};
		}

	}

	// Non-Servlet bean claiming the default bean name.
	@Configuration
	public static class NonServletConfiguration {

		@Bean
		public String dispatcherServlet() {
			return "foo";
		}

	}

	// Both the servlet and its registration supplied under the auto-configuration's bean names.
	@Configuration
	public static class DispatcherServletWithRegistrationConfiguration {

		@Bean(name = DispatcherServletAutoConfiguration.DEFAULT_DISPATCHER_SERVLET_BEAN_NAME)
		public DispatcherServlet dispatcherServlet() {
			return new DispatcherServlet();
		}

		@Bean(name = DispatcherServletAutoConfiguration.DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME)
		public ServletRegistrationBean<DispatcherServlet> dispatcherRegistration() {
			return new ServletRegistrationBean<DispatcherServlet>(dispatcherServlet(),
					"/app/*");
		}

	}

	// BeanPostProcessor that asserts the factory has no ServletContext at init time.
	@Component
	public static class EnsureContainerHasNoServletContext implements BeanPostProcessor {

		@Override
		public Object postProcessBeforeInitialization(Object bean, String beanName)
				throws BeansException {
			if (bean instanceof ConfigurableEmbeddedServletContainer) {
				MockEmbeddedServletContainerFactory containerFactory = (MockEmbeddedServletContainerFactory) bean;
				assertThat(containerFactory.getServletContext()).isNull();
			}
			return bean;
		}

		@Override
		public Object postProcessAfterInitialization(Object bean, String beanName) {
			return bean;
		}

	}

	// Customizer used by customizeContainerThroughCallback().
	@Component
	public static class CallbackEmbeddedContainerCustomizer
			implements EmbeddedServletContainerCustomizer {

		@Override
		public void customize(ConfigurableEmbeddedServletContainer container) {
			container.setPort(9000);
		}

	}

}
/*
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.portlet.notice.controller;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Resource;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.EventRequest;
import javax.portlet.EventResponse;
import javax.portlet.PortletPreferences;
import javax.portlet.ResourceRequest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portlet.notice.INotificationService;
import org.jasig.portlet.notice.NotificationAction;
import org.jasig.portlet.notice.NotificationAttribute;
import org.jasig.portlet.notice.NotificationCategory;
import org.jasig.portlet.notice.NotificationConstants;
import org.jasig.portlet.notice.NotificationEntry;
import org.jasig.portlet.notice.NotificationResponse;
import org.jasig.portlet.notice.NotificationResult;
import org.jasig.portlet.notice.util.NotificationResponseFlattener;
import org.jasig.portlet.notice.util.UsernameFinder;
import org.jasig.portlet.notice.util.sort.Sorting;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.portlet.ModelAndView;
import org.springframework.web.portlet.bind.annotation.ActionMapping;
import org.springframework.web.portlet.bind.annotation.EventMapping;
import org.springframework.web.portlet.bind.annotation.ResourceMapping;

/**
 * Gathering of notifications requires action and sometimes event phases. This
 * controller serves that purpose.
 */
@RequestMapping("VIEW")
public class NotificationLifecycleController {

    /** Portlet preference toggling the (optional) events-based gathering strategy. */
    public static final String DO_EVENTS_PREFERENCE = "NotificationLifecycleController.doEvents";

    /**
     * Some deployments are experiencing issues when the 'invokeNotificationService' handler sends a redirect because
     * they're using SSL implemented by a load balancer and the portal/portlet container doesn't realize the traffic is
     * over HTTPS.  In these cases, the ActionURL results in an HTTP 302 redirect with a Location header that starts
     * with 'http://...' and (consequently) the JS in the browser fails, warning of an XSS violation.
     */
    public static final String INVOKE_REDIRECT_PROTOCOL_PREFERENCE = "NotificationLifecycleController.invokeRedirectProtocol";

    /**
     * See INVOKE_REDIRECT_PROTOCOL_PREFERENCE.  If you need to specify an absolute protocol, you may need to specify
     * an absolute port as well.  A blank value for this preference means use the default port for the specified
     * protocol.
     */
    public static final String INVOKE_REDIRECT_PORT_PREFERENCE = "NotificationLifecycleController.invokeRedirectPort";

    // Token JSON payload returned by the action-phase redirect (see invokeNotificationService).
    private static final String SUCCESS_PATH = "/scripts/success.json";

    private final Log log = LogFactory.getLog(getClass());

    @Autowired
    private UsernameFinder usernameFinder;

    @Autowired
    private NotificationResponseFlattener notificationResponseFlattener;

    @Resource(name="rootNotificationService")
    private INotificationService notificationService;

    /**
     * Resource-phase handler returning the user's notifications as a flat
     * (uncategorized), optionally sorted JSON feed, including any per-source
     * data-retrieval errors.
     *
     * @param req the portlet resource request
     * @param doRefresh present/ignored here; refresh is handled in the action phase
     * @return a "json" ModelAndView with "feed" (entries) and "errors" keys
     * @throws IllegalStateException if no notifications have been loaded for the user
     */
    @ResourceMapping("GET-NOTIFICATIONS-UNCATEGORIZED")
    public ModelAndView getNotificationsUncategorized(final ResourceRequest req,
            final @RequestParam(value="refresh", required=false) String doRefresh) throws IOException {

        // RequestParam("key") String key, HttpServletRequest request, ModelMap model
        log.debug("Invoking getNotifications for user: " + usernameFinder.findUsername(req));

        // Get the notifications and any data retrieval errors
        final NotificationResponse notifications = notificationService.fetch(req);
        if (notifications == null) {
            String msg = "Notifications have not been loaded for user: " + usernameFinder.findUsername(req);
            throw new IllegalStateException(msg);
        }

        // Flatten the collection
        List<NotificationEntry> allEntries = notificationResponseFlattener.flatten(notifications);

        // Apply specified sorting (if any)...
        allEntries = Sorting.sort(req, allEntries);

        final Map<String,Object> model = new HashMap<>();
        model.put("feed", allEntries);
        model.put("errors", notifications.getErrors());
        return new ModelAndView("json", model);
    }

    /**
     * Action-phase entry point that primes the notification services (invoke()),
     * then either lets the request fall through to a render phase (events mode)
     * or short-circuits with a redirect to a static success token.
     *
     * @param doRefresh "true" to force the services to refresh their caches
     */
    @ActionMapping(params="action=invokeNotificationService")
    public void invokeNotificationService(final ActionRequest req, final ActionResponse res,
            @RequestParam(value="refresh", required=false) final String doRefresh) throws IOException {

        // Notification data services must have the invoke() method called,
        // whether we're using portlet events or not;  additional features --
        // including the refresh button -- rely on invoke().
        notificationService.invoke(req, res, Boolean.parseBoolean(doRefresh));

        final PortletPreferences prefs = req.getPreferences();
        final boolean doEvents = Boolean.parseBoolean(prefs.getValue(DO_EVENTS_PREFERENCE, "false"));
        if (doEvents) {
            /*
             * TODO:  I wish we didn't have to go through a whole render phase just
             * to trigger the events-based features of the portlet, but atm I don't
             * see a way around it, since..
             *
             *   - (1) You can only start an event chain in the Action phase;  and
             *   - (2) You can only return JSON in a Resource phase;  and
             *   - (3) An un-redirected Action phase leads to a Render phase, not a
             *     Resource phase :(
             *
             * It would be awesome either (first choice) to do Action > Event >
             * Resource, or Action > sendRedirect() followed by a Resource request.
             *
             * As it stands, this implementation will trigger a complete render on
             * the portal needlessly.
             */
        } else {
            // The real payload awaits a Render phase;  send a token response to
            // avoid a full portlet request cycle (since we can).
            final String redirectUri = evaluateRedirectUri(req);
            res.sendRedirect(redirectUri);
        }

    }

    /**
     * Default action-phase handler:  locates the user-selected action on the
     * identified notification and invokes it, then re-invokes the services so
     * cached state reflects the action's effect.  A missing target is logged
     * (warn) rather than treated as an error.
     */
    @ActionMapping
    public void invokeUserAction(final ActionRequest req, final ActionResponse res,
            @RequestParam("notificationId") final String notificationId,
            @RequestParam("actionId") final String actionId) throws IOException {

        // Prime the pump
        notificationService.invoke(req, res, false);

        // Obtain the collection
        final NotificationResponse notifications = notificationService.fetch(req);

        // Find the relevant action
        NotificationAction target = null;
        final NotificationEntry entry = notifications.findNotificationEntryById(notificationId);
        if (entry != null) {
            for (NotificationAction action : entry.getAvailableActions()) {
                if (actionId.equals(action.getId())) {
                    target = action;
                    break;
                }
            }
        }

        // We must have a target to proceed
        if (target != null) {
            target.invoke(req, res);
            // It's reasonable to assume we need to purge
            // caches for this user after invoking his action
            notificationService.invoke(req, res, true);
        } else {
            String msg = "Target action not found for notificationId='"
                    + notificationId + "' and actionId='" + actionId + "'";
            log.warn(msg);
        }

    }

    /**
     * Event-phase handler collecting NOTIFICATION_RESULT events.  No-op unless
     * the doEvents preference is enabled, and results addressed to a different
     * portlet window are ignored.
     */
    @EventMapping(NotificationConstants.NOTIFICATION_RESULT_QNAME_STRING)
    public void collectNotifications(final EventRequest req, final EventResponse res) {

        final PortletPreferences prefs = req.getPreferences();
        final boolean doEvents = Boolean.parseBoolean(prefs.getValue(DO_EVENTS_PREFERENCE, "false"));
        if (!doEvents) {
            // Get out...
            return;
        }

        if (log.isDebugEnabled()) {
            log.debug("Processing event=" + NotificationConstants.NOTIFICATION_RESULT_QNAME_STRING
                    +" for user='" + usernameFinder.findUsername(req)
                    + "' and windowId=" + req.getWindowID());
        }

        // Ignore results from other notification portlets
        final NotificationResult notificationResult = (NotificationResult) req.getEvent().getValue();
        if (notificationResult != null && req.getWindowID().equals(notificationResult.getQueryWindowId())) {
            notificationService.collect(req, res);
        }

    }

    /*
     * Implementation
     */

    /**
     * Builds the redirect URI for the action-phase success token.  Relative by
     * default; made absolute (with optional explicit port) when the
     * INVOKE_REDIRECT_PROTOCOL preference is set — see the preference javadoc
     * for the SSL-terminating-load-balancer rationale.
     */
    private String evaluateRedirectUri(ActionRequest req) {

        // Default response -- specify a relative URI, allowing the protocol to be inferred
        String rslt = req.getContextPath() + SUCCESS_PATH;

        final PortletPreferences prefs = req.getPreferences();
        final String protocol = prefs.getValue(INVOKE_REDIRECT_PROTOCOL_PREFERENCE, null);
        if (protocol != null) {
            // Specify an absolute URI.  Apparently we need to insist on a protocol (usually HTTPS)
            // NOTE(review): toLowerCase() below uses the default locale; consider Locale.ROOT
            // if deployments in locales with non-standard case mappings (e.g. Turkish) matter.
            String portPart = "";  // default
            final String port = prefs.getValue(INVOKE_REDIRECT_PORT_PREFERENCE, null);
            if (port != null) {
                portPart = ":" + port;
            }
            rslt = protocol.toLowerCase() + "://"
                    + req.getServerName()  // Server hostname
                    + portPart             // Server port (blank, with any luck)
                    + rslt;                // Remainder of the URI (as above)
        }

        return rslt;
    }

}
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.common.event.tracking; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import co.aikar.timings.Timing; import com.flowpowered.math.vector.Vector3i; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import net.minecraft.block.Block; import net.minecraft.block.BlockEventData; import net.minecraft.block.BlockRedstoneLight; import net.minecraft.block.BlockRedstoneRepeater; import net.minecraft.block.BlockRedstoneTorch; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.item.EntityItem; import net.minecraft.init.Blocks; import net.minecraft.util.ITickable; import net.minecraft.util.math.BlockPos; import net.minecraft.world.WorldProvider; import net.minecraft.world.WorldServer; import net.minecraft.world.chunk.Chunk; import org.apache.logging.log4j.Level; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockState; import org.spongepowered.api.block.tileentity.TileEntity; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.entity.Entity; import org.spongepowered.api.entity.living.player.User; import org.spongepowered.api.event.CauseStackManager; import org.spongepowered.api.event.CauseStackManager.StackFrame; import org.spongepowered.api.event.SpongeEventFactory; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.block.TickBlockEvent; import org.spongepowered.api.event.cause.EventContextKeys; import org.spongepowered.api.event.entity.SpawnEntityEvent; import org.spongepowered.api.event.item.inventory.DropItemEvent; import org.spongepowered.api.item.inventory.ItemStackSnapshot; import org.spongepowered.api.world.BlockChangeFlag; import org.spongepowered.api.world.LocatableBlock; import 
org.spongepowered.api.world.Location; import org.spongepowered.api.world.World; import org.spongepowered.asm.util.PrettyPrinter; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.block.SpongeBlockSnapshot; import org.spongepowered.common.entity.EntityUtil; import org.spongepowered.common.event.ShouldFire; import org.spongepowered.common.event.tracking.phase.block.BlockPhase; import org.spongepowered.common.event.tracking.phase.general.GeneralPhase; import org.spongepowered.common.event.tracking.phase.tick.DimensionContext; import org.spongepowered.common.event.tracking.phase.tick.EntityTickContext; import org.spongepowered.common.event.tracking.phase.tick.TickPhase; import org.spongepowered.common.interfaces.IMixinChunk; import org.spongepowered.common.interfaces.block.IMixinBlock; import org.spongepowered.common.interfaces.block.IMixinBlockEventData; import org.spongepowered.common.interfaces.block.tile.IMixinTileEntity; import org.spongepowered.common.interfaces.entity.IMixinEntity; import org.spongepowered.common.interfaces.world.IMixinLocation; import org.spongepowered.common.interfaces.world.IMixinWorldServer; import org.spongepowered.common.item.inventory.util.ItemStackUtil; import org.spongepowered.common.mixin.plugin.blockcapturing.IModData_BlockCapturing; import org.spongepowered.common.registry.type.event.InternalSpawnTypes; import org.spongepowered.common.util.SpongeHooks; import org.spongepowered.common.world.BlockChange; import org.spongepowered.common.world.SpongeProxyBlockAccess; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Random; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; /** * A simple utility for aiding in tracking, either with resolving notifiers * and owners, or proxying out the logic for ticking a block, entity, etc. 
*/ @SuppressWarnings("unchecked") public final class TrackingUtil { public static final int BREAK_BLOCK_INDEX = 0; public static final int PLACE_BLOCK_INDEX = 1; public static final int DECAY_BLOCK_INDEX = 2; public static final int CHANGE_BLOCK_INDEX = 3; public static final int MULTI_CHANGE_INDEX = 4; public static final Function<ImmutableList.Builder<Transaction<BlockSnapshot>>[], Consumer<Transaction<BlockSnapshot>>> TRANSACTION_PROCESSOR = builders -> transaction -> { final BlockChange blockChange = ((SpongeBlockSnapshot) transaction.getOriginal()).blockChange; builders[blockChange.ordinal()].add(transaction); builders[MULTI_CHANGE_INDEX].add(transaction); } ; public static final int EVENT_COUNT = 5; public static final Function<BlockSnapshot, Transaction<BlockSnapshot>> TRANSACTION_CREATION = (blockSnapshot) -> { final Location<World> originalLocation = blockSnapshot.getLocation().get(); final WorldServer worldServer = (WorldServer) originalLocation.getExtent(); final BlockPos blockPos = ((IMixinLocation) (Object) originalLocation).getBlockPos(); final IBlockState newState = worldServer.getBlockState(blockPos); final IBlockState newActualState = newState.getActualState(worldServer, blockPos); final BlockSnapshot newSnapshot = ((IMixinWorldServer) worldServer).createSpongeBlockSnapshot(newState, newActualState, blockPos, 0); return new Transaction<>(blockSnapshot, newSnapshot); }; public static void tickEntity(net.minecraft.entity.Entity entityIn) { checkArgument(entityIn instanceof Entity, "Entity %s is not an instance of SpongeAPI's Entity!", entityIn); checkNotNull(entityIn, "Cannot capture on a null ticking entity!"); final IMixinEntity mixinEntity = EntityUtil.toMixin(entityIn); if (!mixinEntity.shouldTick()) { return; } try (final StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame(); final EntityTickContext context = TickPhase.Tick.ENTITY.createPhaseContext() .source(entityIn); final Timing entityTiming = 
mixinEntity.getTimingsHandler().startTiming() ) { Sponge.getCauseStackManager().pushCause(entityIn); mixinEntity.getNotifierUser() .ifPresent(notifier -> { Sponge.getCauseStackManager().addContext(EventContextKeys.NOTIFIER, notifier); context.notifier(notifier); }); mixinEntity.getCreatorUser() .ifPresent(notifier -> { Sponge.getCauseStackManager().addContext(EventContextKeys.OWNER, notifier); context.owner(notifier); }); context.buildAndSwitch(); entityIn.onUpdate(); } } public static void tickRidingEntity(net.minecraft.entity.Entity entity) { checkArgument(entity instanceof Entity, "Entity %s is not an instance of SpongeAPI's Entity!", entity); checkNotNull(entity, "Cannot capture on a null ticking entity!"); final IMixinEntity mixinEntity = EntityUtil.toMixin(entity); if (!mixinEntity.shouldTick()) { return; } final Optional<User> notifierUser = mixinEntity.getNotifierUser(); final Optional<User> creatorUser = mixinEntity.getCreatorUser(); try (final StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame(); final EntityTickContext context = TickPhase.Tick.ENTITY.createPhaseContext() .source(entity) .notifier(() -> notifierUser) .owner(() -> creatorUser) .buildAndSwitch(); final Timing entityTiming = mixinEntity.getTimingsHandler().startTiming() ) { Sponge.getCauseStackManager().pushCause(entity); notifierUser .ifPresent(notifier -> frame.addContext(EventContextKeys.NOTIFIER, notifier)); creatorUser .ifPresent(notifier -> frame.addContext(EventContextKeys.OWNER, notifier)); entity.updateRidden(); } } @SuppressWarnings({"unused", "try"}) public static void tickTileEntity(IMixinWorldServer mixinWorldServer, ITickable tile) { checkArgument(tile instanceof TileEntity, "ITickable %s is not a TileEntity!", tile); checkNotNull(tile, "Cannot capture on a null ticking tile entity!"); final net.minecraft.tileentity.TileEntity tileEntity = (net.minecraft.tileentity.TileEntity) tile; final IMixinTileEntity mixinTileEntity = (IMixinTileEntity) tile; final BlockPos 
pos = tileEntity.getPos(); final IMixinChunk chunk = ((IMixinTileEntity) tile).getActiveChunk(); if (!mixinTileEntity.shouldTick()) { return; } try (final StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame(); final PhaseContext<?> phaseContext = TickPhase.Tick.TILE_ENTITY.createPhaseContext() .source(tile)) { Sponge.getCauseStackManager().pushCause(tile); // Add notifier and owner so we don't have to perform lookups during the phases and other processing chunk.getBlockNotifier(pos) .ifPresent(notifier -> { Sponge.getCauseStackManager().addContext(EventContextKeys.NOTIFIER, notifier); phaseContext.notifier(notifier); }); User blockOwner = mixinTileEntity.getSpongeOwner(); if (!mixinTileEntity.hasSetOwner()) { blockOwner = chunk.getBlockOwner(pos).orElse(null); mixinTileEntity.setSpongeOwner(blockOwner); } if (blockOwner != null) { Sponge.getCauseStackManager().addContext(EventContextKeys.OWNER, blockOwner); phaseContext.owner(blockOwner); } phaseContext.owner = blockOwner; // Add the block snapshot of the tile entity for caches to avoid creating multiple snapshots during processing // This is a lazy evaluating snapshot to avoid the overhead of snapshot creation // Finally, switch the context now that we have the owner and notifier phaseContext.buildAndSwitch(); try (Timing timing = mixinTileEntity.getTimingsHandler().startTiming()) { tile.update(); } } catch (Exception e) { PhaseTracker.getInstance().printExceptionFromPhase(e); } } public static void updateTickBlock(IMixinWorldServer mixinWorld, Block block, BlockPos pos, IBlockState state, Random random) { final WorldServer minecraftWorld = mixinWorld.asMinecraftWorld(); try (CauseStackManager.StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame()) { Sponge.getCauseStackManager().pushCause(minecraftWorld); if (ShouldFire.TICK_BLOCK_EVENT) { BlockSnapshot snapshot = mixinWorld.createSpongeBlockSnapshot(state, state, pos, 0); final TickBlockEvent event = 
SpongeEventFactory.createTickBlockEventScheduled(Sponge.getCauseStackManager().getCurrentCause(), snapshot); SpongeImpl.postEvent(event); if(event.isCancelled()) { return; } } final LocatableBlock locatable = LocatableBlock.builder() .location(new Location<>(mixinWorld.asSpongeWorld(), pos.getX(), pos.getY(), pos.getZ())) .state((BlockState) state) .build(); Sponge.getCauseStackManager().pushCause(locatable); IPhaseState<?> phase = ((IMixinBlock) block).requiresBlockCapture() ? TickPhase.Tick.BLOCK : TickPhase.Tick.NO_CAPTURE_BLOCK; final PhaseContext<?> phaseContext = phase.createPhaseContext() .source(locatable); checkAndAssignBlockTickConfig(block, minecraftWorld, phaseContext); final PhaseTracker phaseTracker = PhaseTracker.getInstance(); // We have to associate any notifiers in case of scheduled block updates from other sources final PhaseData current = phaseTracker.getCurrentPhaseData(); final IPhaseState<?> currentState = current.state; currentState.getPhase().appendNotifierPreBlockTick(mixinWorld, pos, currentState, current.context, phaseContext); // Now actually switch to the new phase try (PhaseContext<?> context = phaseContext.buildAndSwitch()) { block.updateTick(minecraftWorld, pos, state, random); } catch (Exception | NoClassDefFoundError e) { phaseTracker.printExceptionFromPhase(e); } } } public static void randomTickBlock(PhaseTracker phaseTracker, IMixinWorldServer mixinWorld, Block block, BlockPos pos, IBlockState state, Random random) { final WorldServer minecraftWorld = mixinWorld.asMinecraftWorld(); try (StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame()) { Sponge.getCauseStackManager().pushCause(minecraftWorld); if (ShouldFire.TICK_BLOCK_EVENT) { final BlockSnapshot currentTickBlock = mixinWorld.createSpongeBlockSnapshot(state, state, pos, 0); final TickBlockEvent event = SpongeEventFactory.createTickBlockEventRandom(Sponge.getCauseStackManager().getCurrentCause(), currentTickBlock); SpongeImpl.postEvent(event); 
if(event.isCancelled()) { return; } } final LocatableBlock locatable = LocatableBlock.builder() .location(new Location<>(mixinWorld.asSpongeWorld(), pos.getX(), pos.getY(), pos.getZ())) .state((BlockState) state) .build(); Sponge.getCauseStackManager().pushCause(locatable); IPhaseState<?> phase = ((IMixinBlock) block).requiresBlockCapture() ? TickPhase.Tick.RANDOM_BLOCK : TickPhase.Tick.NO_CAPTURE_BLOCK; final PhaseContext<?> phaseContext = phase.createPhaseContext() .source(locatable); checkAndAssignBlockTickConfig(block, minecraftWorld, phaseContext); // We have to associate any notifiers in case of scheduled block updates from other sources final PhaseData current = phaseTracker.getCurrentPhaseData(); final IPhaseState<?> currentState = current.state; currentState.getPhase().appendNotifierPreBlockTick(mixinWorld, pos, currentState, current.context, phaseContext); // Now actually switch to the new phase try (PhaseContext<?> context = phaseContext.buildAndSwitch()) { block.randomTick(minecraftWorld, pos, state, random); } } } private static void checkAndAssignBlockTickConfig(Block block, WorldServer minecraftWorld, PhaseContext<?> phaseContext) { if (block instanceof IModData_BlockCapturing) { IModData_BlockCapturing capturingBlock = (IModData_BlockCapturing) block; if (capturingBlock.requiresBlockCapturingRefresh()) { capturingBlock.initializeBlockCapturingState(minecraftWorld); capturingBlock.requiresBlockCapturingRefresh(false); } } } public static void tickWorldProvider(IMixinWorldServer worldServer) { final WorldProvider worldProvider = ((WorldServer) worldServer).provider; try (DimensionContext context = TickPhase.Tick.DIMENSION.createPhaseContext().source(worldProvider).buildAndSwitch()) { worldProvider.onWorldUpdateEntities(); } } public static boolean fireMinecraftBlockEvent(WorldServer worldIn, BlockEventData event) { IBlockState currentState = worldIn.getBlockState(event.getPosition()); final IMixinBlockEventData blockEvent = (IMixinBlockEventData) 
event; IPhaseState<?> phase = blockEvent.getCaptureBlocks() ? TickPhase.Tick.BLOCK_EVENT : TickPhase.Tick.NO_CAPTURE_BLOCK; final PhaseContext<?> phaseContext = phase.createPhaseContext(); Object source = blockEvent.getTickBlock() != null ? blockEvent.getTickBlock() : blockEvent.getTickTileEntity(); if (source != null) { phaseContext.source(source); } else { // No source present which means we are ignoring the phase state boolean result = currentState.onBlockEventReceived(worldIn, event.getPosition(), event.getEventID(), event.getEventParameter()); return result; } if (blockEvent.getSourceUser() != null) { phaseContext.notifier(blockEvent.getSourceUser()); } try (PhaseContext<?> o = phaseContext.buildAndSwitch()) { return currentState.onBlockEventReceived(worldIn, event.getPosition(), event.getEventID(), event.getEventParameter()); } } @SuppressWarnings("rawtypes") static boolean trackBlockChange(PhaseTracker phaseTracker, IMixinWorldServer mixinWorld, Chunk chunk, IBlockState currentState, IBlockState newState, BlockPos pos, int flags, PhaseContext<?> phaseContext, IPhaseState<?> phaseState) { final SpongeBlockSnapshot originalBlockSnapshot; final WorldServer minecraftWorld = mixinWorld.asMinecraftWorld(); if (((IPhaseState) phaseState).shouldCaptureBlockChangeOrSkip(phaseContext, pos)) { //final IBlockState actualState = currentState.getActualState(minecraftWorld, pos); originalBlockSnapshot = mixinWorld.createSpongeBlockSnapshot(currentState, currentState, pos, flags); final List<BlockSnapshot> capturedSnapshots = phaseContext.getCapturedBlocks(); final Block newBlock = newState.getBlock(); associateBlockChangeWithSnapshot(phaseState, newBlock, currentState, originalBlockSnapshot, capturedSnapshots); final IMixinChunk mixinChunk = (IMixinChunk) chunk; final IBlockState originalBlockState = mixinChunk.setBlockState(pos, newState, currentState, originalBlockSnapshot); if (originalBlockState == null) { capturedSnapshots.remove(originalBlockSnapshot); return false; 
} ((IPhaseState) phaseState).postTrackBlock(originalBlockSnapshot, phaseTracker, phaseContext); } else { originalBlockSnapshot = (SpongeBlockSnapshot) BlockSnapshot.NONE; final IMixinChunk mixinChunk = (IMixinChunk) chunk; final IBlockState originalBlockState = mixinChunk.setBlockState(pos, newState, currentState, originalBlockSnapshot); if (originalBlockState == null) { return false; } } if (newState.getLightOpacity() != currentState.getLightOpacity() || newState.getLightValue() != currentState.getLightValue()) { minecraftWorld.profiler.startSection("checkLight"); minecraftWorld.checkLight(pos); minecraftWorld.profiler.endSection(); } return true; } private static void associateBlockChangeWithSnapshot(IPhaseState<?> phaseState, Block newBlock, IBlockState currentState, SpongeBlockSnapshot snapshot, List<BlockSnapshot> capturedSnapshots) { Block originalBlock = currentState.getBlock(); if (phaseState == BlockPhase.State.BLOCK_DECAY) { if (newBlock == Blocks.AIR) { snapshot.blockChange = BlockChange.DECAY; capturedSnapshots.add(snapshot); } } else if (newBlock == Blocks.AIR) { snapshot.blockChange = BlockChange.BREAK; capturedSnapshots.add(snapshot); } else if (newBlock != originalBlock && !forceModify(originalBlock, newBlock)) { snapshot.blockChange = BlockChange.PLACE; capturedSnapshots.add(snapshot); } else { snapshot.blockChange = BlockChange.MODIFY; capturedSnapshots.add(snapshot); } } private static boolean forceModify(Block originalBlock, Block newBlock) { if (originalBlock instanceof BlockRedstoneRepeater && newBlock instanceof BlockRedstoneRepeater) { return true; } if (originalBlock instanceof BlockRedstoneTorch && newBlock instanceof BlockRedstoneTorch) { return true; } if (originalBlock instanceof BlockRedstoneLight && newBlock instanceof BlockRedstoneLight) { return true; } return false; } private TrackingUtil() { } public static User getNotifierOrOwnerFromBlock(Location<World> location) { final BlockPos blockPos = ((IMixinLocation) (Object) 
location).getBlockPos(); return getNotifierOrOwnerFromBlock((WorldServer) location.getExtent(), blockPos); } public static User getNotifierOrOwnerFromBlock(WorldServer world, BlockPos blockPos) { final IMixinChunk mixinChunk = (IMixinChunk) world.getChunkFromBlockCoords(blockPos); User notifier = mixinChunk.getBlockNotifier(blockPos).orElse(null); if (notifier != null) { return notifier; } User owner = mixinChunk.getBlockOwner(blockPos).orElse(null); return owner; } public static Supplier<IllegalStateException> throwWithContext(String s, PhaseContext<?> phaseContext) { return () -> { final PrettyPrinter printer = new PrettyPrinter(60); printer.add("Exception trying to process over a phase!").centre().hr(); printer.addWrapped(40, "%s :", "PhaseContext"); PhaseTracker.CONTEXT_PRINTER.accept(printer, phaseContext); printer.add("Stacktrace:"); final IllegalStateException exception = new IllegalStateException(s + " Please analyze the current phase context. "); printer.add(exception); printer.trace(System.err, SpongeImpl.getLogger(), Level.ERROR); return exception; }; } /** * Processes the given list of {@link BlockSnapshot}s and creates and throws and processes * the {@link ChangeBlockEvent}s as appropriately determined based on the {@link BlockChange} * for each snapshot. If any transactions are invalid or events cancelled, this event * returns {@code false} to signify a transaction was cancelled. This return value * is used for portal creation. * * @param snapshots The snapshots to process * @param state The phase state that is being processed, used to handle marking notifiers * and block owners * @param context The phase context, only used by the phase for handling processes. 
* @return True if no events or transactions were cancelled */ @SuppressWarnings({"unchecked"}) public static boolean processBlockCaptures(List<BlockSnapshot> snapshots, IPhaseState<?> state, PhaseContext<?> context) { if (snapshots.isEmpty()) { return false; } ImmutableList<Transaction<BlockSnapshot>>[] transactionArrays = new ImmutableList[EVENT_COUNT]; ImmutableList.Builder<Transaction<BlockSnapshot>>[] transactionBuilders = new ImmutableList.Builder[EVENT_COUNT]; for (int i = 0; i < EVENT_COUNT; i++) { transactionBuilders[i] = new ImmutableList.Builder<>(); } final List<ChangeBlockEvent> blockEvents = new ArrayList<>(); for (BlockSnapshot snapshot : snapshots) { // This processes each snapshot to assign them to the correct event in the next area, with the // correct builder array entry. TRANSACTION_PROCESSOR.apply(transactionBuilders).accept(TRANSACTION_CREATION.apply(snapshot)); } for (int i = 0; i < EVENT_COUNT; i++) { // Build each event array transactionArrays[i] = transactionBuilders[i].build(); } // Clear captured snapshots after processing them context.getCapturedBlocksOrEmptyList().clear(); final ChangeBlockEvent[] mainEvents = new ChangeBlockEvent[BlockChange.values().length]; // This likely needs to delegate to the phase in the event we don't use the source object as the main object causing the block changes // case in point for WorldTick event listeners since the players are captured non-deterministically try (CauseStackManager.StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame()) { if(context.getNotifier().isPresent()) { Sponge.getCauseStackManager().addContext(EventContextKeys.NOTIFIER, context.getNotifier().get()); } if(context.getOwner().isPresent()) { Sponge.getCauseStackManager().addContext(EventContextKeys.OWNER, context.getOwner().get()); } try { state.associateAdditionalCauses(state, context); } catch (Exception e) { // TODO - this should be a thing to associate additional objects in the cause, or context, but for now it's just a 
simple // try catch to avoid bombing on performing block changes. } // Creates the block events accordingly to the transaction arrays iterateChangeBlockEvents(transactionArrays, blockEvents, mainEvents); // Needs to throw events // We create the post event and of course post it in the method, regardless whether any transactions are invalidated or not final ChangeBlockEvent.Post postEvent = throwMultiEventsAndCreatePost(transactionArrays, blockEvents, mainEvents); if (postEvent == null) { // Means that we have had no actual block changes apparently? return false; } final List<Transaction<BlockSnapshot>> invalid = new ArrayList<>(); boolean noCancelledTransactions = true; // Iterate through the block events to mark any transactions as invalid to accumilate after (since the post event contains all // transactions of the preceeding block events) for (ChangeBlockEvent blockEvent : blockEvents) { // Need to only check if the event is cancelled, If it is, restore if (blockEvent.isCancelled()) { noCancelledTransactions = false; // Don't restore the transactions just yet, since we're just marking them as invalid for now for (Transaction<BlockSnapshot> transaction : Lists.reverse(blockEvent.getTransactions())) { transaction.setValid(false); } } } // Finally check the post event if (postEvent.isCancelled()) { // Of course, if post is cancelled, just mark all transactions as invalid. noCancelledTransactions = false; for (Transaction<BlockSnapshot> transaction : postEvent.getTransactions()) { transaction.setValid(false); } } // Now we can gather the invalid transactions that either were marked as invalid from an event listener - OR - cancelled. // Because after, we will restore all the invalid transactions in reverse order. 
for (Transaction<BlockSnapshot> transaction : postEvent.getTransactions()) { if (!transaction.isValid()) { invalid.add(transaction); // Cancel any block drops performed, avoids any item drops, regardless final Location<World> location = transaction.getOriginal().getLocation().orElse(null); if (location != null) { final BlockPos pos = ((IMixinLocation) (Object) location).getBlockPos(); context.getBlockItemDropSupplier().removeAllIfNotEmpty(pos); context.getBlockEntitySpawnSupplier().removeAllIfNotEmpty(pos); context.getBlockEntitySpawnSupplier().removeAllIfNotEmpty(pos); } } } if (!invalid.isEmpty()) { // We need to set this value and return it to signify that some transactions were cancelled noCancelledTransactions = false; // NOW we restore the invalid transactions (remember invalid transactions are from either plugins marking them as invalid // or the events were cancelled), again in reverse order of which they were received. for (Transaction<BlockSnapshot> transaction : Lists.reverse(invalid)) { transaction.getOriginal().restore(true, BlockChangeFlag.NONE); if (state.tracksBlockSpecificDrops()) { // Cancel any block drops or harvests for the block change. // This prevents unnecessary spawns. final Location<World> location = transaction.getOriginal().getLocation().orElse(null); if (location != null) { final BlockPos pos = ((IMixinLocation) (Object) location).getBlockPos(); context.getBlockDropSupplier().removeAllIfNotEmpty(pos); } } } } return performBlockAdditions(postEvent.getTransactions(), state, context, noCancelledTransactions); } } public static void iterateChangeBlockEvents(ImmutableList<Transaction<BlockSnapshot>>[] transactionArrays, List<ChangeBlockEvent> blockEvents, ChangeBlockEvent[] mainEvents) { for (BlockChange blockChange : BlockChange.values()) { if (blockChange == BlockChange.DECAY) { // Decay takes place after. 
continue; } if (!transactionArrays[blockChange.ordinal()].isEmpty()) { final ChangeBlockEvent event = blockChange.createEvent(Sponge.getCauseStackManager().getCurrentCause(), transactionArrays[blockChange.ordinal()]); mainEvents[blockChange.ordinal()] = event; if (event != null) { SpongeImpl.postEvent(event); blockEvents.add(event); } } } if (!transactionArrays[BlockChange.DECAY.ordinal()].isEmpty()) { // Needs to be placed into iterateChangeBlockEvents final ChangeBlockEvent event = BlockChange.DECAY.createEvent(Sponge.getCauseStackManager().getCurrentCause(), transactionArrays[BlockChange.DECAY.ordinal()]); mainEvents[BlockChange.DECAY.ordinal()] = event; if (event != null) { SpongeImpl.postEvent(event); blockEvents.add(event); } } } @SuppressWarnings("rawtypes") public static boolean performBlockAdditions(List<Transaction<BlockSnapshot>> transactions, IPhaseState<?> phaseState, PhaseContext<?> phaseContext, boolean noCancelledTransactions) { // We have to use a proxy so that our pending changes are notified such that any accessors from block // classes do not fail on getting the incorrect block state from the IBlockAccess final SpongeProxyBlockAccess proxyBlockAccess = new SpongeProxyBlockAccess(transactions); final CapturedMultiMapSupplier<BlockPos, ItemDropData> capturedBlockDrops = phaseContext.getBlockDropSupplier(); final CapturedMultiMapSupplier<BlockPos, EntityItem> capturedBlockItemEntityDrops = phaseContext.getBlockItemDropSupplier(); final CapturedMultiMapSupplier<BlockPos, net.minecraft.entity.Entity> capturedBlockEntitySpawns = phaseContext.getBlockEntitySpawnSupplier(); for (Transaction<BlockSnapshot> transaction : transactions) { if (!transaction.isValid()) { // Rememver that this value needs to be set to false to return because of the fact that // a transaction was marked as invalid or cancelled. 
This is used primarily for // things like portal creation, and if false, removes the portal from the cache noCancelledTransactions = false; continue; // Don't use invalidated block transactions during notifications, these only need to be restored } // Handle custom replacements if (transaction.getCustom().isPresent()) { transaction.getFinal().restore(true, BlockChangeFlag.NONE); } final SpongeBlockSnapshot oldBlockSnapshot = (SpongeBlockSnapshot) transaction.getOriginal(); final SpongeBlockSnapshot newBlockSnapshot = (SpongeBlockSnapshot) transaction.getFinal(); final Location<World> worldLocation = oldBlockSnapshot.getLocation().get(); final IMixinWorldServer mixinWorldServer = (IMixinWorldServer) worldLocation.getExtent(); // Handle item drops captured final BlockPos pos = ((IMixinLocation) (Object) oldBlockSnapshot.getLocation().get()).getBlockPos(); // This is for pre-merged items capturedBlockDrops.acceptAndRemoveIfPresent(pos, items -> spawnItemDataForBlockDrops(items, newBlockSnapshot, phaseContext, phaseState)); // And this is for un-pre-merged items, these will be EntityItems, not ItemDropDatas. capturedBlockItemEntityDrops.acceptAndRemoveIfPresent(pos, items -> spawnItemEntitiesForBlockDrops(items, newBlockSnapshot, phaseContext, phaseState)); // This is for entities actually spawned capturedBlockEntitySpawns.acceptAndRemoveIfPresent(pos, items -> spawnEntitiesForBlock(items, newBlockSnapshot, phaseContext, phaseState)); SpongeHooks.logBlockAction(mixinWorldServer.asMinecraftWorld(), oldBlockSnapshot.blockChange, transaction); final BlockChangeFlag changeFlag = oldBlockSnapshot.getChangeFlag(); final IBlockState originalState = (IBlockState) oldBlockSnapshot.getState(); final IBlockState newState = (IBlockState) newBlockSnapshot.getState(); // We call onBlockAdded here for both TE blocks (BlockContainer's) and other blocks. 
// MixinChunk#setBlockState will only call onBlockAdded for BlockContainers when it's passed a null newBlockSnapshot, // which only happens when capturing is not being done. final PhaseTracker phaseTracker = PhaseTracker.getInstance(); if (changeFlag.performBlockPhysics() && originalState.getBlock() != newState.getBlock()) { newState.getBlock().onBlockAdded(mixinWorldServer.asMinecraftWorld(), pos, newState); final PhaseData peek = phaseTracker.getCurrentPhaseData(); if (peek.state == GeneralPhase.Post.UNWINDING) { ((IPhaseState) peek.state).unwind(peek.context); } } proxyBlockAccess.proceed(); ((IPhaseState) phaseState).handleBlockChangeWithUser(oldBlockSnapshot.blockChange, transaction, phaseContext); final int minecraftChangeFlag = oldBlockSnapshot.getUpdateFlag(); if (((minecraftChangeFlag & 2) != 0)) { // Always try to notify clients of the change. mixinWorldServer.asMinecraftWorld().notifyBlockUpdate(pos, originalState, newState, minecraftChangeFlag); } if (changeFlag.updateNeighbors()) { // Notify neighbors only if the change flag allowed it. 
mixinWorldServer.spongeNotifyNeighborsPostBlockChange(pos, originalState, newState, oldBlockSnapshot.getUpdateFlag()); } else if ((minecraftChangeFlag & 16) == 0) { mixinWorldServer.asMinecraftWorld().updateObservingBlocksAt(pos, newState.getBlock()); } final PhaseData peek = phaseTracker.getCurrentPhaseData(); if (peek.state == GeneralPhase.Post.UNWINDING) { ((IPhaseState) peek.state).unwind(peek.context); } } return noCancelledTransactions; } public static void spawnItemEntitiesForBlockDrops(Collection<EntityItem> entityItems, SpongeBlockSnapshot newBlockSnapshot, PhaseContext<?> phaseContext, IPhaseState<?> phaseState) { // Now we can spawn the entity items appropriately final List<Entity> itemDrops = entityItems.stream() .map(EntityUtil::fromNative) .collect(Collectors.toList()); try (CauseStackManager.StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame()) { Sponge.getCauseStackManager().pushCause(newBlockSnapshot); Sponge.getCauseStackManager().addContext(EventContextKeys.SPAWN_TYPE, InternalSpawnTypes.DROPPED_ITEM); final Optional<User> owner = phaseContext.getOwner(); final Optional<User> notifier = phaseContext.getNotifier(); if (notifier.isPresent()) { Sponge.getCauseStackManager().addContext(EventContextKeys.NOTIFIER, notifier.get()); } final User entityCreator = notifier.orElseGet(() -> owner.orElse(null)); final DropItemEvent.Destruct destruct = SpongeEventFactory.createDropItemEventDestruct(Sponge.getCauseStackManager().getCurrentCause(), itemDrops); SpongeImpl.postEvent(destruct); if (!destruct.isCancelled()) { for (Entity entity : destruct.getEntities()) { if (entityCreator != null) { EntityUtil.toMixin(entity).setCreator(entityCreator.getUniqueId()); } EntityUtil.getMixinWorld(entity).forceSpawnEntity(entity); } } } } public static void spawnItemDataForBlockDrops(Collection<ItemDropData> itemStacks, SpongeBlockSnapshot oldBlockSnapshot, PhaseContext<?> phaseContext, IPhaseState<?> state) { final Vector3i position = 
oldBlockSnapshot.getPosition();
// Snapshot the pending drops so Pre-event listeners can inspect and mutate the list.
final List<ItemStackSnapshot> itemSnapshots = itemStacks.stream()
        .map(ItemDropData::getStack)
        .map(ItemStackUtil::snapshotOf)
        .collect(Collectors.toList());
final ImmutableList<ItemStackSnapshot> originalSnapshots = ImmutableList.copyOf(itemSnapshots);
// Fire DropItemEvent.Pre with the broken block snapshot as the direct cause;
// listeners may edit itemSnapshots or cancel outright.
Sponge.getCauseStackManager().pushCause(oldBlockSnapshot);
final DropItemEvent.Pre dropItemEventPre = SpongeEventFactory.createDropItemEventPre(Sponge.getCauseStackManager().getCurrentCause(), originalSnapshots, itemSnapshots);
Sponge.getCauseStackManager().popCause();
SpongeImpl.postEvent(dropItemEventPre);
if (dropItemEventPre.isCancelled()) {
    // Cancellation empties the drop list; the isEmpty() check below then returns.
    itemStacks.clear();
}
if (itemStacks.isEmpty()) {
    return;
}
final World world = oldBlockSnapshot.getLocation().get().getExtent();
final WorldServer worldServer = (WorldServer) world;
// Now we can spawn the entity items appropriately
final List<Entity> itemDrops = itemStacks.stream().map(itemStack -> {
    final net.minecraft.item.ItemStack minecraftStack = itemStack.getStack();
    // Random scatter of each item entity around the block position (vanilla-style offsets).
    float f = 0.5F;
    double offsetX = worldServer.rand.nextFloat() * f + (1.0F - f) * 0.5D;
    double offsetY = worldServer.rand.nextFloat() * f + (1.0F - f) * 0.5D;
    double offsetZ = worldServer.rand.nextFloat() * f + (1.0F - f) * 0.5D;
    final double x = position.getX() + offsetX;
    final double y = position.getY() + offsetY;
    final double z = position.getZ() + offsetZ;
    EntityItem entityitem = new EntityItem(worldServer, x, y, z, minecraftStack);
    entityitem.setDefaultPickupDelay();
    return entityitem;
})
        .map(EntityUtil::fromNative)
        .collect(Collectors.toList());
try (CauseStackManager.StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame()) {
    Sponge.getCauseStackManager().pushCause(oldBlockSnapshot);
    Sponge.getCauseStackManager().addContext(EventContextKeys.SPAWN_TYPE, InternalSpawnTypes.DROPPED_ITEM);
    if(phaseContext.getNotifier().isPresent()) {
        Sponge.getCauseStackManager().addContext(EventContextKeys.NOTIFIER, phaseContext.getNotifier().get());
    }
    // Attribute created item entities to the notifier, falling back to the owner; may be null.
    final User entityCreator = phaseContext.getNotifier().orElseGet(() -> phaseContext.getOwner().orElse(null));
    final DropItemEvent.Destruct destruct = SpongeEventFactory.createDropItemEventDestruct(Sponge.getCauseStackManager().getCurrentCause(), itemDrops);
    SpongeImpl.postEvent(destruct);
    if (!destruct.isCancelled()) {
        // Spawn only the entities that survived the Destruct event.
        for (Entity entity : destruct.getEntities()) {
            if (entityCreator != null) {
                EntityUtil.toMixin(entity).setCreator(entityCreator.getUniqueId());
            }
            EntityUtil.getMixinWorld(entity).forceSpawnEntity(entity);
        }
    }
}
}

/**
 * Wraps raw Minecraft entities produced by a block into a SpawnEntityEvent and,
 * if the event is not cancelled, force-spawns the surviving entities, tagging
 * each with the phase's notifier (or owner) as creator.
 *
 * NOTE(review): newBlockSnapshot and phaseState are currently unused here —
 * presumably kept for API symmetry with sibling helpers; confirm before removing.
 */
public static void spawnEntitiesForBlock(Collection<net.minecraft.entity.Entity> entities, SpongeBlockSnapshot newBlockSnapshot, PhaseContext<?> phaseContext, IPhaseState<?> phaseState) {
    // Now we can spawn the entity items appropriately
    final List<Entity> entitiesSpawned = entities.stream()
            .map(EntityUtil::fromNative)
            .collect(Collectors.toList());
    final Optional<User> owner = phaseContext.getOwner();
    final Optional<User> notifier = phaseContext.getNotifier();
    // Notifier takes precedence over owner for creator attribution; may be null.
    final User entityCreator = notifier.orElseGet(() -> owner.orElse(null));
    final SpawnEntityEvent destruct = SpongeEventFactory.createSpawnEntityEvent(Sponge.getCauseStackManager().getCurrentCause(), entitiesSpawned);
    SpongeImpl.postEvent(destruct);
    if (!destruct.isCancelled()) {
        for (Entity entity : destruct.getEntities()) {
            if (entityCreator != null) {
                EntityUtil.toMixin(entity).setCreator(entityCreator.getUniqueId());
            }
            EntityUtil.getMixinWorld(entity).forceSpawnEntity(entity);
        }
    }
}

/**
 * Pushes each populated per-BlockChange main event as a cause, then fires the
 * combined ChangeBlockEvent.Post over the multi-change transaction list.
 * Returns the posted event, or null when there were no block events at all.
 */
public static ChangeBlockEvent.Post throwMultiEventsAndCreatePost(ImmutableList<Transaction<BlockSnapshot>>[] transactionArrays, List<ChangeBlockEvent> blockEvents, ChangeBlockEvent[] mainEvents) {
    if (!blockEvents.isEmpty()) {
        try (StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame()) {
            // Each block-change category's main event (if any) becomes part of the cause.
            for (BlockChange blockChange : BlockChange.values()) {
                final ChangeBlockEvent mainEvent = mainEvents[blockChange.ordinal()];
                if (mainEvent != null) {
                    Sponge.getCauseStackManager().pushCause(mainEvent);
                }
            }
            final
ImmutableList<Transaction<BlockSnapshot>> transactions = transactionArrays[MULTI_CHANGE_INDEX];
            final ChangeBlockEvent.Post post = SpongeEventFactory.createChangeBlockEventPost(Sponge.getCauseStackManager().getCurrentCause(), transactions);
            SpongeImpl.postEvent(post);
            return post;
        }
    }
    return null;
}

/**
 * Convenience overload of {@link #splitAndSpawnEntities(List, Consumer)} that
 * applies no per-entity mixin callback.
 */
public static void splitAndSpawnEntities(List<Entity> entities) {
    splitAndSpawnEntities(entities, (entity) -> {});
}

/**
 * Groups the given entities by the world they belong to, fires one
 * SpawnEntityEvent per world, and force-spawns the entities of every event
 * that was not cancelled. The consumer is applied to each surviving entity's
 * mixin before spawning.
 *
 * Fix: an empty input list previously fell through the {@code size() > 1}
 * check to {@code entities.get(0)} and threw IndexOutOfBoundsException; it is
 * now a no-op.
 */
public static void splitAndSpawnEntities(List<Entity> entities, Consumer<IMixinEntity> mixinEntityConsumer) {
    if (entities.isEmpty()) {
        // Nothing to spawn — avoid the get(0) crash the original code had.
        return;
    }
    if (entities.size() > 1) {
        // Multiple entities may span multiple worlds; bucket them per world so
        // each world gets its own SpawnEntityEvent.
        final HashMultimap<World, Entity> entityListMap = HashMultimap.create();
        for (Entity entity : entities) {
            entityListMap.put(entity.getWorld(), entity);
        }
        for (Map.Entry<World, Collection<Entity>> entry : entityListMap.asMap().entrySet()) {
            postSpawnEventAndSpawn(entry.getKey(), new ArrayList<>(entry.getValue()), mixinEntityConsumer);
        }
        return;
    }
    // Single entity: fire the event with the caller's list, as before.
    final Entity singleEntity = entities.get(0);
    postSpawnEventAndSpawn(singleEntity.getWorld(), entities, mixinEntityConsumer);
}

/**
 * Fires a SpawnEntityEvent for one world's batch and, unless cancelled,
 * applies the mixin consumer and force-spawns each surviving entity.
 */
private static void postSpawnEventAndSpawn(World world, List<Entity> worldEntities, Consumer<IMixinEntity> mixinEntityConsumer) {
    final SpawnEntityEvent event = SpongeEventFactory.createSpawnEntityEvent(Sponge.getCauseStackManager().getCurrentCause(), worldEntities);
    SpongeImpl.postEvent(event);
    if (!event.isCancelled()) {
        for (Entity entity : event.getEntities()) {
            mixinEntityConsumer.accept(EntityUtil.toMixin(entity));
            ((IMixinWorldServer) world).forceSpawnEntity(entity);
        }
    }
}
}
package org.usfirst.frc.team2339.Barracuda.subsystems; /* * Add a swerve mode to RobotDrive * Code from Chief Delphi: http://www.chiefdelphi.com/forums/showthread.php?t=117099 */ import org.usfirst.frc.team2339.Barracuda.smartdashboard.SendablePosition; import org.usfirst.frc.team2339.Barracuda.swervemath.SwerveWheel.RectangularCoordinates; import org.usfirst.frc.team2339.Barracuda.swervemath.SwerveWheel.RobotMotion; import org.usfirst.frc.team2339.Barracuda.swervemath.SwerveWheel.VelocityPolar; import edu.wpi.first.wpilibj.command.Subsystem; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * * */ public class SwerveDrive extends Subsystem { /* * Wheel are numbered in counter-clockwise order when viewed from top of robot. * For a typical four wheel configuration the front right wheel is Number 0. * This follows the scheme in Ether's derivation of swerve inverse kinematics. * @See http://www.chiefdelphi.com/media/papers/2426 * * Note: Ether starts numbering at 1, but Java indices start at zero. * Thus the wheel indices are one less than Ether's numbers. 
* */ protected final SwerveWheelDrive wheels[]; protected RectangularCoordinates pivot = new RectangularCoordinates(0, 0); // Distance of wheel farthest from pivot protected double maxWheelRadius = 1; public SwerveDrive(SwerveWheelDrive wheels[]) { this.wheels = new SwerveWheelDrive[wheels.length]; for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { this.wheels[iiWheel] = wheels[iiWheel]; } setPivot(new RectangularCoordinates(0, 0)); } public double getMaxWheelRadius() { return maxWheelRadius; } public void setMaxWheelRadius() { maxWheelRadius = 0; for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { double radius = wheels[iiWheel].getWheelPosition().subtract(pivot).magnitude(); if (radius > maxWheelRadius) { maxWheelRadius = radius; } } } public RectangularCoordinates getPivot() { //SmartDashboard.putData("Drive pivot ", new SendablePosition(pivot.x, pivot.y)); SmartDashboard.putNumber("Drive pivot y ", pivot.y); return pivot; } public void setPivot(RectangularCoordinates pivot) { this.pivot = pivot; setMaxWheelRadius(); } public void resetSteering() { for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { wheels[iiWheel].resetSteering(); } } public void enableSteering(boolean enable) { for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { wheels[iiWheel].enableSteering(enable); } } public void stopRobot() { for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { wheels[iiWheel].setWheelSpeed(0.0); } } /** * Drive in swerve mode with a given wheel speeds and directions. * Driving parameters are assumed to be relative to the current robot angle. * @param rawVelocities desired speed and direction vectors for each wheel. 
*/ public void swerveDriveRobot( VelocityPolar rawVelocities[]) { double speedMax = 0; for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { if (Math.abs(rawVelocities[iiWheel].speed) > speedMax) { speedMax = rawVelocities[iiWheel].speed; } } if (speedMax > 1.0) { // Normalize speeds to less than |1.0| for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { rawVelocities[iiWheel].speed /= speedMax; } } for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { SmartDashboard.putNumber("Wheel " + iiWheel + " raw ", rawVelocities[iiWheel].angle); } for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { wheels[iiWheel].setWheelSanely(rawVelocities[iiWheel]); } for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { SmartDashboard.putNumber("Wheel " + iiWheel + " encoder angle ", wheels[iiWheel].getSteeringAngle()); } } /** * Drive in swerve mode with a given speed and direction. * Driving parameters are assumed to be relative to the current robot angle. * Angles are counter-clockwise from top of robot, with zero deg forward. * @param robotVelocity desired speed and direction vector. */ public void swerveDriveRobot( VelocityPolar robotVelocity) { VelocityPolar rawVelocities[] = new VelocityPolar[wheels.length]; for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { rawVelocities[iiWheel] = robotVelocity; } swerveDriveRobot(rawVelocities); } /** * Drive in swerve mode with a given speed and rotation. * Driving parameters are assumed to be relative to the current robot angle. * @param robotMotion desired motion of robot express by strafe, frontBack, and rotation around a pivot point. 
*/ public void swerveDriveRobot( RobotMotion robotMotion) { VelocityPolar rawVelocities[] = new VelocityPolar[wheels.length]; for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { rawVelocities[iiWheel] = wheels[iiWheel].calculateWheelVelocity(getPivot(), maxWheelRadius, robotMotion); } swerveDriveRobot(rawVelocities); } /** * Drive in swerve mode with a given speed and rotation. * Driving parameters are assumed to be absolute based on a fixed angle, e.g. the field. * @param robotMotion desired motion of robot express by strafe, frontBack, and rotation around a pivot point. * @param robotAngle Angle (in degrees) of robot relative to fixed angle. Zero degrees means front of robot points in desired direction. * Positive is clockwise, negative counter-clockwise. This is probably taken from the gyro. */ public void swerveDriveAbsolute( RobotMotion robotMotion, double robotAngle) { double robotAngleRad = Math.toRadians(robotAngle); RobotMotion relativeMotion = new RobotMotion( robotMotion.strafe * Math.cos(robotAngleRad) - robotMotion.frontBack * Math.sin(robotAngleRad), robotMotion.strafe * Math.sin(robotAngleRad) + robotMotion.frontBack * Math.cos(robotAngleRad), robotMotion.rotate); this.swerveDriveRobot(relativeMotion); } /** * Set speed and angle values when joystick in dead band */ public void setDeadBandValues() { for (int iiWheel = 0; iiWheel < wheels.length; iiWheel++) { // Keep current angle, set speed to zero wheels[iiWheel].maintainSteeringAngle(); wheels[iiWheel].setWheelSpeed(0); } } /** * Class to store angle and flip together * @author emiller * */ public class AngleFlip { private double angle; private boolean flip; public AngleFlip() { setAngle(0); setFlip(false); } public AngleFlip(double angle) { this.setAngle(angle); setFlip(false); } public AngleFlip(double angle, boolean flip) { this.setAngle(angle); flip = false; } /** * @return the angle */ public double getAngle() { return angle; } /** * @param angle the angle to set */ public void 
setAngle(double angle) { this.angle = angle; } /** * @return the flip */ public boolean isFlip() { return flip; } /** * @param flip the flip to set */ public void setFlip(boolean flip) { this.flip = flip; } }; /** * Normalizes an angle in degrees to (-180, 180]. * @param theta Angle to normalize * @return Normalized angle */ public double normalizeAngle(double theta) { while (theta > 180) { theta -= 360; } while (theta < -180) { theta += 360; } return theta; } /** * Compute angle needed to turn and whether or not flip is needed * @param currentAngle * @param targetAngle * @return new angle with flip */ public AngleFlip computeTurnAngle(double currentAngle, double targetAngle) { AngleFlip turnAngle = new AngleFlip(targetAngle - currentAngle, false); if (Math.abs(turnAngle.getAngle()) > 90) { turnAngle.setAngle(normalizeAngle(turnAngle.getAngle() + 180)); turnAngle.setFlip(true); } return turnAngle; } /** * Compute change angle to get from current to target angle. * @param currentAngle Current angle * @param targetAngle New angle to change to * @return change angle */ public double computeChangeAngle(double currentAngle, double targetAngle) { return computeTurnAngle(currentAngle, targetAngle).getAngle(); } /** * Scale drive speed based on how far wheel needs to turn * @param turnAngle Angle wheel needs to turn (with flip value) * @return speed scale factor in range [0, 1] */ public double driveScale(AngleFlip turnAngle) { double scale = 0; if (Math.abs(turnAngle.getAngle()) < 45) { /* * Eric comment: I don't like the discontinuous nature of this scaling. * Possible improvements: * 1) Use cosine(2 * turnAngle) * 2) Scale any angle < 90. */ scale = Math.cos(Math.toRadians(turnAngle.getAngle())); } else { scale = 0; } if (turnAngle.isFlip()) { scale = -scale; } return scale; } public void initDefaultCommand() { } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: ExsltSets.java 469688 2006-10-31 22:39:43Z minchau $
 */
package org.apache.xalan.lib;

import java.util.HashMap;
import java.util.Map;

import org.apache.xml.utils.DOMHelper;
import org.apache.xpath.NodeSet;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * EXSLT set extension functions, made available under the namespace:
 * <pre>
 *   xmlns:set="http://exslt.org/sets"
 * </pre>
 * Each function's contract follows the corresponding EXSLT implementer page.
 *
 * @see <a href="http://www.exslt.org/">EXSLT</a>
 * @xsl.usage general
 */
public class ExsltSets extends ExsltBase
{
  /**
   * set:leading — nodes of the first node-set that precede, in document order,
   * the first node of the second node-set. Returns all of nl1 when nl2 is
   * empty, and the empty set when nl2's first node is not contained in nl1.
   *
   * @param nl1 NodeList for first node-set.
   * @param nl2 NodeList for second node-set.
   * @return the qualifying nodes of nl1, per the rules above.
   *
   * @see <a href="http://www.exslt.org/">EXSLT</a>
   */
  public static NodeList leading (NodeList nl1, NodeList nl2)
  {
    // No boundary set: everything in nl1 qualifies.
    if (nl2.getLength() == 0)
      return nl1;

    NodeSet members = new NodeSet(nl1);
    NodeSet result = new NodeSet();
    Node boundary = nl2.item(0);

    // Boundary node outside nl1 yields the empty set.
    if (!members.contains(boundary))
      return result;

    for (int idx = 0; idx < nl1.getLength(); idx++)
    {
      Node candidate = nl1.item(idx);
      // Keep nodes strictly before the boundary in document order.
      if (DOMHelper.isNodeAfter(candidate, boundary)
          && !DOMHelper.isNodeTheSame(candidate, boundary))
        result.addElement(candidate);
    }

    return result;
  }

  /**
   * set:trailing — nodes of the first node-set that follow, in document order,
   * the first node of the second node-set. Returns all of nl1 when nl2 is
   * empty, and the empty set when nl2's first node is not contained in nl1.
   *
   * @param nl1 NodeList for first node-set.
   * @param nl2 NodeList for second node-set.
   * @return the qualifying nodes of nl1, per the rules above.
   *
   * @see <a href="http://www.exslt.org/">EXSLT</a>
   */
  public static NodeList trailing (NodeList nl1, NodeList nl2)
  {
    // No boundary set: everything in nl1 qualifies.
    if (nl2.getLength() == 0)
      return nl1;

    NodeSet members = new NodeSet(nl1);
    NodeSet result = new NodeSet();
    Node boundary = nl2.item(0);

    // Boundary node outside nl1 yields the empty set.
    if (!members.contains(boundary))
      return result;

    for (int idx = 0; idx < nl1.getLength(); idx++)
    {
      Node candidate = nl1.item(idx);
      // Keep nodes strictly after the boundary in document order.
      if (DOMHelper.isNodeAfter(boundary, candidate)
          && !DOMHelper.isNodeTheSame(boundary, candidate))
        result.addElement(candidate);
    }

    return result;
  }

  /**
   * set:intersection — the nodes contained in both argument node-sets.
   *
   * @param nl1 NodeList for first node-set.
   * @param nl2 NodeList for second node-set.
   * @return a NodeList containing the nodes in nl1 that are also in nl2.
   *
   * @see <a href="http://www.exslt.org/">EXSLT</a>
   */
  public static NodeList intersection(NodeList nl1, NodeList nl2)
  {
    NodeSet left = new NodeSet(nl1);
    NodeSet right = new NodeSet(nl2);
    NodeSet result = new NodeSet();

    result.setShouldCacheNodes(true);

    for (int idx = 0; idx < left.getLength(); idx++)
    {
      Node node = left.elementAt(idx);
      if (right.contains(node))
        result.addElement(node);
    }

    return result;
  }

  /**
   * set:difference — the nodes of the first node-set that are not in the
   * second node-set.
   *
   * @param nl1 NodeList for first node-set.
   * @param nl2 NodeList for second node-set.
   * @return a NodeList containing the nodes in nl1 that are not in nl2.
   *
   * @see <a href="http://www.exslt.org/">EXSLT</a>
   */
  public static NodeList difference(NodeList nl1, NodeList nl2)
  {
    NodeSet left = new NodeSet(nl1);
    NodeSet right = new NodeSet(nl2);
    NodeSet result = new NodeSet();

    result.setShouldCacheNodes(true);

    for (int idx = 0; idx < left.getLength(); idx++)
    {
      Node node = left.elementAt(idx);
      if (!right.contains(node))
        result.addElement(node);
    }

    return result;
  }

  /**
   * set:distinct — selects each node of the input whose string value has not
   * already been seen earlier in document order; duplicates (by string value)
   * keep only the first occurrence.
   *
   * @param nl NodeList for the node-set.
   * @return a NodeList with nodes from nl containing distinct string values.
   *
   * @see <a href="http://www.exslt.org/">EXSLT</a>
   */
  public static NodeList distinct(NodeList nl)
  {
    NodeSet result = new NodeSet();
    result.setShouldCacheNodes(true);

    Map seen = new HashMap();

    for (int idx = 0; idx < nl.getLength(); idx++)
    {
      Node node = nl.item(idx);
      String value = toString(node);

      if (value == null)
      {
        // Nodes without a string value are always kept.
        result.addElement(node);
      }
      else if (!seen.containsKey(value))
      {
        seen.put(value, node);
        result.addElement(node);
      }
    }

    return result;
  }

  /**
   * set:has-same-node — true if the two node-sets share at least one node.
   *
   * The Xalan extensions MethodResolver converts 'has-same-node' to 'hasSameNode'.
   *
   * Note: Not to be confused with hasSameNodes in the Xalan namespace, which
   * returns true only when the two node sets contain exactly the same nodes
   * (perhaps in a different order).
   *
   * @see <a href="http://www.exslt.org/">EXSLT</a>
   */
  public static boolean hasSameNode(NodeList nl1, NodeList nl2)
  {
    NodeSet candidates = new NodeSet(nl1);
    NodeSet others = new NodeSet(nl2);

    for (int idx = 0; idx < candidates.getLength(); idx++)
    {
      if (others.contains(candidates.elementAt(idx)))
        return true;
    }
    return false;
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.raptor.storage;

import com.facebook.presto.orc.BooleanVector;
import com.facebook.presto.orc.DoubleVector;
import com.facebook.presto.orc.LongVector;
import com.facebook.presto.orc.OrcPredicate;
import com.facebook.presto.orc.OrcReader;
import com.facebook.presto.orc.OrcRecordReader;
import com.facebook.presto.orc.SliceVector;
import com.facebook.presto.raptor.metadata.ColumnStats;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.BooleanType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarcharType;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;

import java.io.IOException;
import java.util.List;
import java.util.Optional;

import static com.facebook.presto.raptor.RaptorErrorCode.RAPTOR_ERROR;
import static java.lang.Double.isInfinite;
import static java.lang.Double.isNaN;
import static org.joda.time.DateTimeZone.UTC;

/**
 * Computes per-column min/max statistics for a Raptor shard by scanning a
 * single column of an ORC file. Null values are skipped; a bound is omitted
 * (null) when no usable value was seen.
 */
public final class ShardStats
{
    /**
     * Maximum length of a binary value stored in an index.
     */
    public static final int MAX_BINARY_INDEX_SIZE = 100;

    private ShardStats() {}

    /**
     * Clamps an index value to at most {@link #MAX_BINARY_INDEX_SIZE} bytes.
     */
    public static Slice truncateIndexValue(Slice slice)
    {
        return (slice.length() > MAX_BINARY_INDEX_SIZE)
                ? slice.slice(0, MAX_BINARY_INDEX_SIZE)
                : slice;
    }

    /**
     * Computes min/max stats for the given column, or empty when the type is
     * not supported for indexing.
     */
    public static Optional<ColumnStats> computeColumnStats(OrcReader orcReader, long columnId, Type type)
            throws IOException
    {
        return Optional.ofNullable(doComputeColumnStats(orcReader, columnId, type));
    }

    /**
     * Dispatches to the type-specific indexer; returns null for unsupported types.
     */
    private static ColumnStats doComputeColumnStats(OrcReader orcReader, long columnId, Type type)
            throws IOException
    {
        int columnIndex = columnIndex(orcReader.getColumnNames(), columnId);
        OrcRecordReader recordReader = orcReader.createRecordReader(ImmutableMap.of(columnIndex, type), OrcPredicate.TRUE, UTC);

        if (type.equals(BooleanType.BOOLEAN)) {
            return indexBoolean(recordReader, columnIndex, columnId);
        }
        if (type.equals(BigintType.BIGINT) || type.equals(DateType.DATE) || type.equals(TimestampType.TIMESTAMP)) {
            return indexLong(recordReader, columnIndex, columnId);
        }
        if (type.equals(DoubleType.DOUBLE)) {
            return indexDouble(recordReader, columnIndex, columnId);
        }
        if (type.equals(VarcharType.VARCHAR)) {
            return indexString(recordReader, columnIndex, columnId);
        }
        // Type has no index support.
        return null;
    }

    /**
     * Locates the ORC column whose name is the decimal string form of the column ID.
     *
     * @throws PrestoException when the column is absent from the file
     */
    private static int columnIndex(List<String> columnNames, long columnId)
    {
        String name = String.valueOf(columnId);
        int index = columnNames.indexOf(name);
        if (index < 0) {
            throw new PrestoException(RAPTOR_ERROR, "Missing column ID: " + columnId);
        }
        return index;
    }

    private static ColumnStats indexBoolean(OrcRecordReader recordReader, int columnIndex, long columnId)
            throws IOException
    {
        boolean hasMin = false;
        boolean hasMax = false;
        boolean min = false;
        boolean max = false;

        // Fold each batch into the running bounds; nulls are skipped.
        for (int rows = recordReader.nextBatch(); rows > 0; rows = recordReader.nextBatch()) {
            BooleanVector vector = new BooleanVector(rows);
            recordReader.readVector(columnIndex, vector);
            for (int position = 0; position < rows; position++) {
                if (vector.isNull[position]) {
                    continue;
                }
                boolean value = vector.vector[position];
                if (!hasMin || Boolean.compare(value, min) < 0) {
                    hasMin = true;
                    min = value;
                }
                if (!hasMax || Boolean.compare(value, max) > 0) {
                    hasMax = true;
                    max = value;
                }
            }
        }

        return new ColumnStats(columnId, hasMin ? min : null, hasMax ? max : null);
    }

    private static ColumnStats indexLong(OrcRecordReader recordReader, int columnIndex, long columnId)
            throws IOException
    {
        boolean hasMin = false;
        boolean hasMax = false;
        long min = 0;
        long max = 0;

        for (int rows = recordReader.nextBatch(); rows > 0; rows = recordReader.nextBatch()) {
            LongVector vector = new LongVector(rows);
            recordReader.readVector(columnIndex, vector);
            for (int position = 0; position < rows; position++) {
                if (vector.isNull[position]) {
                    continue;
                }
                long value = vector.vector[position];
                if (!hasMin || (value < min)) {
                    hasMin = true;
                    min = value;
                }
                if (!hasMax || (value > max)) {
                    hasMax = true;
                    max = value;
                }
            }
        }

        return new ColumnStats(columnId, hasMin ? min : null, hasMax ? max : null);
    }

    private static ColumnStats indexDouble(OrcRecordReader recordReader, int columnIndex, long columnId)
            throws IOException
    {
        boolean hasMin = false;
        boolean hasMax = false;
        double min = 0;
        double max = 0;

        for (int rows = recordReader.nextBatch(); rows > 0; rows = recordReader.nextBatch()) {
            DoubleVector vector = new DoubleVector(rows);
            recordReader.readVector(columnIndex, vector);
            for (int position = 0; position < rows; position++) {
                if (vector.isNull[position]) {
                    continue;
                }
                double value = vector.vector[position];
                if (isNaN(value)) {
                    // NaN is unordered and useless as a bound.
                    continue;
                }
                if (value == -0.0) {
                    // Canonicalize signed zero.
                    value = 0.0;
                }
                if (!hasMin || (value < min)) {
                    hasMin = true;
                    min = value;
                }
                if (!hasMax || (value > max)) {
                    hasMax = true;
                    max = value;
                }
            }
        }

        // Infinite bounds are not stored.
        if (isInfinite(min)) {
            hasMin = false;
        }
        if (isInfinite(max)) {
            hasMax = false;
        }

        return new ColumnStats(columnId, hasMin ? min : null, hasMax ? max : null);
    }

    private static ColumnStats indexString(OrcRecordReader recordReader, int columnIndex, long columnId)
            throws IOException
    {
        boolean hasMin = false;
        boolean hasMax = false;
        Slice min = null;
        Slice max = null;

        for (int rows = recordReader.nextBatch(); rows > 0; rows = recordReader.nextBatch()) {
            SliceVector vector = new SliceVector(rows);
            recordReader.readVector(columnIndex, vector);
            for (int position = 0; position < rows; position++) {
                Slice value = vector.getSliceAtPosition(position);
                if (value == null) {
                    continue;
                }
                // Bounds are compared and stored in truncated form.
                value = truncateIndexValue(value);
                if (!hasMin || (value.compareTo(min) < 0)) {
                    hasMin = true;
                    min = value;
                }
                if (!hasMax || (value.compareTo(max) > 0)) {
                    hasMax = true;
                    max = value;
                }
            }
        }

        return new ColumnStats(columnId,
                hasMin ? min.toStringUtf8() : null,
                hasMax ? max.toStringUtf8() : null);
    }
}
/*
 * Copyright 2014 Higher Frequency Trading
 *
 * http://www.higherfrequencytrading.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle;

import net.openhft.chronicle.tools.ChronicleIndexReader;
import net.openhft.chronicle.tools.ChronicleTools;
import net.openhft.lang.io.StopCharTesters;
import net.openhft.lang.model.constraints.NotNull;
import org.junit.Ignore;
import org.junit.Test;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;

import static org.junit.Assert.*;

/**
 * @author peter.lawrey
 */
public class IndexedChronicleTest extends IndexedChronicleTestBase {

    static {
        ChronicleTools.warmup();
    }

    public static final String TMP = System.getProperty("java.io.tmpdir");
    // Iterations below this index are treated as JIT/page-cache warm-up when
    // measuring jitter/delay in the multi-threaded tests.
    private static final long WARMUP = 20000;

    /**
     * Asserts that excerpt {@code r} holds the record written for iteration
     * {@code i}: a long followed by a double, both equal to i, with positions,
     * remaining bytes and capacity consistent with the expected record size.
     */
    static void validateExcerpt(@NotNull ExcerptCommon r, int i, int expected) {
        // Sizes may legitimately vary (padding); only assert when clearly out of range.
        if (expected > r.remaining() || 8 * expected < r.remaining())
            assertEquals("index: " + r.index(), expected, r.remaining());
        if (expected > r.limit() || 8 * expected < r.limit())
            assertEquals("index: " + r.index(), expected, r.capacity());
        assertEquals(0, r.position());
        long l = r.readLong();
        assertEquals(i, l);
        assertEquals(8, r.position());
        if (expected - 8 != r.remaining())
            assertEquals("index: " + r.index(), expected - 8, r.remaining());
        double d = r.readDouble();
        assertEquals(i, d, 0.0);
        if (0 != r.remaining())
            assertEquals("index: " + r.index(), 0, r.remaining());
        // Re-read the first field after repositioning to verify random access.
        r.position(0);
        long l2 = r.readLong();
        assertEquals(i, l2);
        r.position(expected);
        r.finish();
    }

    /**
     * Checks Excerpt.findRange against the bounds produced by a binary search
     * over the sorted list of written ints for the comparator's [lo, hi] window.
     */
    static void testSearchRange(List<Integer> ints, Excerpt excerpt, MyExcerptComparator mec, long[] startEnd) {
        int elo = Collections.binarySearch(ints, mec.lo);
        if (elo < 0) {
            elo = ~elo;
        }
        int ehi = Collections.binarySearch(ints, mec.hi);
        if (ehi < 0) {
            ehi = ~ehi;
        } else {
            ehi++;
        }
        excerpt.findRange(startEnd, mec);
        assertEquals(
                "lo: " + mec.lo + ", hi: " + mec.hi,
                "[" + elo + ", " + ehi + "]",
                Arrays.toString(startEnd));
    }

    /**
     * Comparator classifying an excerpt's leading int against an inclusive
     * [lo, hi] window: -1 below, +1 above, 0 inside.
     */
    static class MyExcerptComparator implements ExcerptComparator {
        int lo, hi;

        @Override
        public int compare(Excerpt excerpt) {
            final int x = excerpt.readInt();
            return x < lo ? -1 : x > hi ? +1 : 0;
        }
    }

    // *************************************************************************
    //
    // *************************************************************************

    /**
     * Exercises wasPadding()/index()/nextIndex() interactions around excerpts
     * that exactly fill, and then overflow, a small data block.
     */
    @Test
    @Ignore
    public void testWasPadding() throws IOException {
        final String basePath = getTestPath();
        final ChronicleQueueBuilder builder = ChronicleQueueBuilder
                .indexed(basePath)
                .test()
                .dataBlockSize(128)
                .indexBlockSize(128);
        final Chronicle chronicle1 = builder.build();
        final Chronicle chronicle2 = builder.build();
        try {
            final ExcerptAppender appender = chronicle1.createAppender();
            final ExcerptTailer tailer = chronicle2.createTailer();
            assertEquals(-1, tailer.index());
            assertTrue(tailer.wasPadding());
            assertFalse(tailer.index(-1));
            assertTrue(tailer.wasPadding());
            appender.startExcerpt(48);
            appender.position(48);
            appender.finish();
            assertTrue(tailer.nextIndex());
            assertFalse(tailer.wasPadding());
            assertEquals(0, tailer.index());
            assertTrue(tailer.index(0));
            assertFalse(tailer.wasPadding());
            // rewind it to the start - issue # 12
            assertFalse(tailer.index(-1));
            assertEquals(-1, tailer.index());
            assertTrue(tailer.nextIndex());
            assertFalse(tailer.wasPadding());
            assertEquals(0, tailer.index());
            // end of issue # 12;
            assertFalse(tailer.nextIndex());
            assertFalse(tailer.wasPadding());
            assertEquals(0, tailer.index());
assertFalse(tailer.index(1));
            assertFalse(tailer.wasPadding());
            appender.startExcerpt(48);
            appender.position(48);
            appender.finish();
            assertTrue(tailer.nextIndex());
            assertFalse(tailer.wasPadding());
            assertEquals(2, tailer.index());
            assertTrue(tailer.index(1));
            assertFalse(tailer.wasPadding());
            assertEquals(1, tailer.index());
            assertFalse(tailer.nextIndex());
            assertFalse(tailer.wasPadding());
            assertEquals(1, tailer.index());
            assertFalse(tailer.index(2));
            assertFalse(tailer.wasPadding());
            assertEquals(2, tailer.index());
            // doesn't fit.
            appender.startExcerpt(48);
            appender.position(48);
            appender.finish();
            assertFalse(tailer.index(2));
            assertTrue(tailer.wasPadding());
            assertEquals(2, tailer.index());
            assertTrue(tailer.index(1));
            assertTrue(tailer.nextIndex());
            assertFalse(tailer.wasPadding());
            assertEquals(3, tailer.index());
            assertFalse(tailer.index(2));
            assertTrue(tailer.wasPadding());
            assertEquals(2, tailer.index());
            assertTrue(tailer.index(3));
            assertFalse(tailer.wasPadding());
            assertEquals(3, tailer.index());
            assertFalse(tailer.index(4));
            assertFalse(tailer.wasPadding());
            assertEquals(4, tailer.index());
            appender.close();
            tailer.close();
        } finally {
            chronicle1.close();
            chronicle2.close();
            assertClean(basePath);
        }
    }

    /**
     * Creating and then clearing a chronicle removes its backing files.
     */
    @Test
    public void testClean() throws IOException {
        final String basePath = getTestPath();
        final Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath).build();
        assertExists(basePath);
        chronicle.close();
        chronicle.clear();
        assertNotExists(basePath);
    }

    /**
     * Single-threaded round-trip: writes long+double records of random
     * capacity, then validates each record via both a sequential tailer and a
     * random-access excerpt (which must tolerate padding entries).
     */
    @Test
    public void singleThreaded() throws IOException {
        final String basePath = getTestPath();
        final int runs = 50000;
        int dataBlockSize = 4 * 1024;
        Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath)
                .test()
                .dataBlockSize(dataBlockSize)
                .indexBlockSize(128 * 1024)
                .build();
        int i = 0;
        try {
            ExcerptAppender w = chronicle.createAppender();
            ExcerptTailer r = chronicle.createTailer();
            Excerpt e = chronicle.createExcerpt();
            // Fixed seed keeps the capacity sequence reproducible.
            Random rand = new Random(1);
            // finish just at the end of the first page.
            int idx = 0;
            for (i = 0; i < runs; i++) {
                // Nothing to read before the write happens.
                assertFalse("i: " + i, r.nextIndex());
                assertFalse("i: " + i, e.index(idx));
                int capacity = 16 * (1 + rand.nextInt(7));
                w.startExcerpt(capacity);
                assertEquals(0, w.position());
                w.writeLong(i);
                assertEquals(8, w.position());
                w.writeDouble(i);
                int expected = 16;
                assertEquals(expected, w.position());
                assertEquals(capacity - expected, w.remaining());
                w.finish();
                if (!r.nextIndex()) {
                    assertTrue(r.nextIndex());
                }
                validateExcerpt(r, i, expected);
                // Random access may land on a padding entry; skip to the next index.
                if (!e.index(idx++)) {
                    assertTrue(e.wasPadding());
                    assertTrue(e.index(idx++));
                }
                validateExcerpt(e, i, expected);
            }
            w.close();
            r.close();
        } finally {
            chronicle.close();
            assertEquals(runs, i);
            assertClean(basePath);
        }
    }

    /**
     * Producer/consumer throughput test: a writer thread appends int records
     * while this thread tails them, tracking worst-case delay and jitter.
     */
    @Test
    public void multiThreaded() throws IOException, InterruptedException {
        // for (int i = 0; i < 20; i++) System.out.println();
        if (Runtime.getRuntime().availableProcessors() < 2) {
            System.err.println("Test requires 2 CPUs, skipping");
            return;
        }
        final String basePath = getTestPath();
        final int dataBlockSize = 1 << 26;
        Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath)
                .dataBlockSize(dataBlockSize)
                .indexBlockSize(dataBlockSize / 4)
                .build();
        final ExcerptTailer r = chronicle.createTailer();
        // shorten the test for a build server.
final long words = 50L * 1000 * 1000; final int size = 4; long start = System.nanoTime(); Thread t = new Thread(new Runnable() { @Override public void run() { try { final Chronicle c = ChronicleQueueBuilder.indexed(basePath) .dataBlockSize(dataBlockSize) .indexBlockSize(dataBlockSize / 4) .build(); final ExcerptAppender w = c.createAppender(); for (int i = 0; i < words; i += size) { w.startExcerpt(); for (int s = 0; s < size; s++) { w.writeInt(1 + i); } // w.position(4L * size); w.finish(); // System.out.println(i); } w.close(); c.close(); } catch (Throwable e) { e.printStackTrace(); } } }); t.start(); long maxDelay = 0, maxJitter = 0; for (long i = 0; i < words; i += size) { if (!r.nextIndex()) { long start0 = System.nanoTime(); long last = start0; while (!r.nextIndex()) { long now = System.nanoTime(); long jitter = now - last; if (i > WARMUP && maxJitter < jitter) maxJitter = jitter; long delay0 = now - start0; if (delay0 > 200e6) throw new AssertionError("delay: " + delay0 / 1000000 + ", index: " + r.index()); if (i > WARMUP && maxDelay < delay0) maxDelay = delay0; last = now; } } try { for (int s = 0; s < size; s++) { int j = r.readInt(); if (j != i + 1) { ChronicleIndexReader.main(basePath + ".index"); throw new AssertionError(j + " != " + (i + 1)); } } r.finish(); } catch (Exception e) { System.err.println("i= " + i); e.printStackTrace(); break; } } r.close(); long rate = words / size * 10 * 1000L / (System.nanoTime() - start); System.out.println("Rate = " + rate / 10.0 + " Mmsg/sec for " + size * 4 + " byte messages, " + "maxJitter: " + maxJitter / 1000 + " us, " + "maxDelay: " + maxDelay / 1000 + " us," + ""); // "totalWait: " + (PrefetchingMappedFileCache.totalWait.longValue() + SingleMappedFileCache.totalWait.longValue()) / 1000 + " us"); t.join(); chronicle.close(); assertClean(basePath); } @Test @Ignore public void multiThreaded2() throws IOException, InterruptedException { if (Runtime.getRuntime().availableProcessors() < 3) { System.err.println("Test 
requires 3 CPUs, skipping"); return; } final String basePath1 = getTestPath("-1"); final String basePath2 = getTestPath("-2"); // config.dataBlockSize(4*1024); // config.indexBlockSize(4 * 1024); final int runs = 100 * 1000 * 1000; final int size = 4; long start = System.nanoTime(); Thread t = new Thread(new Runnable() { @Override public void run() { try { Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath1).build(); final ExcerptAppender w = chronicle.createAppender(); for (int i = 0; i < runs; i += size) { w.startExcerpt(); for (int s = 0; s < size; s++) w.writeInt(1 + i); w.finish(); } w.close(); // chronicle.close(); } catch (IOException e) { e.printStackTrace(); } } }, "t1"); t.start(); Thread t2 = new Thread(new Runnable() { @Override public void run() { try { Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath1).build(); final ExcerptTailer r = chronicle.createTailer(); Chronicle chronicle2 = null; try { chronicle2 = ChronicleQueueBuilder.indexed(basePath2).build(); } catch (FileNotFoundException e) { System.in.read(); } final ExcerptAppender w = chronicle2.createAppender(); for (int i = 0; i < runs; i += size) { do { } while (!r.nextIndex()); w.startExcerpt(); for (int s = 0; s < size; s++) w.writeInt(r.readInt()); r.finish(); w.finish(); } w.close(); // chronicle.close(); // chronicle2.close(); } catch (IOException e) { e.printStackTrace(); } } }, "t2"); t2.start(); final Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath2).build(); final ExcerptTailer r = chronicle.createTailer(); for (int i = 0; i < runs; i += size) { do { } while (!r.nextIndex()); try { for (int s = 0; s < size; s++) { long l = r.readInt(); if (l != i + 1) throw new AssertionError(); } r.finish(); } catch (Exception e) { System.err.println("i= " + i); e.printStackTrace(); break; } } r.close(); long rate = 2 * runs / size * 10000L / (System.nanoTime() - start); System.out.println("Rate = " + rate / 10.0 + " Mmsg/sec"); chronicle.close(); Thread.sleep(200); 
ChronicleTools.deleteOnExit(basePath1); ChronicleTools.deleteOnExit(basePath2); } @Test public void testOneAtATime() throws IOException { final String basePath = getTestPath(); final File indexFile = new File(basePath + ".index"); for (int i = 0; i < 1000; i++) { //if (i % 10 == 0) // System.out.println("i: " + i); long indexFileSize = indexFile.length(); final Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath) .test() .indexBlockSize(128) .dataBlockSize(128) .build(); assertEquals("Index should not grow on open (i=" + i + ")", indexFileSize, indexFile.length()); if (i == 0) { ExcerptTailer tailer = chronicle.createTailer(); assertFalse(tailer.nextIndex()); Excerpt excerpt = chronicle.createExcerpt(); assertFalse(excerpt.index(0)); } ExcerptAppender appender = chronicle.createAppender(); appender.startExcerpt(); appender.writeDouble(i); appender.finish(); // ChronicleIndexReader.main(basePath+".index"); ExcerptTailer tailer = chronicle.createTailer(); long[] indexes = new long[i + 1]; long lastIndex = -1; for (int j = 0; j <= i; j++) { assertTrue(tailer.nextIndex()); assertTrue(tailer.index() + " > " + lastIndex, tailer.index() > lastIndex); lastIndex = tailer.index(); double d = tailer.readDouble(); assertEquals(j, d, 0.0); assertEquals(0, tailer.remaining()); indexes[j] = tailer.index(); tailer.finish(); } assertFalse(tailer.nextIndex()); Excerpt excerpt = chronicle.createExcerpt(); // forward for (int j = 0; j < i; j++) { assertTrue(excerpt.index(indexes[j])); double d = excerpt.readDouble(); assertEquals(j, d, 0.0); assertEquals(0, excerpt.remaining()); excerpt.finish(); } assertFalse(excerpt.index(indexes[indexes.length - 1] + 1)); // backward for (int j = i - 1; j >= 0; j--) { assertTrue(excerpt.index(indexes[j])); double d = excerpt.readDouble(); assertEquals(j, d, 0.0); assertEquals(0, excerpt.remaining()); excerpt.finish(); } assertFalse(excerpt.index(-1)); chronicle.close(); } assertClean(basePath); } @Test public void testFindRange() throws 
IOException { final String basePath = getTestPath(); final Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath).build(); try { ExcerptAppender appender = chronicle.createAppender(); List<Integer> ints = new ArrayList<Integer>(); for (int i = 0; i < 1000; i += 10) { appender.startExcerpt(); appender.writeInt(i); appender.finish(); ints.add(i); } Excerpt excerpt = chronicle.createExcerpt(); final MyExcerptComparator mec = new MyExcerptComparator(); // exact matches at a the start mec.lo = mec.hi = -1; assertEquals(~0, excerpt.findMatch(mec)); mec.lo = mec.hi = 0; assertEquals(0, excerpt.findMatch(mec)); mec.lo = mec.hi = 9; assertEquals(~1, excerpt.findMatch(mec)); mec.lo = mec.hi = 10; assertEquals(1, excerpt.findMatch(mec)); // exact matches at a the end mec.lo = mec.hi = 980; assertEquals(98, excerpt.findMatch(mec)); mec.lo = mec.hi = 981; assertEquals(~99, excerpt.findMatch(mec)); mec.lo = mec.hi = 990; assertEquals(99, excerpt.findMatch(mec)); mec.lo = mec.hi = 1000; assertEquals(~100, excerpt.findMatch(mec)); // range match near the start long[] startEnd = new long[2]; mec.lo = 0; mec.hi = 3; excerpt.findRange(startEnd, mec); assertEquals("[0, 1]", Arrays.toString(startEnd)); mec.lo = 21; mec.hi = 29; excerpt.findRange(startEnd, mec); assertEquals("[3, 3]", Arrays.toString(startEnd)); /* mec.lo = 129; mec.hi = 631; testSearchRange(ints, excerpt, mec, startEnd); */ Random rand = new Random(1); for (int i = 0; i < 1000; i++) { int x = rand.nextInt(1010) - 5; int y = rand.nextInt(1010) - 5; mec.lo = Math.min(x, y); mec.hi = Math.max(x, y); testSearchRange(ints, excerpt, mec, startEnd); } } finally { chronicle.close(); assertClean(basePath); } } @Test public void testParseLines() throws IOException { final String basePath = getTestPath(); final Chronicle chronicle = ChronicleQueueBuilder.indexed(basePath).build(); try { ExcerptAppender appender = chronicle.createAppender(); int runs = 10000; for (int i = 0; i < runs; i++) { appender.startExcerpt(); 
appender.append("Hello world ").append(i).append("\n"); appender.finish(); } ExcerptTailer tailer = chronicle.createTailer(); for (int i = 0; i < runs; i++) { assertTrue(tailer.nextIndex()); String s = tailer.parseUTF(StopCharTesters.CONTROL_STOP); assertEquals("Hello world " + i, s); tailer.finish(); } tailer.close(); } finally { chronicle.close(); assertClean(basePath); } } }
/*   Copyright (C) 2013-2014 Computer Sciences Corporation
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License. */

/**
 * Autogenerated by Thrift Compiler (0.9.1)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package ezbake.services.deploy.thrift;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Thrift-generated struct pairing a deployment's metadata with its binary
 * artifact payload. Both fields are REQUIRED by the IDL (see validate()).
 * NOTE(review): generated code — regenerate from the .thrift IDL rather than
 * hand-editing.
 */
public class DeploymentArtifact implements org.apache.thrift.TBase<DeploymentArtifact, DeploymentArtifact._Fields>, java.io.Serializable, Cloneable, Comparable<DeploymentArtifact> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DeploymentArtifact");

  private static final org.apache.thrift.protocol.TField METADATA_FIELD_DESC = new org.apache.thrift.protocol.TField("metadata", org.apache.thrift.protocol.TType.STRUCT, (short)1);
  private static final org.apache.thrift.protocol.TField ARTIFACT_FIELD_DESC = new org.apache.thrift.protocol.TField("artifact", org.apache.thrift.protocol.TType.STRING, (short)2);

  // Serialization strategies: compact field-by-field (standard) vs. dense tuple.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new DeploymentArtifactStandardSchemeFactory());
    schemes.put(TupleScheme.class, new DeploymentArtifactTupleSchemeFactory());
  }

  public ezbake.services.deploy.thrift.DeploymentMetadata metadata; // required
  public ByteBuffer artifact; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    METADATA((short)1, "metadata"),
    ARTIFACT((short)2, "artifact");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // METADATA
          return METADATA;
        case 2: // ARTIFACT
          return ARTIFACT;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.METADATA, new org.apache.thrift.meta_data.FieldMetaData("metadata", org.apache.thrift.TFieldRequirementType.REQUIRED, 
        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ezbake.services.deploy.thrift.DeploymentMetadata.class)));
    tmpMap.put(_Fields.ARTIFACT, new org.apache.thrift.meta_data.FieldMetaData("artifact", org.apache.thrift.TFieldRequirementType.REQUIRED, 
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DeploymentArtifact.class, metaDataMap);
  }

  public DeploymentArtifact() {
  }

  public DeploymentArtifact(
    ezbake.services.deploy.thrift.DeploymentMetadata metadata,
    ByteBuffer artifact)
  {
    this();
    this.metadata = metadata;
    this.artifact = artifact;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public DeploymentArtifact(DeploymentArtifact other) {
    if (other.isSetMetadata()) {
      this.metadata = new ezbake.services.deploy.thrift.DeploymentMetadata(other.metadata);
    }
    if (other.isSetArtifact()) {
      this.artifact = org.apache.thrift.TBaseHelper.copyBinary(other.artifact);
; // NOTE(review): stray empty statement emitted by the generator — harmless.
    }
  }

  public DeploymentArtifact deepCopy() {
    return new DeploymentArtifact(this);
  }

  @Override
  public void clear() {
    this.metadata = null;
    this.artifact = null;
  }

  public ezbake.services.deploy.thrift.DeploymentMetadata getMetadata() {
    return this.metadata;
  }

  public DeploymentArtifact setMetadata(ezbake.services.deploy.thrift.DeploymentMetadata metadata) {
    this.metadata = metadata;
    return this;
  }

  public void unsetMetadata() {
    this.metadata = null;
  }

  /** Returns true if field metadata is set (has been assigned a value) and false otherwise */
  public boolean isSetMetadata() {
    return this.metadata != null;
  }

  public void setMetadataIsSet(boolean value) {
    if (!value) {
      this.metadata = null;
    }
  }

  // NOTE(review): rightSize() may replace the buffer so that array() is valid;
  // this getter therefore mutates the artifact field as a side effect.
  public byte[] getArtifact() {
    setArtifact(org.apache.thrift.TBaseHelper.rightSize(artifact));
    return artifact == null ? null : artifact.array();
  }

  public ByteBuffer bufferForArtifact() {
    return artifact;
  }

  public DeploymentArtifact setArtifact(byte[] artifact) {
    setArtifact(artifact == null ? (ByteBuffer)null : ByteBuffer.wrap(artifact));
    return this;
  }

  public DeploymentArtifact setArtifact(ByteBuffer artifact) {
    this.artifact = artifact;
    return this;
  }

  public void unsetArtifact() {
    this.artifact = null;
  }

  /** Returns true if field artifact is set (has been assigned a value) and false otherwise */
  public boolean isSetArtifact() {
    return this.artifact != null;
  }

  public void setArtifactIsSet(boolean value) {
    if (!value) {
      this.artifact = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case METADATA:
      if (value == null) {
        unsetMetadata();
      } else {
        setMetadata((ezbake.services.deploy.thrift.DeploymentMetadata)value);
      }
      break;

    case ARTIFACT:
      if (value == null) {
        unsetArtifact();
      } else {
        setArtifact((ByteBuffer)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case METADATA:
      return getMetadata();

    case ARTIFACT:
      return getArtifact();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case METADATA:
      return isSetMetadata();
    case ARTIFACT:
      return isSetArtifact();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof DeploymentArtifact)
      return this.equals((DeploymentArtifact)that);
    return false;
  }

  public boolean equals(DeploymentArtifact that) {
    if (that == null)
      return false;

    boolean this_present_metadata = true && this.isSetMetadata();
    boolean that_present_metadata = true && that.isSetMetadata();
    if (this_present_metadata || that_present_metadata) {
      if (!(this_present_metadata && that_present_metadata))
        return false;
      if (!this.metadata.equals(that.metadata))
        return false;
    }

    boolean this_present_artifact = true && this.isSetArtifact();
    boolean that_present_artifact = true && that.isSetArtifact();
    if (this_present_artifact || that_present_artifact) {
      if (!(this_present_artifact && that_present_artifact))
        return false;
      if (!this.artifact.equals(that.artifact))
        return false;
    }

    return true;
  }

  // NOTE(review): constant hash satisfies the equals/hashCode contract but
  // degrades every hash-based container to a linked list. This is what Thrift
  // 0.9.1 generates; newer compilers emit a real hash. Fix by regenerating.
  @Override
  public int hashCode() {
    return 0;
  }

  @Override
  public int compareTo(DeploymentArtifact other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetMetadata()).compareTo(other.isSetMetadata());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMetadata()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.metadata, other.metadata);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetArtifact()).compareTo(other.isSetArtifact());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetArtifact()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.artifact, other.artifact);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("DeploymentArtifact(");
    boolean first = true;

    sb.append("metadata:");
    if (this.metadata == null) {
      sb.append("null");
    } else {
      sb.append(this.metadata);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("artifact:");
    if (this.artifact == null) {
      sb.append("null");
    } else {
      org.apache.thrift.TBaseHelper.toString(this.artifact, sb);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (metadata == null) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'metadata' was not present! Struct: " + toString());
    }
    if (artifact == null) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'artifact' was not present! Struct: " + toString());
    }
    // check for sub-struct validity
    if (metadata != null) {
      metadata.validate();
    }
  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      // Java serialization delegates to the Thrift compact protocol.
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class DeploymentArtifactStandardSchemeFactory implements SchemeFactory {
    public DeploymentArtifactStandardScheme getScheme() {
      return new DeploymentArtifactStandardScheme();
    }
  }

  private static class DeploymentArtifactStandardScheme extends StandardScheme<DeploymentArtifact> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, DeploymentArtifact struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
          break;
        }
        switch (schemeField.id) {
          case 1: // METADATA
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.metadata = new ezbake.services.deploy.thrift.DeploymentMetadata();
              struct.metadata.read(iprot);
              struct.setMetadataIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // ARTIFACT
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.artifact = iprot.readBinary();
              struct.setArtifactIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, DeploymentArtifact struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.metadata != null) {
        oprot.writeFieldBegin(METADATA_FIELD_DESC);
        struct.metadata.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.artifact != null) {
        oprot.writeFieldBegin(ARTIFACT_FIELD_DESC);
        oprot.writeBinary(struct.artifact);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class DeploymentArtifactTupleSchemeFactory implements SchemeFactory {
    public DeploymentArtifactTupleScheme getScheme() {
      return new DeploymentArtifactTupleScheme();
    }
  }

  private static class DeploymentArtifactTupleScheme extends TupleScheme<DeploymentArtifact> {

    // Both fields are required, so the tuple scheme writes them unconditionally
    // with no optional-field bitset.
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, DeploymentArtifact struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      struct.metadata.write(oprot);
      oprot.writeBinary(struct.artifact);
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, DeploymentArtifact struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.metadata = new ezbake.services.deploy.thrift.DeploymentMetadata();
      struct.metadata.read(iprot);
      struct.setMetadataIsSet(true);
      struct.artifact = iprot.readBinary();
      struct.setArtifactIsSet(true);
    }
  }

}
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.api.model;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

import com.cloudera.api.ApiUtils;
import com.google.common.base.Objects;

/**
 * Replication arguments for HDFS.
 */
@XmlRootElement(name = "hdfsReplicationArguments")
public class ApiHdfsReplicationArguments {
  private ApiServiceRef sourceService;
  private String sourcePath;
  private String destinationPath;
  private String mapreduceServiceName;
  private String schedulerPoolName;
  private String userName;
  private Integer numMaps;
  private boolean dryRun;
  private Integer bandwidthPerMap;
  private boolean abortOnError;
  private boolean removeMissingFiles;
  private boolean preserveReplicationCount;
  private boolean preserveBlockSize;
  private boolean preservePermissions;
  private String logPath;
  private boolean skipChecksumChecks;
  private Boolean skipTrash;
  private ReplicationStrategy replicationStrategy;
  private Boolean preserveXAttrs;

  /**
   * The strategy for distributing the file replication tasks among the mappers
   * of the MR job associated with a replication.
   */
  public enum ReplicationStrategy {
    /**
     * Distributes file replication tasks among the mappers up front, trying to
     * achieve a uniform distribution based on the file sizes.
     */
    STATIC,
    /**
     * Distributes file replication tasks in small sets to the mappers, and
     * as each mapper is done processing its set of tasks, it picks up and
     * processes the next unallocated set of tasks.
     */
    DYNAMIC
  };

  // For JAX-B
  public ApiHdfsReplicationArguments() {
  }

  public ApiHdfsReplicationArguments(ApiServiceRef sourceService,
      String sourcePath, String destinationPath, String mapreduceServiceName,
      Integer numMaps, String userName) {
    this.sourceService = sourceService;
    this.sourcePath = sourcePath;
    this.destinationPath = destinationPath;
    this.mapreduceServiceName = mapreduceServiceName;
    this.numMaps = numMaps;
    this.userName = userName;
  }

  /** The service to replicate from. */
  @XmlElement
  public ApiServiceRef getSourceService() {
    return sourceService;
  }

  public void setSourceService(ApiServiceRef sourceService) {
    this.sourceService = sourceService;
  }

  /** The path to replicate. */
  @XmlElement
  public String getSourcePath() {
    return sourcePath;
  }

  public void setSourcePath(String path) {
    this.sourcePath = path;
  }

  /** The destination to replicate to. */
  @XmlElement
  public String getDestinationPath() {
    return destinationPath;
  }

  public void setDestinationPath(String path) {
    this.destinationPath = path;
  }

  /** The mapreduce service to use for the replication job. */
  @XmlElement
  public String getMapreduceServiceName() {
    return mapreduceServiceName;
  }

  public void setMapreduceServiceName(String name) {
    this.mapreduceServiceName = name;
  }

  /**
   * Name of the scheduler pool to use when submitting the MapReduce job.
   * Currently supports the capacity and fair schedulers. The option is
   * ignored if a different scheduler is configured.
   */
  @XmlElement
  public String getSchedulerPoolName() {
    return schedulerPoolName;
  }

  public void setSchedulerPoolName(String schedulerPoolName) {
    this.schedulerPoolName = schedulerPoolName;
  }

  /**
   * The user which will execute the MapReduce job. Required if running with
   * Kerberos enabled.
   * NOTE(review): userName participates in the constructor and JAX-B binding
   * but is excluded from equals(), hashCode() and toString() below — confirm
   * whether that omission is intentional.
   */
  @XmlElement
  public String getUserName() {
    return userName;
  }

  public void setUserName(String userName) {
    this.userName = userName;
  }

  /** The number of mappers to use for the mapreduce replication job. */
  @XmlElement
  public Integer getNumMaps() {
    return numMaps;
  }

  public void setNumMaps(Integer numMaps) {
    this.numMaps = numMaps;
  }

  /** Whether to perform a dry run. Defaults to false. */
  @XmlElement
  public boolean isDryRun() {
    return dryRun;
  }

  public void setDryRun(boolean dryRun) {
    this.dryRun = dryRun;
  }

  /**
   * The maximum bandwidth (in MB) per mapper in the mapreduce replication
   * job.
   */
  @XmlElement
  public Integer getBandwidthPerMap() {
    return bandwidthPerMap;
  }

  public void setBandwidthPerMap(Integer bandwidthPerMap) {
    this.bandwidthPerMap = bandwidthPerMap;
  }

  /** Whether to abort on a replication failure. Defaults to false. */
  @XmlElement
  public boolean getAbortOnError() {
    return abortOnError;
  }

  public void setAbortOnError(boolean abortOnError) {
    this.abortOnError = abortOnError;
  }

  /**
   * Whether to delete destination files that are missing in source. Defaults
   * to false.
   */
  @XmlElement
  public boolean getRemoveMissingFiles() {
    return removeMissingFiles;
  }

  public void setRemoveMissingFiles(boolean removeMissingFiles) {
    this.removeMissingFiles = removeMissingFiles;
  }

  /** Whether to preserve the HDFS replication count. Defaults to false. */
  @XmlElement
  public boolean getPreserveReplicationCount() {
    return preserveReplicationCount;
  }

  public void setPreserveReplicationCount(boolean preserveReplicationCount) {
    this.preserveReplicationCount = preserveReplicationCount;
  }

  /** Whether to preserve the HDFS block size. Defaults to false. */
  @XmlElement
  public boolean getPreserveBlockSize() {
    return preserveBlockSize;
  }

  public void setPreserveBlockSize(boolean preserveBlockSize) {
    this.preserveBlockSize = preserveBlockSize;
  }

  /**
   * Whether to preserve the HDFS owner, group and permissions. Defaults to
   * false.
   * Starting from V10, it also preserves ACLs. Defaults to null (no preserve).
   * ACLs is preserved if both clusters enable ACL support, and replication
   * ignores any ACL related failures.
   */
  @XmlElement
  public boolean getPreservePermissions() {
    return preservePermissions;
  }

  public void setPreservePermissions(boolean preservePermissions) {
    this.preservePermissions = preservePermissions;
  }

  /** The HDFS path where the replication log files should be written to. */
  @XmlElement
  public String getLogPath() {
    return logPath;
  }

  public void setLogPath(String logPath) {
    this.logPath = logPath;
  }

  /**
   * Whether to skip checksum based file validation/comparison during
   * replication. Defaults to false.
   */
  @XmlElement
  public boolean getSkipChecksumChecks() {
    return skipChecksumChecks;
  }

  public void setSkipChecksumChecks(boolean skipChecksumChecks) {
    this.skipChecksumChecks = skipChecksumChecks;
  }

  /**
   * Whether to permanently delete destination files that are missing in source.
   * Defaults to null.
   */
  @XmlElement
  public Boolean getSkipTrash() {
    return skipTrash;
  }

  public void setSkipTrash(Boolean skipTrash) {
    this.skipTrash = skipTrash;
  }

  /**
   * The strategy for distributing the file replication tasks among the mappers
   * of the MR job associated with a replication. Default is
   * {@link ReplicationStrategy#STATIC}.
   */
  @XmlElement
  public ReplicationStrategy getReplicationStrategy() {
    return replicationStrategy;
  }

  public void setReplicationStrategy(ReplicationStrategy replicationStrategy) {
    this.replicationStrategy = replicationStrategy;
  }

  /**
   * Whether to preserve XAttrs, default to false
   * This is introduced in V10. To preserve XAttrs, both CDH versions
   * should be >= 5.2. Replication fails if either cluster does not support
   * XAttrs.
   */
  @XmlElement
  public Boolean getPreserveXAttrs() {
    return preserveXAttrs;
  }

  public void setPreserveXAttrs(Boolean preserveXAttrs) {
    this.preserveXAttrs = preserveXAttrs;
  }

  // NOTE(review): userName is intentionally(?) absent from toString, equals
  // and hashCode — see the note on getUserName().
  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("sourceService", sourceService)
        .add("sourcePath", sourcePath)
        .add("destinationPath", destinationPath)
        .add("mapreduceServiceName", mapreduceServiceName)
        .add("schedulerPoolName", schedulerPoolName)
        .add("numMaps", numMaps)
        .add("dryRun", dryRun)
        .add("bandwidthPerMap", bandwidthPerMap)
        .add("abortOnError", abortOnError)
        .add ("removeMissingFiles", removeMissingFiles)
        .add("preserveReplicationCount", preserveReplicationCount)
        .add("preserveBlockSize", preserveBlockSize)
        .add("preservePermissions", preservePermissions)
        .add("logPath", logPath)
        .add("skipChecksumChecks", skipChecksumChecks)
        .add("skipTrash", skipTrash)
        .add("replicationStrategy", replicationStrategy)
        .add("preserveXAttrs", preserveXAttrs)
        .toString();
  }

  @Override
  public boolean equals(Object o) {
    ApiHdfsReplicationArguments other = ApiUtils.baseEquals(this, o);
    return this == other || (other != null &&
        Objects.equal(sourceService, other.getSourceService()) &&
        Objects.equal(sourcePath, other.getSourcePath()) &&
        Objects.equal(destinationPath, other.getDestinationPath()) &&
        Objects.equal(mapreduceServiceName, other.getMapreduceServiceName()) &&
        Objects.equal(schedulerPoolName, other.getSchedulerPoolName()) &&
        Objects.equal(numMaps, other.getNumMaps()) &&
        dryRun == other.isDryRun() &&
        Objects.equal(bandwidthPerMap, other.getBandwidthPerMap()) &&
        abortOnError == other.getAbortOnError() &&
        removeMissingFiles == other.getRemoveMissingFiles() &&
        preserveReplicationCount == other.getPreserveReplicationCount() &&
        preserveBlockSize == other.getPreserveBlockSize() &&
        preservePermissions == other.getPreservePermissions() &&
        Objects.equal(logPath, other.getLogPath()) &&
        skipChecksumChecks == other.getSkipChecksumChecks() &&
        Objects.equal(skipTrash, other.getSkipTrash()) &&
        Objects.equal(replicationStrategy, other.getReplicationStrategy()) &&
        Objects.equal(preserveXAttrs, other.getPreserveXAttrs()));
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(sourceService, sourcePath, destinationPath,
        mapreduceServiceName, schedulerPoolName, numMaps, dryRun,
        bandwidthPerMap, abortOnError, removeMissingFiles,
        preserveReplicationCount, preserveBlockSize, preservePermissions,
        logPath, skipChecksumChecks, skipTrash, replicationStrategy,
        preserveXAttrs);
  }
}
/*
 * Copyright (C) 2015 Giuseppe Cardone <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package uk.ac.ebi.spot.goci.service.junidecode;

/**
 * Character map for Unicode characters with codepoint U+12xx (part of the
 * Ethiopic syllabary block). Each array slot holds the ASCII transliteration
 * for the codepoint {@code U+1200 + index}; slots marked {@code "[?]"} are
 * codepoints with no transliteration (unassigned or unmapped).
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X12 {

    /**
     * Transliteration table indexed by the low byte of the codepoint
     * (0x00-0xff). Read-only by convention; callers index it directly.
     */
    public static final String[] map = new String[]{
        "ha", // 0x00
        "hu", // 0x01
        "hi", // 0x02
        "haa", // 0x03
        "hee", // 0x04
        "he", // 0x05
        "ho", // 0x06
        "[?]", // 0x07
        "la", // 0x08
        "lu", // 0x09
        "li", // 0x0a
        "laa", // 0x0b
        "lee", // 0x0c
        "le", // 0x0d
        "lo", // 0x0e
        "lwa", // 0x0f
        "hha", // 0x10
        "hhu", // 0x11
        "hhi", // 0x12
        "hhaa", // 0x13
        "hhee", // 0x14
        "hhe", // 0x15
        "hho", // 0x16
        "hhwa", // 0x17
        "ma", // 0x18
        "mu", // 0x19
        "mi", // 0x1a
        "maa", // 0x1b
        "mee", // 0x1c
        "me", // 0x1d
        "mo", // 0x1e
        "mwa", // 0x1f
        "sza", // 0x20
        "szu", // 0x21
        "szi", // 0x22
        "szaa", // 0x23
        "szee", // 0x24
        "sze", // 0x25
        "szo", // 0x26
        "szwa", // 0x27
        "ra", // 0x28
        "ru", // 0x29
        "ri", // 0x2a
        "raa", // 0x2b
        "ree", // 0x2c
        "re", // 0x2d
        "ro", // 0x2e
        "rwa", // 0x2f
        "sa", // 0x30
        "su", // 0x31
        "si", // 0x32
        "saa", // 0x33
        "see", // 0x34
        "se", // 0x35
        "so", // 0x36
        "swa", // 0x37
        "sha", // 0x38
        "shu", // 0x39
        "shi", // 0x3a
        "shaa", // 0x3b
        "shee", // 0x3c
        "she", // 0x3d
        "sho", // 0x3e
        "shwa", // 0x3f
        "qa", // 0x40
        "qu", // 0x41
        "qi", // 0x42
        "qaa", // 0x43
        "qee", // 0x44
        "qe", // 0x45
        "qo", // 0x46
        "[?]", // 0x47
        "qwa", // 0x48
        "[?]", // 0x49
        "qwi", // 0x4a
        "qwaa", // 0x4b
        "qwee", // 0x4c
        "qwe", // 0x4d
        "[?]", // 0x4e
        "[?]", // 0x4f
        "qha", // 0x50
        "qhu", // 0x51
        "qhi", // 0x52
        "qhaa", // 0x53
        "qhee", // 0x54
        "qhe", // 0x55
        "qho", // 0x56
        "[?]", // 0x57
        "qhwa", // 0x58
        "[?]", // 0x59
        "qhwi", // 0x5a
        "qhwaa", // 0x5b
        "qhwee", // 0x5c
        "qhwe", // 0x5d
        "[?]", // 0x5e
        "[?]", // 0x5f
        "ba", // 0x60
        "bu", // 0x61
        "bi", // 0x62
        "baa", // 0x63
        "bee", // 0x64
        "be", // 0x65
        "bo", // 0x66
        "bwa", // 0x67
        "va", // 0x68
        "vu", // 0x69
        "vi", // 0x6a
        "vaa", // 0x6b
        "vee", // 0x6c
        "ve", // 0x6d
        "vo", // 0x6e
        "vwa", // 0x6f
        "ta", // 0x70
        "tu", // 0x71
        "ti", // 0x72
        "taa", // 0x73
        "tee", // 0x74
        "te", // 0x75
        "to", // 0x76
        "twa", // 0x77
        "ca", // 0x78
        "cu", // 0x79
        "ci", // 0x7a
        "caa", // 0x7b
        "cee", // 0x7c
        "ce", // 0x7d
        "co", // 0x7e
        "cwa", // 0x7f
        "xa", // 0x80
        "xu", // 0x81
        "xi", // 0x82
        "xaa", // 0x83
        "xee", // 0x84
        "xe", // 0x85
        "xo", // 0x86
        "[?]", // 0x87
        "xwa", // 0x88
        "[?]", // 0x89
        "xwi", // 0x8a
        "xwaa", // 0x8b
        "xwee", // 0x8c
        "xwe", // 0x8d
        "[?]", // 0x8e
        "[?]", // 0x8f
        "na", // 0x90
        "nu", // 0x91
        "ni", // 0x92
        "naa", // 0x93
        "nee", // 0x94
        "ne", // 0x95
        "no", // 0x96
        "nwa", // 0x97
        "nya", // 0x98
        "nyu", // 0x99
        "nyi", // 0x9a
        "nyaa", // 0x9b
        "nyee", // 0x9c
        "nye", // 0x9d
        "nyo", // 0x9e
        "nywa", // 0x9f
        "\'a", // 0xa0
        "\'u", // 0xa1
        "[?]", // 0xa2
        "\'aa", // 0xa3
        "\'ee", // 0xa4
        "\'e", // 0xa5
        "\'o", // 0xa6
        "\'wa", // 0xa7
        "ka", // 0xa8
        "ku", // 0xa9
        "ki", // 0xaa
        "kaa", // 0xab
        "kee", // 0xac
        "ke", // 0xad
        "ko", // 0xae
        "[?]", // 0xaf
        "kwa", // 0xb0
        "[?]", // 0xb1
        "kwi", // 0xb2
        "kwaa", // 0xb3
        "kwee", // 0xb4
        "kwe", // 0xb5
        "[?]", // 0xb6
        "[?]", // 0xb7
        "kxa", // 0xb8
        "kxu", // 0xb9
        "kxi", // 0xba
        "kxaa", // 0xbb
        "kxee", // 0xbc
        "kxe", // 0xbd
        "kxo", // 0xbe
        "[?]", // 0xbf
        "kxwa", // 0xc0
        "[?]", // 0xc1
        "kxwi", // 0xc2
        "kxwaa", // 0xc3
        "kxwee", // 0xc4
        "kxwe", // 0xc5
        "[?]", // 0xc6
        "[?]", // 0xc7
        "wa", // 0xc8
        "wu", // 0xc9
        "wi", // 0xca
        "waa", // 0xcb
        "wee", // 0xcc
        "we", // 0xcd
        "wo", // 0xce
        "[?]", // 0xcf
        "`a", // 0xd0
        "`u", // 0xd1
        "`i", // 0xd2
        "`aa", // 0xd3
        "`ee", // 0xd4
        "`e", // 0xd5
        "`o", // 0xd6
        "[?]", // 0xd7
        "za", // 0xd8
        "zu", // 0xd9
        "zi", // 0xda
        "zaa", // 0xdb
        "zee", // 0xdc
        "ze", // 0xdd
        "zo", // 0xde
        "zwa", // 0xdf
        "zha", // 0xe0
        "zhu", // 0xe1
        "zhi", // 0xe2
        "zhaa", // 0xe3
        "zhee", // 0xe4
        "zhe", // 0xe5
        "zho", // 0xe6
        "zhwa", // 0xe7
        "ya", // 0xe8
        "yu", // 0xe9
        "yi", // 0xea
        "yaa", // 0xeb
        "yee", // 0xec
        "ye", // 0xed
        "yo", // 0xee
        "[?]", // 0xef
        "da", // 0xf0
        "du", // 0xf1
        "di", // 0xf2
        "daa", // 0xf3
        "dee", // 0xf4
        "de", // 0xf5
        "do", // 0xf6
        "dwa", // 0xf7
        "dda", // 0xf8
        "ddu", // 0xf9
        "ddi", // 0xfa
        "ddaa", // 0xfb
        "ddee", // 0xfc
        "dde", // 0xfd
        "ddo", // 0xfe
        "ddwa" // 0xff
    };
}
/*
 * Copyright 2004 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.jscomp;

import static com.google.common.base.Strings.nullToEmpty;

import com.google.common.base.Preconditions;
import com.google.javascript.rhino.InputId;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Set;

/**
 * NodeTraversal allows an iteration through the nodes in the parse tree,
 * and facilitates the optimizations on the parse tree.
 *
 * <p>Scopes and control-flow graphs are created lazily: the traversal only
 * records scope-root nodes as it descends, and materializes actual
 * {@link Scope} / CFG objects on first request.</p>
 */
public class NodeTraversal {

  /** Compiler instance this traversal reports to and reads state from. */
  private final AbstractCompiler compiler;

  /** Client callback invoked on every traversed node. */
  private final Callback callback;

  /** Contains the current node */
  private Node curNode;

  /**
   * Stack containing the Scopes that have been created. The Scope objects
   * are lazily created; so the {@code scopeRoots} stack contains the
   * Nodes for all Scopes that have not been created yet.
   */
  private final Deque<Scope> scopes = new ArrayDeque<>();

  /**
   * A stack of scope roots. All scopes that have not been created
   * are represented in this Deque.
   */
  private final ArrayDeque<Node> scopeRoots = new ArrayDeque<>();

  /**
   * Stack containing the control flow graphs (CFG) that have been created. There are fewer CFGs
   * than scopes, since block-level scopes are not valid CFG roots. The CFG objects are lazily
   * populated: elements are simply the CFG root node until requested by {@link
   * #getControlFlowGraph()}.
   */
  private final ArrayDeque<Object> cfgs = new ArrayDeque<>();

  /** The current source file name */
  private String sourceName;

  /** The current input */
  private InputId inputId;

  // Lazily resolved from inputId by getInput(); reset whenever the input changes.
  private CompilerInput compilerInput;

  /** The scope creator */
  private final ScopeCreator scopeCreator;

  // Cached from scopeCreator.hasBlockScope(); gates block-scope handling in traverseBranch.
  private final boolean useBlockScope;

  /** Possible callback for scope entry and exit. Non-null only when the
   * client callback implements {@link ScopedCallback}. */
  private ScopedCallback scopeCallback;

  /** Callback for passes that iterate over a list of functions */
  public interface FunctionCallback {
    void enterFunction(AbstractCompiler compiler, Node fnRoot);
  }

  /**
   * Callback for tree-based traversals
   */
  public interface Callback {
    /**
     * <p>Visits a node in pre order (before visiting its children) and decides
     * whether this node's children should be traversed. If children are
     * traversed, they will be visited by
     * {@link #visit(NodeTraversal, Node, Node)} in postorder.</p>
     * <p>Implementations can have side effects (e.g. modifying the parse
     * tree).</p>
     * @return whether the children of this node should be visited
     */
    boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent);

    /**
     * <p>Visits a node in postorder (after its children have been visited).
     * A node is visited only if all its parents should be traversed
     * ({@link #shouldTraverse(NodeTraversal, Node, Node)}).</p>
     * <p>Implementations can have side effects (e.g. modifying the parse
     * tree).</p>
     */
    void visit(NodeTraversal t, Node n, Node parent);
  }

  /**
   * Callback that also knows about scope changes
   */
  public interface ScopedCallback extends Callback {
    /**
     * Called immediately after entering a new scope. The new scope can
     * be accessed through t.getScope()
     */
    void enterScope(NodeTraversal t);

    /**
     * Called immediately before exiting a scope. The ending scope can
     * be accessed through t.getScope()
     */
    void exitScope(NodeTraversal t);
  }

  /**
   * Abstract callback to visit all nodes in postorder.
   */
  public abstract static class AbstractPostOrderCallback implements Callback {
    @Override
    public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      return true;
    }
  }

  /** Abstract callback to visit all nodes in preorder. */
  public abstract static class AbstractPreOrderCallback implements Callback {
    @Override
    public final void visit(NodeTraversal t, Node n, Node parent) {}
  }

  /** Abstract scoped callback to visit all nodes in postorder. */
  public abstract static class AbstractScopedCallback implements ScopedCallback {
    @Override
    public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      return true;
    }

    @Override
    public void enterScope(NodeTraversal t) {}

    @Override
    public void exitScope(NodeTraversal t) {}
  }

  /**
   * Abstract callback to visit all nodes but not traverse into function
   * bodies.
   */
  public abstract static class AbstractShallowCallback implements Callback {
    @Override
    public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      // We do want to traverse the name of a named function, but we don't
      // want to traverse the arguments or body.
      return parent == null || !parent.isFunction() || n == parent.getFirstChild();
    }
  }

  /**
   * Abstract callback to visit all structure and statement nodes but doesn't traverse into
   * functions or expressions.
   */
  public abstract static class AbstractShallowStatementCallback implements Callback {
    @Override
    public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      return parent == null || NodeUtil.isControlStructure(parent)
          || NodeUtil.isStatementBlock(parent);
    }
  }

  /**
   * Abstract callback that knows when goog.modules (and in the future ES6 modules) are entered
   * and exited. This includes both whole file modules and bundled modules.
   */
  public abstract static class AbstractModuleCallback implements ScopedCallback {
    /**
     * Called immediately after entering a module.
     */
    public abstract void enterModule(NodeTraversal t, Node scopeRoot);

    /**
     * Called immediately before exiting a module.
     */
    public abstract void exitModule(NodeTraversal t, Node scopeRoot);

    @Override
    public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
      return true;
    }

    @Override
    public final void enterScope(NodeTraversal t) {
      // Only module-root scopes are reported; other scope entries are ignored.
      Node scopeRoot = t.getScopeRoot();
      if (NodeUtil.isModuleScopeRoot(scopeRoot)) {
        enterModule(t, scopeRoot);
      }
    }

    @Override
    public final void exitScope(NodeTraversal t) {
      Node scopeRoot = t.getScopeRoot();
      if (NodeUtil.isModuleScopeRoot(scopeRoot)) {
        exitModule(t, scopeRoot);
      }
    }
  }

  /**
   * Abstract callback to visit a pruned set of nodes.
   */
  public abstract static class AbstractNodeTypePruningCallback implements Callback {
    private final Set<Token> nodeTypes;
    private final boolean include;

    /**
     * Creates an abstract pruned callback.
     * @param nodeTypes the nodes to include in the traversal
     */
    public AbstractNodeTypePruningCallback(Set<Token> nodeTypes) {
      this(nodeTypes, true);
    }

    /**
     * Creates an abstract pruned callback.
     * @param nodeTypes the nodes to include/exclude in the traversal
     * @param include whether to include or exclude the nodes in the traversal
     */
    public AbstractNodeTypePruningCallback(Set<Token> nodeTypes, boolean include) {
      this.nodeTypes = nodeTypes;
      this.include = include;
    }

    @Override
    public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      return include == nodeTypes.contains(n.getToken());
    }
  }

  /** Use the 3-argument constructor instead. */
  @Deprecated
  public NodeTraversal(AbstractCompiler compiler, Callback cb) {
    // Picks a scope creator based on the compiler's language mode.
    this(compiler, cb, compiler.getLanguageMode().isEs6OrHigher()
        ? new Es6SyntacticScopeCreator(compiler)
        : SyntacticScopeCreator.makeUntyped(compiler));
  }

  /**
   * Creates a node traversal using the specified callback interface
   * and the scope creator.
   */
  public NodeTraversal(AbstractCompiler compiler, Callback cb, ScopeCreator scopeCreator) {
    this.callback = cb;
    if (cb instanceof ScopedCallback) {
      this.scopeCallback = (ScopedCallback) cb;
    }
    this.compiler = compiler;
    setInputId(null, "");
    this.scopeCreator = scopeCreator;
    this.useBlockScope = scopeCreator.hasBlockScope();
  }

  /**
   * Wraps an exception raised mid-traversal with the current node/parent
   * context before rethrowing it through the compiler as an internal error.
   */
  private void throwUnexpectedException(Exception unexpectedException) {
    // If there's an unexpected exception, try to get the
    // line number of the code that caused it.
    String message = unexpectedException.getMessage();

    // TODO(user): It is possible to get more information if curNode or
    // its parent is missing. We still have the scope stack in which it is still
    // very useful to find out at least which function caused the exception.
    if (inputId != null) {
      message = unexpectedException.getMessage() + "\n"
          + formatNodeContext("Node", curNode)
          + (curNode == null ? "" : formatNodeContext("Parent", curNode.getParent()));
    }
    compiler.throwInternalError(message, unexpectedException);
  }

  /** Formats one labeled node (or NULL) plus its source position for error messages. */
  private String formatNodeContext(String label, Node n) {
    if (n == null) {
      return " " + label + ": NULL";
    }
    return " " + label + "(" + n.toString(false, false, false) + "): " + formatNodePosition(n);
  }

  /**
   * Traverses a parse tree recursively.
   */
  public void traverse(Node root) {
    try {
      setInputId(NodeUtil.getInputId(root), "");
      curNode = root;
      pushScope(root);
      // null parent ensures that the shallow callbacks will traverse root
      traverseBranch(root, null);
      popScope();
    } catch (Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }

  /**
   * Traverses both the externs tree and the main JS tree under their shared
   * parent, which serves as the root scope for the whole traversal.
   */
  void traverseRoots(Node externs, Node root) {
    try {
      Node scopeRoot = externs.getParent();
      Preconditions.checkNotNull(scopeRoot);

      setInputId(NodeUtil.getInputId(scopeRoot), "");
      curNode = scopeRoot;
      pushScope(scopeRoot);

      traverseBranch(externs, scopeRoot);
      Preconditions.checkState(root.getParent() == scopeRoot);
      traverseBranch(root, scopeRoot);

      popScope();
    } catch (Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }

  /** Placeholder used in diagnostics when a node has no known source file. */
  private static final String MISSING_SOURCE = "[source unknown]";

  /** Renders "file:line:column" plus the offending source line, for diagnostics. */
  private String formatNodePosition(Node n) {
    String sourceFileName = getBestSourceFileName(n);
    if (sourceFileName == null) {
      return MISSING_SOURCE + "\n";
    }

    int lineNumber = n.getLineno();
    int columnNumber = n.getCharno();
    String src = compiler.getSourceLine(sourceFileName, lineNumber);
    if (src == null) {
      src = MISSING_SOURCE;
    }
    return sourceFileName + ":" + lineNumber + ":" + columnNumber + "\n"
        + src + "\n";
  }

  /**
   * Traverses a parse tree recursively with a scope, starting with the given
   * root. This should only be used in the global scope or module scopes. Otherwise, use
   * {@link #traverseAtScope}.
   */
  void traverseWithScope(Node root, Scope s) {
    Preconditions.checkState(s.isGlobal() || s.isModuleScope());
    try {
      setInputId(null, "");
      curNode = root;
      pushScope(s);
      traverseBranch(root, null);
      popScope();
    } catch (Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }

  /**
   * Traverses a parse tree recursively with a scope, starting at that scope's
   * root.
   */
  void traverseAtScope(Scope s) {
    Node n = s.getRootNode();
    curNode = n;
    // Quietly push every ancestor scope first (outermost on top of
    // parentScopes, so they are pushed outermost-first) so getScope()
    // reflects the full chain without firing enterScope callbacks for them.
    Deque<Scope> parentScopes = new ArrayDeque<>();
    Scope temp = s.getParent();
    while (temp != null) {
      parentScopes.push(temp);
      temp = temp.getParent();
    }
    while (!parentScopes.isEmpty()) {
      pushScope(parentScopes.pop(), true);
    }
    if (n.isFunction()) {
      // We need to do some extra magic to make sure that the scope doesn't
      // get re-created when we dive into the function.
      if (inputId == null) {
        setInputId(NodeUtil.getInputId(n), getSourceName(n));
      }
      pushScope(s);

      Node args = n.getSecondChild();
      Node body = args.getNext();
      traverseBranch(args, n);
      traverseBranch(body, n);

      popScope();
    } else if (n.isNormalBlock()) {
      if (inputId == null) {
        setInputId(NodeUtil.getInputId(n), getSourceName(n));
      }
      pushScope(s);

      // traverseBranch is not called here to avoid re-creating the block scope.
      traverseChildren(n);

      popScope();
    } else {
      Preconditions.checkState(s.isGlobal() || s.isModuleScope(),
          "Expected global or module scope. Got:", s);
      traverseWithScope(n, s);
    }
  }

  /**
   * Traverse a function out-of-band of normal traversal.
   *
   * @param node The function node.
   * @param scope The scope the function is contained in. Does not fire enter/exit
   *     callback events for this scope.
   */
  public void traverseFunctionOutOfBand(Node node, Scope scope) {
    Preconditions.checkNotNull(scope);
    Preconditions.checkState(node.isFunction());
    Preconditions.checkState(scope.getRootNode() != null);
    if (inputId == null) {
      setInputId(NodeUtil.getInputId(node), getSourceName(node));
    }
    curNode = node.getParent();
    pushScope(scope, true /* quietly */);
    traverseBranch(node, curNode);
    popScope(true /* quietly */);
  }

  /**
   * Traverses an inner node recursively with a refined scope. An inner node may
   * be any node with a non {@code null} parent (i.e. all nodes except the
   * root).
   *
   * @param node the node to traverse
   * @param parent the node's parent, it may not be {@code null}
   * @param refinedScope the refined scope of the scope currently at the top of
   *     the scope stack or in trivial cases that very scope or {@code null}
   */
  void traverseInnerNode(Node node, Node parent, Scope refinedScope) {
    Preconditions.checkNotNull(parent);
    if (inputId == null) {
      setInputId(NodeUtil.getInputId(node), getSourceName(node));
    }
    if (refinedScope != null && getScope() != refinedScope) {
      curNode = node;
      pushScope(refinedScope);
      traverseBranch(node, parent);
      popScope();
    } else {
      traverseBranch(node, parent);
    }
  }

  /** Returns the compiler this traversal was constructed with. */
  public AbstractCompiler getCompiler() {
    return compiler;
  }

  /**
   * Gets the current line number, or zero if it cannot be determined. The line
   * number is retrieved lazily as a running time optimization.
   */
  public int getLineNumber() {
    Node cur = curNode;
    while (cur != null) {
      int line = cur.getLineno();
      if (line >= 0) {
        return line;
      }
      cur = cur.getParent();
    }
    return 0;
  }

  /**
   * Gets the current char number, or zero if it cannot be determined. The line
   * number is retrieved lazily as a running time optimization.
   */
  public int getCharno() {
    Node cur = curNode;
    while (cur != null) {
      int line = cur.getCharno();
      if (line >= 0) {
        return line;
      }
      cur = cur.getParent();
    }
    return 0;
  }

  /**
   * Gets the current input source name.
   *
   * @return A string that may be empty, but not null
   */
  public String getSourceName() {
    return sourceName;
  }

  /**
   * Gets the current input source.
   */
  public CompilerInput getInput() {
    // Resolved lazily and cached until setInputId() clears it.
    if (compilerInput == null) {
      compilerInput = compiler.getInput(inputId);
    }
    return compilerInput;
  }

  /**
   * Gets the current input module.
   */
  public JSModule getModule() {
    CompilerInput input = getInput();
    return input == null ? null : input.getModule();
  }

  /** Returns the node currently being traversed. */
  public Node getCurrentNode() {
    return curNode;
  }

  /**
   * Traversal for passes that work only on changed functions.
   * Suppose a loopable pass P1 uses this traversal.
   * Then, if a function doesn't change between two runs of P1, it won't look at
   * the function the second time.
   * (We're assuming that P1 runs to a fixpoint, o/w we may miss optimizations.)
   *
   * <p>Most changes are reported with calls to Compiler.reportCodeChange(), which
   * doesn't know which scope changed. We keep track of the current scope by
   * calling Compiler.setScope inside pushScope and popScope.
   * The automatic tracking can be wrong in rare cases when a pass changes scope
   * w/out causing a call to pushScope or popScope. It's very hard to find the
   * places where this happens unless a bug is triggered.
   * Passes that do cross-scope modifications call
   * Compiler.reportChangeToEnclosingScope(Node n).
   */
  public static void traverseChangedFunctions(
      AbstractCompiler compiler, FunctionCallback callback) {
    final AbstractCompiler comp = compiler;
    final FunctionCallback cb = callback;
    final Node jsRoot = comp.getJsRoot();
    NodeTraversal.traverseEs6(comp, jsRoot,
        new AbstractPreOrderCallback() {
          @Override
          public final boolean shouldTraverse(NodeTraversal t, Node n, Node p) {
            if ((n.isScript() || n.isFunction()) && comp.hasScopeChanged(n)) {
              cb.enterFunction(comp, n);
            }
            return true;
          }
        });
  }

  /**
   * Traverses a node recursively.
   * @deprecated Use traverseEs6 whenever possible.
   */
  @Deprecated
  public static void traverse(AbstractCompiler compiler, Node root, Callback cb) {
    NodeTraversal t = new NodeTraversal(compiler, cb);
    t.traverse(root);
  }

  /**
   * Traverses using the ES6SyntacticScopeCreator
   */
  // TODO (stephshi): rename to "traverse" when the old traverse method is no longer used
  public static void traverseEs6(AbstractCompiler compiler, Node root, Callback cb) {
    NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler));
    t.traverse(root);
  }

  /** Traverses using a typed (SyntacticScopeCreator.makeTyped) scope creator. */
  public static void traverseTyped(AbstractCompiler compiler, Node root, Callback cb) {
    NodeTraversal t = new NodeTraversal(compiler, cb, SyntacticScopeCreator.makeTyped(compiler));
    t.traverse(root);
  }

  /**
   * @deprecated Use traverseRootsEs6.
   */
  @Deprecated
  public static void traverseRoots(
      AbstractCompiler compiler, Callback cb, Node externs, Node root) {
    NodeTraversal t = new NodeTraversal(compiler, cb);
    t.traverseRoots(externs, root);
  }

  /** Traverses externs and root using the ES6 syntactic scope creator. */
  public static void traverseRootsEs6(
      AbstractCompiler compiler, Callback cb, Node externs, Node root) {
    NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler));
    t.traverseRoots(externs, root);
  }

  /** Traverses externs and root using a typed scope creator. */
  public static void traverseRootsTyped(
      AbstractCompiler compiler, Callback cb, Node externs, Node root) {
    NodeTraversal t = new NodeTraversal(compiler, cb, SyntacticScopeCreator.makeTyped(compiler));
    t.traverseRoots(externs, root);
  }

  /**
   * Traverses a branch.
   */
  private void traverseBranch(Node n, Node parent) {
    Token type = n.getToken();
    if (type == Token.SCRIPT) {
      setInputId(n.getInputId(), getSourceName(n));
    }

    curNode = n;
    if (!callback.shouldTraverse(this, n, parent)) {
      return;
    }

    // Dispatch on node kind; functions, classes, modules and (optionally)
    // block-scope roots get scope-aware traversal, everything else recurses plainly.
    if (type == Token.FUNCTION) {
      traverseFunction(n, parent);
    } else if (type == Token.CLASS) {
      traverseClass(n);
    } else if (type == Token.MODULE_BODY) {
      traverseModule(n);
    } else if (useBlockScope && NodeUtil.createsBlockScope(n)) {
      traverseBlockScope(n);
    } else {
      traverseChildren(n);
    }

    // Restore curNode: children may have changed it during recursion.
    curNode = n;
    callback.visit(this, n, parent);
  }

  /** Traverses a function. */
  private void traverseFunction(Node n, Node parent) {
    final Node fnName = n.getFirstChild();
    boolean isFunctionExpression = (parent != null)
        && NodeUtil.isFunctionExpression(n);

    if (!isFunctionExpression) {
      // Function declarations are in the scope containing the declaration.
      traverseBranch(fnName, n);
    }

    curNode = n;
    pushScope(n);

    if (isFunctionExpression) {
      // Function expression names are only accessible within the function
      // scope.
      traverseBranch(fnName, n);
    }

    final Node args = fnName.getNext();
    final Node body = args.getNext();

    // Args
    traverseBranch(args, n);

    // Body
    // ES6 "arrow" function may not have a block as a body.
    traverseBranch(body, n);

    popScope();
  }

  /** Traverses a class. */
  private void traverseClass(Node n) {
    final Node className = n.getFirstChild();
    boolean isClassExpression = NodeUtil.isClassExpression(n);

    if (!isClassExpression) {
      // Class declarations are in the scope containing the declaration.
      traverseBranch(className, n);
    }

    curNode = n;
    pushScope(n);

    if (isClassExpression) {
      // Class expression names are only accessible within the function
      // scope.
      traverseBranch(className, n);
    }

    final Node extendsClause = n.getSecondChild();
    final Node body = extendsClause.getNext();

    // Extends
    traverseBranch(extendsClause, n);

    // Body
    traverseBranch(body, n);

    popScope();
  }

  /** Traverses every child of {@code n}, tolerating child replacement mid-loop. */
  private void traverseChildren(Node n) {
    for (Node child = n.getFirstChild(); child != null; ) {
      // child could be replaced, in which case our child node
      // would no longer point to the true next
      Node next = child.getNext();
      traverseBranch(child, n);
      child = next;
    }
  }

  /** Traverses a module. */
  private void traverseModule(Node n) {
    pushScope(n);
    traverseChildren(n);
    popScope();
  }

  /** Traverses a non-function block. */
  private void traverseBlockScope(Node n) {
    pushScope(n);
    traverseChildren(n);
    popScope();
  }

  /** Examines the functions stack for the last instance of a function node. When possible, prefer
   *  this method over NodeUtil.getEnclosingFunction() because this in general looks at less nodes.
   */
  public Node getEnclosingFunction() {
    Node root = getCfgRoot();
    return root.isFunction() ? root : null;
  }

  /** Sets the given node as the current scope and pushes the relevant frames on the CFG stacks. */
  private void recordScopeRoot(Node node) {
    compiler.setScope(node);
    // Only valid CFG roots start a new CFG frame (fewer CFGs than scopes).
    if (NodeUtil.isValidCfgRoot(node)) {
      cfgs.push(node);
    }
  }

  /** Creates a new scope (e.g. when entering a function). */
  private void pushScope(Node node) {
    Preconditions.checkState(curNode != null);
    Preconditions.checkState(node != null);
    // Push only the root node; the Scope object is created lazily in getScope().
    scopeRoots.push(node);
    recordScopeRoot(node);
    if (scopeCallback != null) {
      scopeCallback.enterScope(this);
    }
  }

  /** Creates a new scope (e.g. when entering a function). */
  private void pushScope(Scope s) {
    pushScope(s, false);
  }

  /**
   * Creates a new scope (e.g. when entering a function).
   * @param quietly Don't fire an enterScope callback.
   */
  private void pushScope(Scope s, boolean quietly) {
    Preconditions.checkState(curNode != null);
    scopes.push(s);
    recordScopeRoot(s.getRootNode());
    if (!quietly && scopeCallback != null) {
      scopeCallback.enterScope(this);
    }
  }

  private void popScope() {
    popScope(false);
  }

  /**
   * Pops back to the previous scope (e.g. when leaving a function).
   * @param quietly Don't fire the exitScope callback.
   */
  private void popScope(boolean quietly) {
    if (!quietly && scopeCallback != null) {
      scopeCallback.exitScope(this);
    }
    // The top of the combined stack is either an unmaterialized root
    // (scopeRoots) or a materialized Scope (scopes); try roots first.
    Node scopeRoot = scopeRoots.pollFirst();
    if (scopeRoot == null) {
      scopeRoot = scopes.pop().getRootNode();
    }
    if (NodeUtil.isValidCfgRoot(scopeRoot)) {
      cfgs.pop();
    }
    Node newScopeRoot = getScopeRoot();
    if (newScopeRoot != null) {
      compiler.setScope(newScopeRoot);
    }
  }

  /** Gets the current scope. */
  public Scope getScope() {
    Scope scope = scopes.peek();
    // Materialize any pending scope roots, outermost-first, so the Scope
    // parent chain is built in the right order.
    Node root = null;
    while ((root = scopeRoots.pollLast()) != null) {
      scope = scopeCreator.createScope(root, scope);
      scopes.push(scope);
    }
    // No need to call compiler.setScope; the top scopeRoot is now the top scope
    return scope;
  }

  /** Returns the closest hoist scope enclosing the current scope. */
  public Scope getClosestHoistScope() {
    // TODO(moz): This should not call getScope(). We should find the root of the closest hoist
    // scope and effectively getScope() from there, which avoids scanning inner scopes that might
    // not be needed.
    return getScope().getClosestHoistScope();
  }

  /** Returns the current scope as a TypedScope; fails on untyped traversals. */
  public TypedScope getTypedScope() {
    Scope s = getScope();
    Preconditions.checkState(s instanceof TypedScope,
        "getTypedScope called for untyped traversal");
    return (TypedScope) s;
  }

  /** Gets the control flow graph for the current JS scope.
   */
  public ControlFlowGraph<Node> getControlFlowGraph() {
    ControlFlowGraph<Node> result;
    Object o = cfgs.peek();
    if (o instanceof Node) {
      // Lazily replace the raw root node with the computed CFG on the stack.
      Node cfgRoot = (Node) o;
      ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false, true);
      cfa.process(null, cfgRoot);
      result = cfa.getCfg();
      cfgs.pop();
      cfgs.push(result);
    } else {
      result = (ControlFlowGraph<Node>) o;
    }
    return result;
  }

  /** Returns the current scope's root. */
  public Node getScopeRoot() {
    // Check unmaterialized roots first, then materialized scopes.
    Node root = scopeRoots.peek();
    if (root == null) {
      Scope s = scopes.peek();
      return s != null ? s.getRootNode() : null;
    } else {
      return root;
    }
  }

  /** Returns the root node of the current CFG frame (raw node or computed CFG). */
  private Node getCfgRoot() {
    Node result;
    Object o = cfgs.peek();
    if (o instanceof Node) {
      result = (Node) o;
    } else {
      result = ((ControlFlowGraph<Node>) o).getEntry().getValue();
    }
    return result;
  }

  /**
   * Determines whether the traversal is currently in the global scope. Note that this returns false
   * in a global block scope.
   */
  public boolean inGlobalScope() {
    return getScopeDepth() == 0;
  }

  /** Determines whether the traversal is currently in the scope of the block of a function. */
  public boolean inFunctionBlockScope() {
    Node scopeRoot = getScopeRoot();
    return scopeRoot.isNormalBlock() && scopeRoot.getParent().isFunction();
  }

  /**
   * Determines whether the hoist scope of the current traversal is global.
   */
  public boolean inGlobalHoistScope() {
    Node cfgRoot = getCfgRoot();
    Preconditions.checkState(
        cfgRoot.isScript()
            || cfgRoot.isRoot()
            || cfgRoot.isNormalBlock()
            || cfgRoot.isFunction()
            || cfgRoot.isModuleBody(),
        cfgRoot);
    return cfgRoot.isScript() || cfgRoot.isRoot() || cfgRoot.isNormalBlock();
  }

  /**
   * Determines whether the traversal's current scope root is a module root.
   */
  public boolean inModuleScope() {
    return NodeUtil.isModuleScopeRoot(getScopeRoot());
  }

  /**
   * Determines whether the hoist scope of the current traversal is a module scope.
   */
  public boolean inModuleHoistScope() {
    Node moduleRoot = getCfgRoot();
    if (moduleRoot.isFunction()) {
      // For wrapped modules, the function block is the module scope root.
      moduleRoot = moduleRoot.getLastChild();
    }
    return NodeUtil.isModuleScopeRoot(moduleRoot);
  }

  /** Returns the 0-based depth of the current scope (0 == global scope). */
  int getScopeDepth() {
    // Total depth is materialized scopes plus pending (lazy) scope roots.
    int sum = scopes.size() + scopeRoots.size();
    Preconditions.checkState(sum > 0);
    return sum - 1; // Use 0-based scope depth to be consistent within the compiler
  }

  /** Reports a diagnostic (error or warning) */
  public void report(Node n, DiagnosticType diagnosticType, String... arguments) {
    JSError error = JSError.make(n, diagnosticType, arguments);
    compiler.report(error);
  }

  /** Returns the node's source file name, or "" when it has none. */
  private static String getSourceName(Node n) {
    String name = n.getSourceFileName();
    return nullToEmpty(name);
  }

  /** Updates the current input; clears the cached CompilerInput so getInput() re-resolves. */
  private void setInputId(InputId id, String sourceName) {
    inputId = id;
    this.sourceName = sourceName;
    compilerInput = null;
  }

  InputId getInputId() {
    return inputId;
  }

  /**
   * Creates a JSError during NodeTraversal.
   *
   * @param n Determines the line and char position within the source file name
   * @param type The DiagnosticType
   * @param arguments Arguments to be incorporated into the message
   */
  public JSError makeError(Node n, CheckLevel level, DiagnosticType type,
      String... arguments) {
    return JSError.make(n, level, type, arguments);
  }

  /**
   * Creates a JSError during NodeTraversal.
   *
   * @param n Determines the line and char position within the source file name
   * @param type The DiagnosticType
   * @param arguments Arguments to be incorporated into the message
   */
  public JSError makeError(Node n, DiagnosticType type, String... arguments) {
    return JSError.make(n, type, arguments);
  }

  /** Prefers the node's own source file name; falls back to the traversal's current one. */
  private String getBestSourceFileName(Node n) {
    return n == null ? sourceName : n.getSourceFileName();
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.budgets;

import javax.annotation.Generated;

import com.amazonaws.services.budgets.model.*;

/**
 * Abstract implementation of {@code AWSBudgetsAsync}. Convenient method forms pass through to the corresponding
 * overload that takes a request object and an {@code AsyncHandler}, which throws an
 * {@code UnsupportedOperationException}.
 *
 * <p>
 * NOTE(review): this class is code-generator output (see {@code @Generated} below) — do not hand-edit the method
 * bodies; regeneration will clobber local changes. Every service operation follows the same two-method pattern:
 * the single-argument form delegates to the two-argument form with a {@code null} handler, and the two-argument
 * form always throws {@code UnsupportedOperationException}. Subclasses override only the operations they support.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSBudgetsAsync extends AbstractAWSBudgets implements AWSBudgetsAsync {

    // Protected so only subclasses (concrete async client stubs) can instantiate.
    protected AbstractAWSBudgetsAsync() {
    }

    @Override
    public java.util.concurrent.Future<CreateBudgetResult> createBudgetAsync(CreateBudgetRequest request) {

        return createBudgetAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateBudgetResult> createBudgetAsync(CreateBudgetRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateBudgetRequest, CreateBudgetResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateBudgetActionResult> createBudgetActionAsync(CreateBudgetActionRequest request) {

        return createBudgetActionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateBudgetActionResult> createBudgetActionAsync(CreateBudgetActionRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateBudgetActionRequest, CreateBudgetActionResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateNotificationResult> createNotificationAsync(CreateNotificationRequest request) {

        return createNotificationAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateNotificationResult> createNotificationAsync(CreateNotificationRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateNotificationRequest, CreateNotificationResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateSubscriberResult> createSubscriberAsync(CreateSubscriberRequest request) {

        return createSubscriberAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateSubscriberResult> createSubscriberAsync(CreateSubscriberRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateSubscriberRequest, CreateSubscriberResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DeleteBudgetResult> deleteBudgetAsync(DeleteBudgetRequest request) {

        return deleteBudgetAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteBudgetResult> deleteBudgetAsync(DeleteBudgetRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteBudgetRequest, DeleteBudgetResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DeleteBudgetActionResult> deleteBudgetActionAsync(DeleteBudgetActionRequest request) {

        return deleteBudgetActionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteBudgetActionResult> deleteBudgetActionAsync(DeleteBudgetActionRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteBudgetActionRequest, DeleteBudgetActionResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DeleteNotificationResult> deleteNotificationAsync(DeleteNotificationRequest request) {

        return deleteNotificationAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteNotificationResult> deleteNotificationAsync(DeleteNotificationRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteNotificationRequest, DeleteNotificationResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DeleteSubscriberResult> deleteSubscriberAsync(DeleteSubscriberRequest request) {

        return deleteSubscriberAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteSubscriberResult> deleteSubscriberAsync(DeleteSubscriberRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteSubscriberRequest, DeleteSubscriberResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetResult> describeBudgetAsync(DescribeBudgetRequest request) {

        return describeBudgetAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetResult> describeBudgetAsync(DescribeBudgetRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetRequest, DescribeBudgetResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionResult> describeBudgetActionAsync(DescribeBudgetActionRequest request) {

        return describeBudgetActionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionResult> describeBudgetActionAsync(DescribeBudgetActionRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetActionRequest, DescribeBudgetActionResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionHistoriesResult> describeBudgetActionHistoriesAsync(DescribeBudgetActionHistoriesRequest request) {

        return describeBudgetActionHistoriesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionHistoriesResult> describeBudgetActionHistoriesAsync(DescribeBudgetActionHistoriesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetActionHistoriesRequest, DescribeBudgetActionHistoriesResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionsForAccountResult> describeBudgetActionsForAccountAsync(
            DescribeBudgetActionsForAccountRequest request) {

        return describeBudgetActionsForAccountAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionsForAccountResult> describeBudgetActionsForAccountAsync(
            DescribeBudgetActionsForAccountRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetActionsForAccountRequest, DescribeBudgetActionsForAccountResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionsForBudgetResult> describeBudgetActionsForBudgetAsync(DescribeBudgetActionsForBudgetRequest request) {

        return describeBudgetActionsForBudgetAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetActionsForBudgetResult> describeBudgetActionsForBudgetAsync(DescribeBudgetActionsForBudgetRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetActionsForBudgetRequest, DescribeBudgetActionsForBudgetResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetNotificationsForAccountResult> describeBudgetNotificationsForAccountAsync(
            DescribeBudgetNotificationsForAccountRequest request) {

        return describeBudgetNotificationsForAccountAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetNotificationsForAccountResult> describeBudgetNotificationsForAccountAsync(
            DescribeBudgetNotificationsForAccountRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetNotificationsForAccountRequest, DescribeBudgetNotificationsForAccountResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetPerformanceHistoryResult> describeBudgetPerformanceHistoryAsync(
            DescribeBudgetPerformanceHistoryRequest request) {

        return describeBudgetPerformanceHistoryAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetPerformanceHistoryResult> describeBudgetPerformanceHistoryAsync(
            DescribeBudgetPerformanceHistoryRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetPerformanceHistoryRequest, DescribeBudgetPerformanceHistoryResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetsResult> describeBudgetsAsync(DescribeBudgetsRequest request) {

        return describeBudgetsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeBudgetsResult> describeBudgetsAsync(DescribeBudgetsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeBudgetsRequest, DescribeBudgetsResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeNotificationsForBudgetResult> describeNotificationsForBudgetAsync(DescribeNotificationsForBudgetRequest request) {

        return describeNotificationsForBudgetAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeNotificationsForBudgetResult> describeNotificationsForBudgetAsync(DescribeNotificationsForBudgetRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeNotificationsForBudgetRequest, DescribeNotificationsForBudgetResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeSubscribersForNotificationResult> describeSubscribersForNotificationAsync(
            DescribeSubscribersForNotificationRequest request) {

        return describeSubscribersForNotificationAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeSubscribersForNotificationResult> describeSubscribersForNotificationAsync(
            DescribeSubscribersForNotificationRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeSubscribersForNotificationRequest, DescribeSubscribersForNotificationResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ExecuteBudgetActionResult> executeBudgetActionAsync(ExecuteBudgetActionRequest request) {

        return executeBudgetActionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ExecuteBudgetActionResult> executeBudgetActionAsync(ExecuteBudgetActionRequest request,
            com.amazonaws.handlers.AsyncHandler<ExecuteBudgetActionRequest, ExecuteBudgetActionResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateBudgetResult> updateBudgetAsync(UpdateBudgetRequest request) {

        return updateBudgetAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateBudgetResult> updateBudgetAsync(UpdateBudgetRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateBudgetRequest, UpdateBudgetResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateBudgetActionResult> updateBudgetActionAsync(UpdateBudgetActionRequest request) {

        return updateBudgetActionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateBudgetActionResult> updateBudgetActionAsync(UpdateBudgetActionRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateBudgetActionRequest, UpdateBudgetActionResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateNotificationResult> updateNotificationAsync(UpdateNotificationRequest request) {

        return updateNotificationAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateNotificationResult> updateNotificationAsync(UpdateNotificationRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateNotificationRequest, UpdateNotificationResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateSubscriberResult> updateSubscriberAsync(UpdateSubscriberRequest request) {

        return updateSubscriberAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateSubscriberResult> updateSubscriberAsync(UpdateSubscriberRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateSubscriberRequest, UpdateSubscriberResult> asyncHandler) {

        throw new java.lang.UnsupportedOperationException();
    }

}
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.ofbiz.entity.util;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javolution.text.CharArray;
import javolution.text.Text;
import javolution.xml.sax.Attributes;
import javolution.xml.sax.XMLReaderImpl;

import org.ofbiz.base.location.FlexibleLocation;
import org.ofbiz.base.util.Base64;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.base.util.UtilXml;
import org.ofbiz.base.util.template.FreeMarkerWorker;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericEntityNotFoundException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.datasource.GenericHelper;
import org.ofbiz.entity.eca.EntityEcaHandler;
import org.ofbiz.entity.model.ModelEntity;
import org.ofbiz.entity.model.ModelField;
import org.ofbiz.entity.transaction.GenericTransactionException;
import org.ofbiz.entity.transaction.TransactionUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;

import freemarker.ext.dom.NodeModel;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateHashModel;

/**
 * SAX XML Parser Content Handler for Entity Engine XML files.
 *
 * <p>
 * Streams an entity-engine-xml document and writes each element as a
 * {@link GenericValue} through the {@link Delegator}, batching writes/deletes
 * ({@link #valuesPerWrite} at a time) inside a single transaction. Two modes:
 * <ul>
 *   <li>normal mode — each element maps to an entity; attributes and nested
 *       CDATA/text elements map to fields; &lt;create&gt;/&lt;create-update&gt;/
 *       &lt;create-replace&gt;/&lt;delete&gt; wrapper tags switch the current action;</li>
 *   <li>template mode — a root &lt;entity-engine-transform-xml template="..."&gt;
 *       buffers the document into a DOM tree, runs it through a FreeMarker
 *       template, then re-parses the transformed output with a nested reader.</li>
 * </ul>
 * This class is a single-use, stateful handler; it is NOT thread-safe.
 */
public class EntitySaxReader implements javolution.xml.sax.ContentHandler, ErrorHandler {

    public static final String module = EntitySaxReader.class.getName();
    // Default transaction timeout: 7200 seconds (2 hours), for large imports.
    public static final int DEFAULT_TX_TIMEOUT = 7200;

    protected org.xml.sax.Locator locator;
    protected Delegator delegator;
    // Saved ECA handler so it can be restored when eecas are re-enabled.
    protected EntityEcaHandler<?> ecaHandler = null;
    // Parser state: the value currently being populated and the field being read.
    protected GenericValue currentValue = null;
    protected CharSequence currentFieldName = null;
    protected CharSequence currentFieldValue = null;
    // Counters for reporting; numberRead counts all values processed.
    protected long numberRead = 0;
    protected long numberCreated = 0;
    protected long numberUpdated = 0;
    protected long numberReplaced = 0;
    protected long numberDeleted = 0;
    protected long numberSkipped = 0;

    // Batch sizes: flush stored/deleted values every valuesPerWrite; log progress every valuesPerMessage.
    protected int valuesPerWrite = 100;
    protected int valuesPerMessage = 1000;
    protected int transactionTimeout = 7200;
    protected boolean useTryInsertMethod = false;
    protected boolean maintainTxStamps = false;
    protected boolean createDummyFks = false;
    // When true, values are only compared against the database (see writeValues), not stored.
    protected boolean checkDataOnly = false;
    protected boolean doCacheClear = true;
    protected boolean disableEeca = false;

    // Supported wrapper-tag actions; CREATE_UPDATE is the default when no wrapper is active.
    protected enum Action {CREATE, CREATE_UPDATE, CREATE_REPLACE, DELETE};
    protected List<String> actionTags = UtilMisc.toList("create", "create-update", "create-replace", "delete");
    protected Action currentAction = Action.CREATE_UPDATE;

    protected List<Object> messageList = null;

    protected List<GenericValue> valuesToWrite = new ArrayList<GenericValue>(valuesPerWrite);
    protected List<GenericValue> valuesToDelete = new ArrayList<GenericValue>(valuesPerWrite);

    // Template-transform mode state: DOM tree being built from the input document.
    protected boolean isParseForTemplate = false;
    protected CharSequence templatePath = null;
    protected Node rootNodeForTemplate = null;
    protected Node currentNodeForTemplate = null;
    protected Document documentForTemplate = null;

    protected EntitySaxReader() {}

    public EntitySaxReader(Delegator delegator, int transactionTimeout) {
        // clone the delegator right off so there is no chance of making change to the initial object
        this.delegator = delegator.cloneDelegator();
        this.transactionTimeout = transactionTimeout;
    }

    public EntitySaxReader(Delegator delegator) {
        this(delegator, DEFAULT_TX_TIMEOUT);
    }

    public int getValuesPerWrite() {
        return this.valuesPerWrite;
    }

    public void setValuesPerWrite(int valuesPerWrite) {
        this.valuesPerWrite = valuesPerWrite;
    }

    public int getValuesPerMessage() {
        return this.valuesPerMessage;
    }

    public void setValuesPerMessage(int valuesPerMessage) {
        this.valuesPerMessage = valuesPerMessage;
    }

    public int getTransactionTimeout() {
        return this.transactionTimeout;
    }

    public void setUseTryInsertMethod(boolean value) {
        this.useTryInsertMethod = value;
    }

    /**
     * Sets the transaction timeout, applying it to the current transaction only
     * when the value actually changes.
     *
     * @throws GenericTransactionException if the timeout cannot be applied
     */
    public void setTransactionTimeout(int transactionTimeout) throws GenericTransactionException {
        if (this.transactionTimeout != transactionTimeout) {
            TransactionUtil.setTransactionTimeout(transactionTimeout);
            this.transactionTimeout = transactionTimeout;
        }
    }

    public boolean getMaintainTxStamps() {
        return this.maintainTxStamps;
    }

    public void setMaintainTxStamps(boolean maintainTxStamps) {
        this.maintainTxStamps = maintainTxStamps;
    }

    public boolean getCreateDummyFks() {
        return this.createDummyFks;
    }

    public void setCreateDummyFks(boolean createDummyFks) {
        this.createDummyFks = createDummyFks;
    }

    public boolean getCheckDataOnly() {
        return this.checkDataOnly;
    }

    public void setCheckDataOnly(boolean checkDataOnly) {
        this.checkDataOnly = checkDataOnly;
    }

    public boolean getDoCacheClear() {
        return this.doCacheClear;
    }

    public void setDoCacheClear(boolean doCacheClear) {
        this.doCacheClear = doCacheClear;
    }

    public boolean getDisableEeca() {
        return this.disableEeca;
    }

    /**
     * Returns the message list used in check-data-only mode, lazily creating it
     * when that mode is enabled. May return null otherwise.
     */
    public List<Object> getMessageList() {
        if (this.checkDataOnly && this.messageList == null) {
            messageList = new LinkedList<Object>();
        }
        return this.messageList;
    }

    public void setMessageList(List<Object> messageList) {
        this.messageList = messageList;
    }

    /**
     * Enables/disables entity ECA firing on the (cloned) delegator, stashing the
     * original handler so re-enabling can restore it.
     */
    public void setDisableEeca(boolean disableEeca) {
        this.disableEeca = disableEeca;
        if (disableEeca) {
            if (this.ecaHandler == null) {
                this.ecaHandler = delegator.getEntityEcaHandler();
            }
            this.delegator.setEntityEcaHandler(null);
        } else {
            if (ecaHandler != null) {
                this.delegator.setEntityEcaHandler(ecaHandler);
            }
        }
    }

    public void setAction(Action action) {
        this.currentAction = action;
    }

    public Action getAction() {
        return this.currentAction;
    }

    /** Parses entity XML held in a String; returns the number of values read (0 for null input). */
    public long parse(String content) throws SAXException, java.io.IOException {
        if (content == null) {
            Debug.logWarning("content was null, doing nothing", module);
            return 0;
        }
        ByteArrayInputStream bis = new ByteArrayInputStream(content.getBytes("UTF-8"));

        return this.parse(bis, "Internal Content");
    }

    /** Parses entity XML from a URL; the stream is always closed (close errors are intentionally ignored). */
    public long parse(URL location) throws SAXException, java.io.IOException {
        if (location == null) {
            Debug.logWarning("location URL was null, doing nothing", module);
            return 0;
        }
        Debug.logImportant("Beginning import from URL: " + location.toExternalForm(), module);
        InputStream is = null;
        long numberRead = 0;
        try {
            is = location.openStream();
            numberRead = this.parse(is, location.toString());
        } finally {
            if (is != null) {
                try {
                    is.close();
                } catch(Exception e) {}
            }
        }
        return numberRead;
    }

    /**
     * Core parse: runs the Javolution SAX reader over the stream inside a single
     * transaction (begun only if transactionTimeout > -1), flushes any remaining
     * batched writes/deletes, then commits; any failure rolls the transaction back.
     *
     * @param is             the XML stream to read
     * @param docDescription label used in log messages
     * @return number of values read
     */
    public long parse(InputStream is, String docDescription) throws SAXException, java.io.IOException {
        /* NOTE: this method is not used because it doesn't work with various parsers...
        String orgXmlSaxDriver = System.getProperty("org.xml.sax.driver");
        if (UtilValidate.isEmpty(orgXmlSaxDriver)) orgXmlSaxDriver = "org.apache.xerces.parsers.SAXParser";
        XMLReader reader = XMLReaderFactory.createXMLReader(orgXmlSaxDriver);
        */

        /* This code is for a standard SAXParser and XMLReader like xerces or such; for speed we are using the Javolution reader
        XMLReader reader = null;
        try {
            SAXParserFactory parserFactory = SAXParserFactory.newInstance();
            SAXParser parser = parserFactory.newSAXParser();
            reader = parser.getXMLReader();
        } catch (javax.xml.parsers.ParserConfigurationException e) {
            Debug.logError(e, "Failed to get a SAX XML parser", module);
            throw new IllegalStateException("Failed to get a SAX XML parser");
        }
        */

        XMLReaderImpl parser = new XMLReaderImpl();
        parser.setContentHandler(this);
        parser.setErrorHandler(this);

        // LocalResolver lr = new UtilXml.LocalResolver(new DefaultHandler());
        // reader.setEntityResolver(lr);

        numberRead = 0;
        try {
            boolean beganTransaction = false;
            if (transactionTimeout > -1) {
                beganTransaction = TransactionUtil.begin(transactionTimeout);
                Debug.logImportant("Transaction Timeout set to " + transactionTimeout / 3600 + " hours (" + transactionTimeout + " seconds)", module);
            }
            try {
                parser.parse(is);
                // make sure all of the values to write got written...
                if (! valuesToWrite.isEmpty()) {
                    writeValues(valuesToWrite);
                    valuesToWrite.clear();
                }
                if (! valuesToDelete.isEmpty()) {
                    delegator.removeAll(valuesToDelete);
                    valuesToDelete.clear();
                }
                TransactionUtil.commit(beganTransaction);
            } catch (Exception e) {
                String errMsg = "An error occurred saving the data, rolling back transaction (" + beganTransaction + ")";
                Debug.logError(e, errMsg, module);
                TransactionUtil.rollback(beganTransaction, errMsg, e);
                throw new SAXException("A transaction error occurred reading data", e);
            }
        } catch (GenericTransactionException e) {
            throw new SAXException("A transaction error occurred reading data", e);
        }
        Debug.logImportant("Finished " + numberRead + " values from " + docDescription, module);
        if (Debug.verboseOn()) {
            Debug.logVerbose(" Detail created : " + numberCreated + ", skipped : " + numberSkipped + ", updated : " + numberUpdated + ", replaced : " + numberReplaced + ", deleted : " + numberDeleted, module);
        }
        return numberRead;
    }

    /**
     * Flushes a batch: either verifies values against the database
     * (check-data-only mode) or stores them all through the delegator.
     */
    protected void writeValues(List<GenericValue> valuesToWrite) throws GenericEntityException {
        if (this.checkDataOnly) {
            EntityDataAssert.checkValueList(valuesToWrite, delegator, this.getMessageList());
        } else {
            delegator.storeAll(valuesToWrite, doCacheClear, createDummyFks);
        }
    }

    /**
     * SAX characters callback: in template mode appends a text node to the DOM
     * being built; in normal mode accumulates text for the current field (the
     * parser may deliver one field value in several chunks, hence the concat).
     */
    public void characters(char[] values, int offset, int count) throws org.xml.sax.SAXException {
        if (isParseForTemplate) {
            // if null, don't worry about it
            if (this.currentNodeForTemplate != null) {
                Node newNode = this.documentForTemplate.createTextNode(new String(values, offset, count));
                this.currentNodeForTemplate.appendChild(newNode);
            }
            return;
        }

        if (currentValue != null && currentFieldName != null) {
            Text value = Text.valueOf(values, offset, count);

            // Debug.logInfo("characters: value=" + value, module);
            if (currentFieldValue == null) {
                currentFieldValue = value;
            } else {
                currentFieldValue = Text.valueOf(currentFieldValue).concat(value);
            }
        }
    }

    public void endDocument() throws org.xml.sax.SAXException {}

    /**
     * SAX endElement callback — the workhorse. Handles, in order:
     * end of the root tag, end of a template-transform root (runs the FreeMarker
     * template and re-parses the result), template-mode DOM pops, end of an
     * action wrapper tag (resets to CREATE_UPDATE), end of a nested field
     * element (commits the accumulated text into the current value, Base64
     * decoding "blob" typed fields), and finally end of an entity element
     * (sequences a missing single-field PK, optionally checks existence for
     * CREATE/DELETE skip logic, then stores/batches the value).
     */
    public void endElement(CharArray namespaceURI, CharArray localName, CharArray fullName) throws org.xml.sax.SAXException {
        if (Debug.verboseOn()) Debug.logVerbose("endElement: localName=" + localName + ", fullName=" + fullName + ", numberRead=" + numberRead, module);
        String fullNameString = fullName.toString();
        if ("entity-engine-xml".equals(fullNameString)) {
            return;
        }
        if ("entity-engine-transform-xml".equals(fullNameString)) {
            // transform file & parse it, then return
            URL templateUrl = null;
            try {
                templateUrl = FlexibleLocation.resolveLocation(templatePath.toString());
            } catch (MalformedURLException e) {
                throw new SAXException("Could not find transform template with resource path [" + templatePath + "]; error was: " + e.toString());
            }

            if (templateUrl == null) {
                throw new SAXException("Could not find transform template with resource path: " + templatePath);
            } else {
                try {
                    Reader templateReader = new InputStreamReader(templateUrl.openStream());

                    StringWriter outWriter = new StringWriter();
                    Configuration config = new Configuration();
                    config.setObjectWrapper(FreeMarkerWorker.getDefaultOfbizWrapper());
                    config.setSetting("datetime_format", "yyyy-MM-dd HH:mm:ss.SSS");

                    Template template = new Template("FMImportFilter", templateReader, config);
                    NodeModel nodeModel = NodeModel.wrap(this.rootNodeForTemplate);

                    Map<String, Object> context = new HashMap<String, Object>();
                    TemplateHashModel staticModels = FreeMarkerWorker.getDefaultOfbizWrapper().getStaticModels();
                    context.put("Static", staticModels);

                    context.put("doc", nodeModel);
                    template.process(context, outWriter);
                    String s = outWriter.toString();

                    if (Debug.verboseOn()) Debug.logVerbose("transformed xml: " + s, module);

                    // Re-parse the transformed output with a fresh reader sharing this reader's settings.
                    EntitySaxReader reader = new EntitySaxReader(delegator);
                    reader.setUseTryInsertMethod(this.useTryInsertMethod);
                    try {
                        reader.setTransactionTimeout(this.transactionTimeout);
                    } catch (GenericTransactionException e1) {
                        // couldn't set tx timeout, shouldn't be a big deal
                    }

                    numberRead += reader.parse(s);
                } catch (TemplateException e) {
                    throw new SAXException("Error storing value", e);
                } catch (IOException e) {
                    throw new SAXException("Error storing value", e);
                }
            }

            return;
        }

        if (isParseForTemplate) {
            this.currentNodeForTemplate = this.currentNodeForTemplate.getParentNode();
            return;
        }

        //Test if end action tag, set action to default
        if (actionTags.contains(fullNameString)) {
            setAction(Action.CREATE_UPDATE);
            return;
        }

        if (currentValue != null) {
            if (currentFieldName != null) {
                if (UtilValidate.isNotEmpty(currentFieldValue)) {
                    if (currentValue.getModelEntity().isField(currentFieldName.toString())) {
                        ModelEntity modelEntity = currentValue.getModelEntity();
                        ModelField modelField = modelEntity.getField(currentFieldName.toString());
                        String type = modelField.getType();
                        if (type != null && type.equals("blob")) {
                            // "blob" field text is Base64-encoded binary; decode before setting.
                            byte strData[] = new byte[currentFieldValue.length()];
                            strData = currentFieldValue.toString().getBytes();
                            byte binData[] = new byte[currentFieldValue.length()];
                            binData = Base64.base64Decode(strData);
                            currentValue.setBytes(currentFieldName.toString(), binData);
                        } else {
                            currentValue.setString(currentFieldName.toString(), currentFieldValue.toString());
                        }
                    } else {
                        Debug.logWarning("Ignoring invalid field name [" + currentFieldName + "] found for the entity: " + currentValue.getEntityName() + " with value=" + currentFieldValue, module);
                    }
                    currentFieldValue = null;
                }
                currentFieldName = null;
            } else {
                // before we write currentValue check to see if PK is there, if not and it is one field, generate it from a sequence using the entity name
                if (!currentValue.containsPrimaryKey()) {
                    if (currentValue.getModelEntity().getPksSize() == 1) {
                        ModelField modelField = currentValue.getModelEntity().getOnlyPk();
                        String newSeq = delegator.getNextSeqId(currentValue.getEntityName());
                        currentValue.setString(modelField.getName(), newSeq);
                    } else {
                        throw new SAXException("Cannot store value with incomplete primary key with more than 1 primary key field: " + currentValue);
                    }
                }

                try {
                    boolean exist = true;
                    boolean skip = false;
                    // if verbose on, check if entity exist on database for count each action
                    // It's necessary to check also for specific action CREATE and DELETE to ensure it's ok
                    if (Action.CREATE == currentAction || Action.DELETE == currentAction || Debug.verboseOn()) {
                        GenericHelper helper = delegator.getEntityHelper(currentValue.getEntityName());
                        if (currentValue.containsPrimaryKey()) {
                            try {
                                helper.findByPrimaryKey(currentValue.getPrimaryKey());
                            } catch (GenericEntityNotFoundException e) {exist = false;}
                        }
                        // CREATE skips values that already exist; DELETE skips values that don't.
                        if (Action.CREATE == currentAction && exist) {
                            skip = true;
                        } else if (Action.DELETE == currentAction && ! exist) {
                            skip = true;
                        }
                    }
                    if (! skip) {
                        if (this.useTryInsertMethod && !this.checkDataOnly) {
                            if (Action.CREATE == currentAction) {
                                currentValue.create();
                            } else if (Action.DELETE == currentAction) {
                                try {
                                    currentValue.remove();
                                } catch (GenericEntityException e1) {
                                    String errMsg = "Error deleting value";
                                    Debug.logError(e1, errMsg, module);
                                    throw new SAXException(errMsg, e1);
                                }
                            } else {
                                // this technique is faster for data sets where most, if not all, values do not already exist in the database
                                try {
                                    currentValue.create();
                                } catch (GenericEntityException e1) {
                                    // create failed, try a store, if that fails too we have a real error and the catch outside of this should handle it
                                    currentValue.store();
                                }
                            }
                        } else {
                            // Batched path: queue the value and flush every valuesPerWrite.
                            if (Action.DELETE == currentAction) {
                                valuesToDelete.add(currentValue);
                                if (valuesToDelete.size() >= valuesPerWrite) {
                                    delegator.removeAll(valuesToDelete, doCacheClear);
                                    valuesToDelete.clear();
                                }
                            } else {
                                valuesToWrite.add(currentValue);
                                if (valuesToWrite.size() >= valuesPerWrite) {
                                    writeValues(valuesToWrite);
                                    valuesToWrite.clear();
                                }
                            }
                        }
                    }

                    numberRead++;
                    if (Debug.verboseOn()) countValue(skip, exist);
                    if ((numberRead % valuesPerMessage) == 0) {
                        Debug.logImportant("Another " + valuesPerMessage + " values imported: now up to " + numberRead, module);
                    }
                    currentValue = null;
                } catch (GenericEntityException e) {
                    String errMsg = "Error storing value";
                    Debug.logError(e, errMsg, module);
                    throw new SAXException(errMsg, e);
                }
            }
        }
    }

    //Use for detail the loading entities
    protected void countValue(boolean skip, boolean exist) {
        if (skip) numberSkipped++;
        else if (Action.DELETE == currentAction) numberDeleted++;
        else if (Action.CREATE == currentAction || ! exist) numberCreated++;
        else if (Action.CREATE_REPLACE == currentAction) numberReplaced++;
        else numberUpdated++;
    }

    public void endPrefixMapping(CharArray prefix) throws org.xml.sax.SAXException {}

    public void ignorableWhitespace(char[] values, int offset, int count) throws org.xml.sax.SAXException {
        // String value = new String(values, offset, count);
        // Debug.logInfo("ignorableWhitespace: value=" + value, module);
    }

    public void processingInstruction(CharArray target, CharArray instruction) throws org.xml.sax.SAXException {}

    public void setDocumentLocator(org.xml.sax.Locator locator) {
        this.locator = locator;
    }

    public void skippedEntity(CharArray name) throws org.xml.sax.SAXException {}

    public void startDocument() throws org.xml.sax.SAXException {}

    /**
     * SAX startElement callback. Handles, in order: the root tag's configuration
     * attributes (maintain-timestamps, do-cache-clear, disable-eeca,
     * create-dummy-fk), entry into template-transform mode, template-mode DOM
     * building, action wrapper tags, nested field elements, and — the common
     * case — the start of an entity element: makes a new GenericValue (the tag
     * name after any '-' or ':' prefix is the entity name) and sets its fields
     * from the attributes, tracking unset non-PK fields for CREATE_REPLACE so
     * they can be nulled out.
     */
    public void startElement(CharArray namepsaceURI, CharArray localName, CharArray fullName, Attributes attributes) throws org.xml.sax.SAXException {
        if (Debug.verboseOn()) Debug.logVerbose("startElement: localName=" + localName + ", fullName=" + fullName + ", attributes=" + attributes, module);
        String fullNameString = fullName.toString();
        if ("entity-engine-xml".equals(fullNameString)) {
            // check the maintain-timestamp flag
            CharSequence maintainTx = attributes.getValue("maintain-timestamps");
            if (maintainTx != null) {
                this.setMaintainTxStamps("true".equalsIgnoreCase(maintainTx.toString()));
            }
            // check the do-cache-clear flag
            CharSequence doCacheClear = attributes.getValue("do-cache-clear");
            if (doCacheClear != null) {
                this.setDoCacheClear("true".equalsIgnoreCase(doCacheClear.toString()));
            }

            // check the disable-eeca flag
            CharSequence ecaDisable = attributes.getValue("disable-eeca");
            if (ecaDisable != null) {
                this.setDisableEeca("true".equalsIgnoreCase(ecaDisable.toString()));
            }

            // check the use-dummy-fk flag
            CharSequence dummyFk = attributes.getValue("create-dummy-fk");
            if (dummyFk != null) {
                this.setCreateDummyFks("true".equalsIgnoreCase(dummyFk.toString()));
            }

            return;
        }

        if ("entity-engine-transform-xml".equals(fullNameString)) {
            templatePath = attributes.getValue("template");
            isParseForTemplate = true;
            documentForTemplate = UtilXml.makeEmptyXmlDocument();
            return;
        }

        if (isParseForTemplate) {
            Element newElement = this.documentForTemplate.createElement(fullNameString);
            int length = attributes.getLength();
            for (int i = 0; i < length; i++) {
                CharSequence name = attributes.getLocalName(i);
                CharSequence value = attributes.getValue(i);

                if (UtilValidate.isEmpty(name)) {
                    name = attributes.getQName(i);
                }
                newElement.setAttribute(name.toString(), value.toString());
            }

            if (this.currentNodeForTemplate == null) {
                this.currentNodeForTemplate = newElement;
                this.rootNodeForTemplate = newElement;
            } else {
                this.currentNodeForTemplate.appendChild(newElement);
                this.currentNodeForTemplate = newElement;
            }
            return;
        }

        //Test if action change
        if (actionTags.contains(fullNameString)) {
            if ("create".equals(fullNameString)) setAction(Action.CREATE);
            if ("create-update".equals(fullNameString)) setAction(Action.CREATE_UPDATE);
            if ("create-replace".equals(fullNameString)) setAction(Action.CREATE_REPLACE);
            if ("delete".equals(fullNameString)) setAction(Action.DELETE);
            return;
        }

        if (currentValue != null) {
            // we have a nested value/CDATA element
            currentFieldName = fullName;
        } else {
            String entityName = fullNameString;

            // if a dash or colon is in the tag name, grab what is after it
            if (entityName.indexOf('-') > 0) {
                entityName = entityName.substring(entityName.indexOf('-') + 1);
            }
            if (entityName.indexOf(':') > 0) {
                entityName = entityName.substring(entityName.indexOf(':') + 1);
            }

            try {
                currentValue = delegator.makeValue(entityName);
                // TODO: do we really want this? it makes it so none of the values imported have create/update timestamps set
                // DEJ 10/16/04 I think they should all be stamped, so commenting this out
                // JAZ 12/10/04 I think it should be specified when creating the reader
                if (this.maintainTxStamps) {
                    currentValue.setIsFromEntitySync(true);
                }
            } catch (Exception e) {
                Debug.logError(e, module);
            }

            if (currentValue != null) {
                int length = attributes.getLength();
                List<String> absentFields = null;
                if (Action.CREATE_REPLACE == currentAction) {
                    //get all non pk fields
                    ModelEntity currentEntity = currentValue.getModelEntity();
                    absentFields = currentEntity.getNoPkFieldNames();
                    absentFields.removeAll(currentEntity.getAutomaticFieldNames());
                }

                for (int i = 0; i < length; i++) {
                    CharSequence name = attributes.getLocalName(i);
                    CharSequence value = attributes.getValue(i);

                    if (UtilValidate.isEmpty(name)) {
                        name = attributes.getQName(i);
                    }
                    try {
                        // treat empty strings as nulls
                        if (UtilValidate.isNotEmpty(value)) {
                            if (currentValue.getModelEntity().isField(name.toString())) {
                                currentValue.setString(name.toString(), value.toString());
                                if (Action.CREATE_REPLACE == currentAction && absentFields != null) absentFields.remove(name);
                            } else {
                                Debug.logWarning("Ignoring invalid field name [" + name + "] found for the entity: " + currentValue.getEntityName() + " with value=" + value, module);
                            }
                        }
                    } catch (Exception e) {
                        Debug.logWarning(e, "Could not set field " + entityName + "." + name + " to the value " + value, module);
                    }
                }
                if (Action.CREATE_REPLACE == currentAction && absentFields != null) {
                    // CREATE_REPLACE: null out any non-PK field the element did not supply.
                    for (String fieldName : absentFields) {
                        currentValue.set(fieldName, null);
                    }
                }
            }
        }
    }

    //public void startPrefixMapping(String prefix, String uri) throws org.xml.sax.SAXException {}
    public void startPrefixMapping(CharArray arg0, CharArray arg1) throws SAXException {}

    // ======== ErrorHandler interface implementations ========

    public void error(org.xml.sax.SAXParseException exception) throws org.xml.sax.SAXException {
        Debug.logWarning(exception, "Error reading XML on line " + exception.getLineNumber() + ", column " + exception.getColumnNumber(), module);
    }

    public void fatalError(org.xml.sax.SAXParseException exception) throws org.xml.sax.SAXException {
        Debug.logError(exception, "Fatal Error reading XML on line " + exception.getLineNumber() + ", column " + exception.getColumnNumber(), module);
        throw new SAXException("Fatal Error reading XML on line " + exception.getLineNumber() + ", column " + exception.getColumnNumber(), exception);
    }

    public void warning(org.xml.sax.SAXParseException exception) throws org.xml.sax.SAXException {
        Debug.logWarning(exception, "Warning reading XML on line " + exception.getLineNumber() + ", column " + exception.getColumnNumber(), module);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.filters;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Aggregator for the {@code filters} aggregation: collects each document into
 * one bucket per matching filter, plus an optional "other" bucket for
 * documents that matched none of the filters.
 */
public class FiltersAggregator extends BucketsAggregator {

    public static final ParseField FILTERS_FIELD = new ParseField("filters");
    public static final ParseField OTHER_BUCKET_FIELD = new ParseField("other_bucket");
    public static final ParseField OTHER_BUCKET_KEY_FIELD = new ParseField("other_bucket_key");

    /**
     * A named filter: pairs a bucket key with the query that selects the
     * bucket's documents. Serializable over the transport wire and renderable
     * to XContent.
     */
    public static class KeyedFilter implements Writeable<KeyedFilter>, ToXContent {
        // Prototype instance used by the readFrom()-style deserialization protocol.
        static final KeyedFilter PROTOTYPE = new KeyedFilter("", new MatchAllQueryBuilder());

        private final String key;
        private final QueryBuilder<?> filter;

        /**
         * @param key    bucket name; must not be null
         * @param filter query selecting the bucket's documents; must not be null.
         *               An {@link EmptyQueryBuilder} is normalized to match-all.
         * @throws IllegalArgumentException if either argument is null
         */
        public KeyedFilter(String key, QueryBuilder<?> filter) {
            if (key == null) {
                throw new IllegalArgumentException("[key] must not be null");
            }
            if (filter == null) {
                throw new IllegalArgumentException("[filter] must not be null");
            }
            this.key = key;
            // Normalize the "empty" placeholder query to an explicit match-all
            // so downstream code never has to special-case it.
            if (filter instanceof EmptyQueryBuilder) {
                this.filter = new MatchAllQueryBuilder();
            } else {
                this.filter = filter;
            }
        }

        public String key() {
            return key;
        }

        public QueryBuilder<?> filter() {
            return filter;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            // Rendered as "<key>": { <filter> } inside the enclosing "filters" object.
            builder.field(key, filter);
            return builder;
        }

        @Override
        public KeyedFilter readFrom(StreamInput in) throws IOException {
            String key = in.readString();
            QueryBuilder<?> filter = in.readQuery();
            return new KeyedFilter(key, filter);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // Must mirror readFrom(): key first, then the query.
            out.writeString(key);
            out.writeQuery(filter);
        }

        @Override
        public int hashCode() {
            return Objects.hash(key, filter);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            KeyedFilter other = (KeyedFilter) obj;
            return Objects.equals(key, other.key)
                    && Objects.equals(filter, other.filter);
        }
    }

    private final String[] keys;
    private Weight[] filters;
    private final boolean keyed;
    // True iff an otherBucketKey was supplied; enables the extra
    // none-of-the-filters bucket.
    private final boolean showOtherBucket;
    private final String otherBucketKey;
    // Number of buckets per owning ordinal: keys.length, +1 when the
    // "other" bucket is enabled. Used as the stride in bucketOrd().
    private final int totalNumKeys;

    /**
     * @param otherBucketKey key for the "other" bucket, or null to disable it
     */
    public FiltersAggregator(String name, AggregatorFactories factories, String[] keys, Weight[] filters, boolean keyed,
            String otherBucketKey, AggregationContext aggregationContext,
            Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
            throws IOException {
        super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
        this.keyed = keyed;
        this.keys = keys;
        this.filters = filters;
        this.showOtherBucket = otherBucketKey != null;
        this.otherBucketKey = otherBucketKey;
        if (showOtherBucket) {
            this.totalNumKeys = keys.length + 1;
        } else {
            this.totalNumKeys = keys.length;
        }
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
            final LeafBucketCollector sub) throws IOException {
        // no need to provide deleted docs to the filter
        // Pre-materialize each filter as a random-access Bits view over this
        // segment so per-document matching below is a cheap get(doc).
        final Bits[] bits = new Bits[filters.length];
        for (int i = 0; i < filters.length; ++i) {
            bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx));
        }
        return new LeafBucketCollectorBase(sub, null) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                boolean matched = false;
                // A document may match several filters and is then counted in
                // each matching bucket.
                for (int i = 0; i < bits.length; i++) {
                    if (bits[i].get(doc)) {
                        collectBucket(sub, doc, bucketOrd(bucket, i));
                        matched = true;
                    }
                }
                // Documents matching no filter go to the "other" slot
                // (index bits.length) when enabled.
                if (showOtherBucket && !matched) {
                    collectBucket(sub, doc, bucketOrd(bucket, bits.length));
                }
            }
        };
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
        List<InternalFilters.InternalBucket> buckets = new ArrayList<>(filters.length);
        for (int i = 0; i < keys.length; i++) {
            long bucketOrd = bucketOrd(owningBucketOrdinal, i);
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], bucketDocCount(bucketOrd),
                    bucketAggregations(bucketOrd), keyed);
            buckets.add(bucket);
        }
        // other bucket
        if (showOtherBucket) {
            long bucketOrd = bucketOrd(owningBucketOrdinal, keys.length);
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, bucketDocCount(bucketOrd),
                    bucketAggregations(bucketOrd), keyed);
            buckets.add(bucket);
        }
        return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData());
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        // Same bucket layout as buildAggregation(), but every bucket has a
        // zero doc count and empty sub-aggregations.
        InternalAggregations subAggs = buildEmptySubAggregations();
        List<InternalFilters.InternalBucket> buckets = new ArrayList<>(filters.length);
        for (int i = 0; i < keys.length; i++) {
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], 0, subAggs, keyed);
            buckets.add(bucket);
        }
        if (showOtherBucket) {
            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed);
            buckets.add(bucket);
        }
        return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData());
    }

    // Maps (owning ordinal, filter index) to a flat bucket ordinal; buckets of
    // one owning ordinal occupy a contiguous stride of totalNumKeys slots.
    final long bucketOrd(long owningBucketOrdinal, int filterOrd) {
        return owningBucketOrdinal * totalNumKeys + filterOrd;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.deltaspike.core.api.provider;

import org.apache.deltaspike.core.api.literal.AnyLiteral;

import javax.enterprise.context.Dependent;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.Typed;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * This class contains utility methods to resolve contextual references
 * in situations where no injection is available.
 *
 * @see BeanManagerProvider
 */
@Typed()
public final class BeanProvider
{
    private BeanProvider()
    {
        // this is a utility class which doesn't get instantiated.
    }

    /**
     * <p>Get a Contextual Reference by its type and annotation (qualifier).
     * You can use this method to get contextual references of a given type.
     * A 'Contextual Reference' is a proxy which will automatically resolve
     * the correct contextual instance when you access any method.</p>
     *
     * <p><b>Attention:</b> You shall not use this method to manually resolve a
     * &#064;Dependent bean! The reason is that this contextual instances do usually
     * live in the well defined lifecycle of their injection point (the bean they got
     * injected into). But if we manually resolve a &#064;Dependent bean, then it does <b>not</b>
     * belong to such a well defined lifecycle (because &#064;Dependent it is not
     * &#064;NormalScoped) and thus will not automatically be
     * destroyed at the end of the lifecycle. You need to manually destroy this contextual instance via
     * {@link javax.enterprise.context.spi.Contextual#destroy(Object, javax.enterprise.context.spi.CreationalContext)}.
     * Thus you also need to manually store the CreationalContext and the Bean you
     * used to create the contextual instance which this method will not provide.</p>
     *
     * @param type the type of the bean in question
     * @param optional if <code>true</code> it will return <code>null</code> if no bean could be found or created.
     *                 Otherwise it will throw an {@code IllegalStateException}
     * @param qualifiers additional qualifiers which further distinguish the resolved bean
     * @param <T> target type
     * @return the resolved Contextual Reference
     * @throws IllegalStateException if no bean matches and {@code optional} is {@code false}
     */
    public static <T> T getContextualReference(Class<T> type, boolean optional, Annotation... qualifiers)
    {
        BeanManager beanManager = getBeanManager();
        Set<Bean<?>> beans = beanManager.getBeans(type, qualifiers);

        if (beans == null || beans.isEmpty())
        {
            if (optional)
            {
                return null;
            }

            throw new IllegalStateException("Could not find beans for Type=" + type
                    + " and qualifiers:" + Arrays.toString(qualifiers));
        }

        return getContextualReference(type, beanManager, beans);
    }

    /**
     * <p>Get a Contextual Reference by its EL Name.
     * This only works for beans with the &#064;Named annotation.</p>
     *
     * <p><b>Attention:</b> please see the notes on manually resolving &#064;Dependent bean
     * in {@link #getContextualReference(Class, boolean, java.lang.annotation.Annotation...)}!</p>
     *
     * @param name the EL name of the bean
     * @param optional if <code>true</code> it will return <code>null</code> if no bean could be found or created.
     *                 Otherwise it will throw an {@code IllegalStateException}
     * @return the resolved Contextual Reference
     */
    public static Object getContextualReference(String name, boolean optional)
    {
        return getContextualReference(name, optional, Object.class);
    }

    /**
     * <p>Get a Contextual Reference by its EL Name.
     * This only works for beans with the &#064;Named annotation.</p>
     *
     * <p><b>Attention:</b> please see the notes on manually resolving &#064;Dependent bean
     * in {@link #getContextualReference(Class, boolean, java.lang.annotation.Annotation...)}!</p>
     *
     * @param name the EL name of the bean
     * @param optional if <code>true</code> it will return <code>null</code> if no bean could be found or created.
     *                 Otherwise it will throw an {@code IllegalStateException}
     * @param type the type of the bean in question - use {@link #getContextualReference(String, boolean)}
     *             if the type is unknown e.g. in dyn. use-cases
     * @param <T> target type
     * @return the resolved Contextual Reference
     */
    public static <T> T getContextualReference(String name, boolean optional, Class<T> type)
    {
        BeanManager beanManager = getBeanManager();
        Set<Bean<?>> beans = beanManager.getBeans(name);

        if (beans == null || beans.isEmpty())
        {
            if (optional)
            {
                return null;
            }

            throw new IllegalStateException("Could not find beans for Type=" + type
                    + " and name:" + name);
        }

        return getContextualReference(type, beanManager, beans);
    }

    /**
     * <p>Get a list of Contextual References by its type independent of the qualifier
     * (including dependent scoped beans).
     *
     * You can use this method to get all contextual references of a given type.
     * A 'Contextual Reference' is a proxy which will automatically resolve
     * the correct contextual instance when you access any method.</p>
     *
     * <p><b>Attention:</b> please see the notes on manually resolving &#064;Dependent bean
     * in {@link #getContextualReference(Class, boolean, java.lang.annotation.Annotation...)}!</p>
     *
     * @param type the type of the bean in question
     * @param optional if <code>true</code> it will return an empty list if no bean could be found or created.
     *                 Otherwise it will throw an {@code IllegalStateException}
     * @param <T> target type
     * @return the resolved list of Contextual Reference or an empty-list if optional is true
     */
    public static <T> List<T> getContextualReferences(Class<T> type, boolean optional)
    {
        return getContextualReferences(type, optional, true);
    }

    /**
     * <p>Get a list of Contextual References by its type independent of the qualifier.
     *
     * Further details are available at {@link #getContextualReferences(Class, boolean)}
     *
     * @param type the type of the bean in question
     * @param optional if <code>true</code> it will return an empty list if no bean could be found or created.
     *                 Otherwise it will throw an {@code IllegalStateException}
     * @param includeDefaultScopedBeans specifies if dependent scoped beans should be included in the result
     * @param <T> target type
     * @return the resolved list of Contextual Reference or an empty-list if optional is true
     */
    public static <T> List<T> getContextualReferences(Class<T> type,
                                                      boolean optional,
                                                      boolean includeDefaultScopedBeans)
    {
        BeanManager beanManager = getBeanManager();
        Set<Bean<?>> beans = beanManager.getBeans(type, new AnyLiteral());

        if (beans == null || beans.isEmpty())
        {
            if (optional)
            {
                return Collections.emptyList();
            }

            throw new IllegalStateException("Could not find beans for Type=" + type);
        }

        if (!includeDefaultScopedBeans)
        {
            beans = filterDefaultScopedBeans(beans);
        }

        List<T> result = new ArrayList<T>(beans.size());

        for (Bean<?> bean : beans)
        {
            // Resolve each candidate bean individually: a singleton set makes
            // BeanManager#resolve a no-op for that bean. (Replaces a raw-typed
            // HashSet/Arrays.asList construction with a type-safe singleton.)
            result.add(getContextualReference(type, beanManager, Collections.<Bean<?>>singleton(bean)));
        }
        return result;
    }

    /**
     * Removes all beans whose scope is &#064;Dependent from the given set.
     *
     * @param beans candidate beans
     * @return a new set containing only the non-dependent-scoped beans
     */
    private static Set<Bean<?>> filterDefaultScopedBeans(Set<Bean<?>> beans)
    {
        Set<Bean<?>> result = new HashSet<Bean<?>>(beans.size());

        for (Bean<?> currentBean : beans)
        {
            if (!Dependent.class.isAssignableFrom(currentBean.getScope()))
            {
                result.add(currentBean);
            }
        }
        return result;
    }

    /**
     * Internal helper method to resolve the right bean and resolve the contextual reference.
     *
     * @param type the type of the bean in question
     * @param beanManager current bean-manager
     * @param beans beans in question
     * @param <T> target type
     * @return the contextual reference
     */
    private static <T> T getContextualReference(Class<T> type, BeanManager beanManager, Set<Bean<?>> beans)
    {
        Bean<?> bean = beanManager.resolve(beans);

        CreationalContext<?> creationalContext = beanManager.createCreationalContext(bean);

        @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
        T result = (T)beanManager.getReference(bean, type, creationalContext);
        return result;
    }

    /**
     * Internal method to resolve the BeanManager via the {@link BeanManagerProvider}
     */
    private static BeanManager getBeanManager()
    {
        return BeanManagerProvider.getInstance().getBeanManager();
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.hint;

import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.ide.highlighter.HighlighterFactory;
import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.ex.FileEditorProviderManager;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiBinaryFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.ui.EdgeBorder;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.UsageInfoToUsageConverter;
import com.intellij.usages.UsageTarget;
import com.intellij.usages.UsageViewManager;
import com.intellij.usages.UsageViewPresentation;
import org.jetbrains.annotations.NonNls;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.List;

/**
 * Swing panel shown inside the "Quick Definition" popup: displays the source
 * (or a non-text editor view) of one of several PSI elements, with a toolbar
 * to navigate between them, open them in the real editor, or show them in the
 * usage view.
 */
public class ImplementationViewComponent extends JPanel {
  // Navigation elements being shown (binary/invalid elements filtered out in the constructor).
  private final PsiElement[] myElements;
  // Index of the currently displayed element within myElements.
  private int myIndex;

  // Read-only editor used to render the text page.
  private final Editor myEditor;
  private final JPanel myViewingPanel;
  private final JLabel myLocationLabel;
  private final JLabel myCountLabel;
  // CardLayout switching between the text page and the binary page.
  private final CardLayout myBinarySwitch;
  private final JPanel myBinaryPanel;
  private JComboBox myFileChooser;
  // Editor/provider for non-text (e.g. binary) files; created lazily per element.
  private FileEditor myNonTextEditor;
  private FileEditorProvider myCurrentNonTextEditorProvider;
  private JBPopup myHint;
  private String myTitle;

  @NonNls private static final String TEXT_PAGE_KEY = "Text";
  @NonNls private static final String BINARY_PAGE_KEY = "Binary";
  private final ActionToolbar myToolbar;
  private static final Icon FIND_ICON = IconLoader.getIcon("/actions/find.png");

  /** Associates the enclosing popup and its title so actions can close/reuse it. */
  public void setHint(final JBPopup hint, final String title) {
    myHint = hint;
    myTitle = title;
  }

  /** @return true when at least one element survived constructor filtering. */
  public boolean hasElementsToShow() {
    return myElements.length > 0;
  }

  // Combo-box model item: wraps the virtual file of one element.
  private static class FileDescriptor {
    public final VirtualFile myFile;

    public FileDescriptor(VirtualFile file) {
      myFile = file;
    }
  }

  /**
   * Builds the component for the given elements, initially showing the one at
   * {@code index}. Elements without a containing file are dropped; when
   * nothing remains, all UI fields stay null and the panel is unusable
   * (callers should check {@link #hasElementsToShow()}).
   */
  public ImplementationViewComponent(PsiElement[] elements, final int index) {
    super(new BorderLayout());

    List<PsiElement> candidates = new ArrayList<PsiElement>(elements.length);
    List<FileDescriptor> files = new ArrayList<FileDescriptor>(elements.length);
    for (PsiElement element : elements) {
      PsiFile file = getContainingFile(element);
      if (file == null) continue;
      files.add(new FileDescriptor(file.getVirtualFile()));
      // Show the navigation element (e.g. source rather than compiled stub).
      candidates.add(element.getNavigationElement());
    }

    myElements = candidates.toArray(new PsiElement[candidates.size()]);
    if (myElements.length == 0) {
      // Nothing to display: leave all UI fields null and bail out.
      myToolbar = null;
      myEditor = null;
      myViewingPanel = null;
      myLocationLabel = null;
      myCountLabel = null;
      myBinarySwitch = null;
      myBinaryPanel = null;
      return;
    }
    // Fall back to the first element if the requested index is out of range.
    myIndex = index < myElements.length ? index : 0;

    final Project project = elements[myIndex].getProject();
    EditorFactory factory = EditorFactory.getInstance();
    Document doc = factory.createDocument("");
    doc.setReadOnly(true);
    myEditor = factory.createEditor(doc, project);

    PsiFile psiFile = getContainingFile(myElements[myIndex]);

    String fileName = psiFile.getName();
    final Language language = myElements[myIndex].getLanguage();
    // If the element's language differs from the file's type (e.g. injected
    // fragments), pick a highlighter via the language's associated extension.
    if (psiFile.getFileType() instanceof LanguageFileType &&
        ((LanguageFileType)psiFile.getFileType()).getLanguage() != language
      ) {
      final FileType associatedFileType = language.getAssociatedFileType();
      if (associatedFileType != null) {
        fileName += "." + associatedFileType.getDefaultExtension();
      }
    }
    EditorHighlighter highlighter = HighlighterFactory.createHighlighter(project, fileName);
    ((EditorEx)myEditor).setHighlighter(highlighter);
    ((EditorEx)myEditor).setBackgroundColor(EditorFragmentComponent.getBackgroundColor(myEditor));
    myEditor.getSettings().setAdditionalLinesCount(1);
    myEditor.getSettings().setAdditionalColumnsCount(1);
    myEditor.getSettings().setLineMarkerAreaShown(false);
    myEditor.getSettings().setLineNumbersShown(false);
    myEditor.getSettings().setFoldingOutlineShown(false);

    myBinarySwitch = new CardLayout();
    myViewingPanel = new JPanel(myBinarySwitch);
    final Border lineBorder = new EdgeBorder(EdgeBorder.EDGE_TOP);
    final Border emptyBorder = BorderFactory.createEmptyBorder(0, 2, 2, 2);
    final Border compoundBorder = BorderFactory.createCompoundBorder(emptyBorder, lineBorder);
    myViewingPanel.setBorder(compoundBorder);
    myViewingPanel.add(myEditor.getComponent(), TEXT_PAGE_KEY);

    myBinaryPanel = new JPanel(new BorderLayout());
    myViewingPanel.add(myBinaryPanel, BINARY_PAGE_KEY);

    add(myViewingPanel, BorderLayout.CENTER);

    myToolbar = createToolbar();
    myLocationLabel = new JLabel();
    myCountLabel = new JLabel();

    JPanel header = new JPanel(new BorderLayout());
    header.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
    JPanel toolbarPanel = new JPanel(new FlowLayout());
    toolbarPanel.add(myToolbar.getComponent());

    if (myElements.length > 1) {
      // Multiple elements: a combo box lets the user switch between files.
      myFileChooser = new JComboBox(files.toArray(new FileDescriptor[files.size()]));
      myFileChooser.setRenderer(new DefaultListCellRenderer() {
        public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
          // Pass null as the value so the default renderer does not set its
          // own text; icon/color/text are filled in from the file below.
          super.getListCellRendererComponent(list, null, index, isSelected, cellHasFocus);
          VirtualFile file = ((FileDescriptor)value).myFile;
          setIcon(file.getIcon());
          setForeground(FileStatusManager.getInstance(project).getStatus(file).getColor());
          setText(file.getPresentableName());
          return this;
        }
      });
      myFileChooser.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
          int index = myFileChooser.getSelectedIndex();
          if (myIndex != index) {
            myIndex = index;
            updateControls();
          }
        }
      });
      toolbarPanel.add(myFileChooser);
      toolbarPanel.add(myCountLabel);
    }
    else {
      // Single element: a plain label with the file's icon/name instead of a combo.
      final JLabel label = new JLabel();
      VirtualFile file = psiFile.getVirtualFile();
      if (file != null) {
        label.setIcon(file.getIcon());
        label.setForeground(FileStatusManager.getInstance(project).getStatus(file).getColor());
        label.setText(file.getPresentableName());
        label.setBorder(new CompoundBorder(IdeBorderFactory.createBorder(), IdeBorderFactory.createEmptyBorder(0, 0, 0, 5)));
      }
      toolbarPanel.add(label);
    }

    header.add(toolbarPanel, BorderLayout.WEST);
    header.add(myLocationLabel, BorderLayout.EAST);

    add(header, BorderLayout.NORTH);

    setPreferredSize(new Dimension(600, 400));

    updateControls();
  }

  // NOTE(review): "Preffered" is a typo, but the name is public API — do not rename here.
  public JComponent getPrefferedFocusableComponent() {
    return myFileChooser != null ? myFileChooser : myViewingPanel;
  }

  // Refreshes all UI pieces after myIndex changed.
  private void updateControls() {
    updateLabels();
    updateCombo();
    updateEditorText();
    myToolbar.updateActionsImmediately();
  }

  private void updateCombo() {
    if (myFileChooser != null) {
      myFileChooser.setSelectedIndex(myIndex);
    }
  }

  // Shows the current element either as highlighted text or via the first
  // accepting non-text FileEditorProvider (binary page).
  private void updateEditorText() {
    disposeNonTextEditor();

    final PsiElement elt = myElements[myIndex];
    Project project = elt.getProject();
    PsiFile psiFile = getContainingFile(elt);
    final VirtualFile vFile = psiFile.getVirtualFile();
    if (vFile == null) return;
    final FileEditorProvider[] providers = FileEditorProviderManager.getInstance().getProviders(project, vFile);
    for (FileEditorProvider provider : providers) {
      if (provider instanceof TextEditorProvider) {
        updateTextElement(elt);
        myBinarySwitch.show(myViewingPanel, TEXT_PAGE_KEY);
        break;
      }
      else if (provider.accept(project, vFile)) {
        myCurrentNonTextEditorProvider = provider;
        myNonTextEditor = myCurrentNonTextEditorProvider.createEditor(project, vFile);
        myBinaryPanel.removeAll();
        myBinaryPanel.add(myNonTextEditor.getComponent());
        myBinarySwitch.show(myViewingPanel, BINARY_PAGE_KEY);
        break;
      }
    }
  }

  // Releases the non-text editor (if any) through the provider that created it.
  private void disposeNonTextEditor() {
    if (myNonTextEditor != null) {
      myCurrentNonTextEditorProvider.disposeEditor(myNonTextEditor);
      myNonTextEditor = null;
      myCurrentNonTextEditorProvider = null;
    }
  }

  // Copies the element's source range (expanded to whole lines) into the
  // read-only fragment document and scrolls to the top.
  private void updateTextElement(final PsiElement elt) {
    Project project = elt.getProject();
    PsiFile psiFile = getContainingFile(elt);

    final Document doc = PsiDocumentManager.getInstance(project).getDocument(psiFile);
    if (doc == null) return;
    final ImplementationTextSelectioner implementationTextSelectioner =
        LanguageImplementationTextSelectioner.INSTANCE.forLanguage(elt.getLanguage());
    int start = implementationTextSelectioner.getTextStartOffset(elt);
    final int end = implementationTextSelectioner.getTextEndOffset(elt);

    final int lineStart = doc.getLineStartOffset(doc.getLineNumber(start));
    // Guard: end may equal the document length, where getLineEndOffset would be out of range.
    final int lineEnd = end < doc.getTextLength() ? doc.getLineEndOffset(doc.getLineNumber(end)) : end;
    // Document mutation must happen in a write action; wrapped as
    // undo-transparent so it does not pollute the user's undo stack.
    CommandProcessor.getInstance().runUndoTransparentAction(new Runnable() {
      public void run() {
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          public void run() {
            Document fragmentDoc = myEditor.getDocument();
            // Temporarily lift read-only to replace the content, then restore it.
            fragmentDoc.setReadOnly(false);
            fragmentDoc.replaceString(0, fragmentDoc.getTextLength(), doc.getCharsSequence().subSequence(lineStart, lineEnd).toString());
            fragmentDoc.setReadOnly(true);
            myEditor.getCaretModel().moveToOffset(0);
            myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
          }
        });
      }
    });
  }

  // Returns the original (non-copy) containing file, or null if the element has none.
  private static PsiFile getContainingFile(final PsiElement elt) {
    PsiFile psiFile = elt.getContainingFile();
    if (psiFile == null) return null;
    return psiFile.getOriginalFile();
  }

  // Releases the editors when the component leaves the Swing hierarchy.
  public void removeNotify() {
    super.removeNotify();
    EditorFactory.getInstance().releaseEditor(myEditor);
    disposeNonTextEditor();
  }

  private void updateLabels() {
    //TODO: Move from JavaDoc to somewhere more appropriate place.
    ElementLocationUtil.customizeElementLabel(myElements[myIndex], myLocationLabel);
    //noinspection AutoBoxing
    myCountLabel.setText(CodeInsightBundle.message("n.of.m", myIndex + 1, myElements.length));
  }

  // Builds the back/forward/edit/show-source/find-usages toolbar and registers
  // their keyboard shortcuts on this component.
  private ActionToolbar createToolbar() {
    DefaultActionGroup group = new DefaultActionGroup();

    BackAction back = new BackAction();
    back.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0)), this);
    group.add(back);

    ForwardAction forward = new ForwardAction();
    forward.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0)), this);
    group.add(forward);

    EditSourceActionBase edit = new EditSourceAction();
    edit.registerCustomShortcutSet(new CompositeShortcutSet(CommonShortcuts.getEditSource(), CommonShortcuts.ENTER), this);
    group.add(edit);

    edit = new ShowSourceAction();
    edit.registerCustomShortcutSet(new CompositeShortcutSet(CommonShortcuts.getViewSource(), CommonShortcuts.CTRL_ENTER), this);
    group.add(edit);

    final ShowFindUsagesAction findUsagesAction = new ShowFindUsagesAction();
    findUsagesAction.registerCustomShortcutSet(new CustomShortcutSet(KeymapManager.getInstance().getActiveKeymap().getShortcuts(IdeActions.ACTION_FIND_USAGES)), this);
    group.add(findUsagesAction);

    return ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true);
  }

  private void goBack() {
    myIndex--;
    updateControls();
  }

  private void goForward() {
    myIndex++;
    updateControls();
  }

  // Navigates to the previous element; disabled at the first one.
  private class BackAction extends AnAction implements HintManagerImpl.ActionToIgnore {
    public BackAction() {
      super(CodeInsightBundle.message("quick.definition.back"), null, IconLoader.getIcon("/actions/back.png"));
    }

    public void actionPerformed(AnActionEvent e) {
      goBack();
    }

    public void update(AnActionEvent e) {
      Presentation presentation = e.getPresentation();
      presentation.setEnabled(myIndex > 0);
    }
  }

  // Navigates to the next element; disabled at the last one.
  private class ForwardAction extends AnAction implements HintManagerImpl.ActionToIgnore {
    public ForwardAction() {
      super(CodeInsightBundle.message("quick.definition.forward"), null, IconLoader.getIcon("/actions/forward.png"));
    }

    public void actionPerformed(AnActionEvent e) {
      goForward();
    }

    public void update(AnActionEvent e) {
      Presentation presentation = e.getPresentation();
      presentation.setEnabled(myIndex < myElements.length - 1);
    }
  }

  // Opens the current element in a real editor (focused) and closes the popup.
  private class EditSourceAction extends EditSourceActionBase {
    public EditSourceAction() {
      super(true, IconLoader.getIcon("/actions/editSource.png"), CodeInsightBundle.message("quick.definition.edit.source"));
    }

    @Override public void actionPerformed(AnActionEvent e) {
      super.actionPerformed(e);
      if (myHint.isVisible()) {
        myHint.cancel();
      }
    }
  }

  // Opens the current element in a real editor without taking focus; popup stays open.
  private class ShowSourceAction extends EditSourceActionBase implements HintManagerImpl.ActionToIgnore {
    public ShowSourceAction() {
      super(false, IconLoader.getIcon("/actions/showSource.png"), CodeInsightBundle.message("quick.definition.show.source"));
    }
  }

  // Shared implementation: open the current element's file at its offset.
  private class EditSourceActionBase extends AnAction {
    private final boolean myFocusEditor;

    public EditSourceActionBase(boolean focusEditor, Icon icon, String text) {
      super(text, null, icon);
      myFocusEditor = focusEditor;
    }

    public void update(AnActionEvent e) {
      // Disabled while the file-chooser dropdown is open to avoid Enter clashes.
      e.getPresentation().setEnabled(myFileChooser == null || !myFileChooser.isPopupVisible());
    }

    public void actionPerformed(AnActionEvent e) {
      PsiElement element = myElements[myIndex];
      PsiElement navigationElement = element.getNavigationElement();
      PsiFile file = getContainingFile(navigationElement);
      if (file == null) return;
      VirtualFile virtualFile = file.getVirtualFile();
      if (virtualFile == null) return;
      Project project = element.getProject();
      FileEditorManagerEx fileEditorManager = FileEditorManagerEx.getInstanceEx(project);
      OpenFileDescriptor descriptor = new OpenFileDescriptor(project, virtualFile, navigationElement.getTextOffset());
      fileEditorManager.openTextEditor(descriptor, myFocusEditor);
    }
  }

  // Shows all non-binary elements in the Find Usages tool window and closes the popup.
  private class ShowFindUsagesAction extends AnAction {
    private static final String ACTION_NAME = "Show in usage view";

    public ShowFindUsagesAction() {
      super(ACTION_NAME, ACTION_NAME, FIND_ICON);
    }

    @Override
    public void actionPerformed(final AnActionEvent e) {
      final UsageViewPresentation presentation = new UsageViewPresentation();
      presentation.setCodeUsagesString(myTitle);
      presentation.setTabName(myTitle);
      presentation.setTabText(myTitle);
      PsiElement[] elements = collectNonBinaryElements();
      final UsageInfo[] usages = new UsageInfo[elements.length];
      for (int i = 0; i < elements.length; i++) {
        usages[i] = new UsageInfo(elements[i]);
      }
      UsageViewManager.getInstance(myEditor.getProject()).showUsages(UsageTarget.EMPTY_ARRAY, UsageInfoToUsageConverter.convert(
        new UsageInfoToUsageConverter.TargetElementsDescriptor(elements), usages), presentation);
      if (myHint.isVisible()) {
        myHint.cancel();
      }
    }

    @Override
    public void update(AnActionEvent e) {
      // Hidden when every element is binary (nothing showable in the usage view).
      e.getPresentation().setVisible(collectNonBinaryElements().length > 0);
    }
  }

  // Returns the subset of myElements that are not binary files.
  private PsiElement[] collectNonBinaryElements() {
    List<PsiElement> result = new ArrayList<PsiElement>();
    for (PsiElement element : myElements) {
      if (!(element instanceof PsiBinaryFile)) {
        result.add(element);
      }
    }
    return result.toArray(new PsiElement[result.size()]);
  }
}
/*
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 *     http://duracloud.org/license/
 */
package org.duracloud.sync.config;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;
import static junit.framework.Assert.fail;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.cli.ParseException;
import org.duracloud.common.util.ConsolePrompt;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link SyncToolConfigParser}: command-line parsing of standard
 * options, default values, numeric-argument validation, config backup and
 * restore, config-file driven parsing, and invalid option combinations.
 *
 * @author: Bill Branan
 * Date: Mar 25, 2010
 */
public class SyncToolConfigParserTest {

    SyncToolConfigParser syncConfigParser;
    File tempDir;
    // The password the parser is expected to resolve; testPasswordPrompt
    // swaps this for the value returned by the mocked console prompt.
    private String expectedPassword = "password";

    @Before
    public void setUp() throws Exception {
        syncConfigParser = createSyncToolParser();
        tempDir = new File(System.getProperty("java.io.tmpdir"));
    }

    @After
    public void tearDown() throws Exception {
        // Remove backup files so tests do not leak state into each other.
        File backupFile =
            new File(tempDir, SyncToolConfigParser.BACKUP_FILE_NAME);
        if (backupFile.exists()) {
            backupFile.delete();
        }
        File prevBackupFile =
            new File(tempDir, SyncToolConfigParser.PREV_BACKUP_FILE_NAME);
        if (prevBackupFile.exists()) {
            prevBackupFile.delete();
        }
    }

    /**
     * Verifies that when no password is supplied via the environment, the
     * parser falls back to prompting the console for one.
     */
    @Test
    public void testPasswordPrompt() throws Exception {
        syncConfigParser = new SyncToolConfigParser() {
            protected ConsolePrompt getConsole() {
                ConsolePrompt console =
                    EasyMock.createMock(ConsolePrompt.class);
                char[] charPass = {'p', 'r', 'o', 'm', 'p', 't', 'P', 'a',
                                   's', 's', 'w', 'o', 'r', 'd'};
                EasyMock.expect(console.readPassword("DuraCloud password: "))
                        .andReturn(charPass);
                EasyMock.replay(console);
                return console;
            }
        };
        expectedPassword = "promptPassword";
        testStandardOptions();
    }

    /**
     * Exercises the full standard-option matrix: all options supplied,
     * optional options removed (defaults applied), required options removed
     * (parse failure), and non-numeric values for numeric options.
     */
    @Test
    public void testStandardOptions() throws Exception {
        HashMap<String, String> argsMap = getArgsMap();

        // Process configs, make sure values match
        SyncToolConfig syncConfig =
            syncConfigParser.processStandardOptions(mapToArray(argsMap));
        checkStandardOptions(argsMap, syncConfig);

        // Remove optional params
        argsMap.remove("-f");
        argsMap.remove("-r");
        argsMap.remove("-i");
        argsMap.remove("-t");
        argsMap.remove("-m");
        argsMap.remove("-d");
        argsMap.remove("-l");
        argsMap.remove("-x");
        argsMap.remove("-w");
        argsMap.remove("-a");
        argsMap.remove("-e");
        argsMap.remove("-n");
        argsMap.remove("-o");
        argsMap.remove("-j");

        // Process configs, make sure optional params are set to defaults
        syncConfig =
            syncConfigParser.processStandardOptions(mapToArray(argsMap));
        assertEquals(SyncToolConfigParser.DEFAULT_POLL_FREQUENCY,
                     syncConfig.getPollFrequency());
        assertEquals(SyncToolConfigParser.DEFAULT_PORT, syncConfig.getPort());
        assertNull(syncConfig.getStoreId());
        assertEquals(SyncToolConfigParser.DEFAULT_NUM_THREADS,
                     syncConfig.getNumThreads());
        assertEquals(SyncToolConfigParser.DEFAULT_MAX_FILE_SIZE *
                         SyncToolConfigParser.GIGABYTE,
                     syncConfig.getMaxFileSize());
        assertEquals(false, syncConfig.syncDeletes());
        assertEquals(false, syncConfig.isCleanStart());
        assertEquals(false, syncConfig.exitOnCompletion());
        assertEquals(expectedPassword, syncConfig.getPassword());
        assertNull(syncConfig.getWorkDir());
        assertNull(syncConfig.getPrefix());
        assertNull(syncConfig.getExcludeList());
        assertFalse(syncConfig.isRenameUpdates());
        assertTrue(syncConfig.isSyncUpdates());
        assertFalse(syncConfig.isJumpStart());

        // Make sure error is thrown on missing required params
        for (String arg : argsMap.keySet()) {
            String failMsg = "An exception should have been thrown due to " +
                             "missing arg: " + arg;
            removeArgFailTest(argsMap, arg, failMsg);
        }

        // Make sure error is thrown when numerical args are not numerical
        String failMsg = "Frequency arg should require a numerical value";
        addArgFailTest(argsMap, "-f", "nonNum", failMsg);
        failMsg = "Port arg should require a numerical value";
        addArgFailTest(argsMap, "-r", "nonNum", failMsg);
        failMsg = "Threads arg should require a numerical value";
        addArgFailTest(argsMap, "-t", "nonNum", failMsg);
        failMsg = "Max file size arg should require a numerical value";
        addArgFailTest(argsMap, "-m", "nonNum", failMsg);
        failMsg = "Max file size arg should be between 1 and 5";
        addArgFailTest(argsMap, "-m", "0", failMsg);
        addArgFailTest(argsMap, "-m", "6", failMsg);
    }

    /**
     * Builds a complete set of command-line arguments covering all standard
     * options. Flag-only options carry an empty-string value; mapToArray
     * flattens key/value pairs into an args array.
     */
    private HashMap<String, String> getArgsMap() {
        HashMap<String, String> argsMap = new HashMap<String, String>();
        argsMap.put("-w", tempDir.getAbsolutePath());
        argsMap.put("-f", "1000");
        argsMap.put("-h", "localhost");
        argsMap.put("-r", "8088");
        argsMap.put("-i", "0");
        argsMap.put("-c", tempDir.getAbsolutePath());
        argsMap.put("-t", "5");
        argsMap.put("-u", "user");
        argsMap.put("-s", "mySpace");
        argsMap.put("-m", "2");
        argsMap.put("-d", "");
        argsMap.put("-l", "");
        argsMap.put("-x", "");
        argsMap.put("-a", "prefix/");
        argsMap.put("-j", "");
        return argsMap;
    }

    /**
     * Asserts that every value in the parsed config matches the argument it
     * was parsed from.
     */
    private void checkStandardOptions(HashMap<String, String> argsMap,
                                      SyncToolConfig syncConfig) {
        assertEquals(argsMap.get("-w"),
                     syncConfig.getWorkDir().getAbsolutePath());
        assertEquals(argsMap.get("-f"),
                     String.valueOf(syncConfig.getPollFrequency()));
        assertEquals(argsMap.get("-h"), syncConfig.getHost());
        assertEquals(argsMap.get("-r"),
                     String.valueOf(syncConfig.getPort()));
        assertEquals(argsMap.get("-i"), syncConfig.getStoreId());
        assertEquals(argsMap.get("-c"),
                     syncConfig.getContentDirs().get(0).getAbsolutePath());
        assertEquals(argsMap.get("-t"),
                     String.valueOf(syncConfig.getNumThreads()));
        assertEquals(argsMap.get("-u"), syncConfig.getUsername());
        assertEquals(argsMap.get("-s"), syncConfig.getSpaceId());
        assertEquals(argsMap.get("-m"),
                     String.valueOf(syncConfig.getMaxFileSize() /
                                    SyncToolConfigParser.GIGABYTE));
        assertEquals(argsMap.get("-a"), syncConfig.getPrefix());
        assertEquals(true, syncConfig.syncDeletes());
        assertEquals(true, syncConfig.isCleanStart());
        assertEquals(true, syncConfig.exitOnCompletion());
    }

    /** Flattens a map of option/value pairs into a flat args array. */
    private String[] mapToArray(HashMap<String, String> map) {
        ArrayList<String> list = new ArrayList<String>();
        for (Map.Entry<String, String> entry : map.entrySet()) {
            list.add(entry.getKey());
            list.add(entry.getValue());
        }
        return list.toArray(new String[0]);
    }

    /**
     * Adds (or overrides) one argument and expects parsing to fail with a
     * ParseException.
     */
    private void addArgFailTest(HashMap<String, String> argsMap,
                                String arg,
                                String value,
                                String failMsg) {
        // Copy constructor instead of clone() — avoids the unchecked cast.
        HashMap<String, String> cloneMap =
            new HashMap<String, String>(argsMap);
        cloneMap.put(arg, value);
        try {
            syncConfigParser.processStandardOptions(mapToArray(cloneMap));
            fail(failMsg);
        } catch (ParseException e) {
            assertNotNull(e);
        }
    }

    /**
     * Removes one argument and expects parsing to fail with a
     * ParseException.
     */
    private void removeArgFailTest(HashMap<String, String> argsMap,
                                   String arg,
                                   String failMsg) {
        HashMap<String, String> cloneMap =
            new HashMap<String, String>(argsMap);
        cloneMap.remove(arg);
        try {
            syncConfigParser.processStandardOptions(mapToArray(cloneMap));
            fail(failMsg);
        } catch (ParseException e) {
            assertNotNull(e);
        }
    }

    /** Round-trips an args array through backup and retrieval. */
    @Test
    public void testBackupRestore() throws Exception {
        String[] testArgs = {"-a", "b", "-c", "d", "e", "f", "-g", "-h", "i"};
        syncConfigParser.backupConfig(tempDir, testArgs);

        File backupFile = getBackupFile();
        String[] retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
        compareArrays(testArgs, retrieveArgs);
    }

    /**
     * Verifies that a second backup moves the first backup to the "previous"
     * backup file rather than overwriting it.
     */
    @Test
    public void testPrevBackupFile() throws Exception {
        HashMap<String, String> argsMap = getArgsMap();
        String[] args = mapToArray(argsMap);

        // First backup
        syncConfigParser.backupConfig(tempDir, args);
        File backupFile = getBackupFile();
        String[] retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
        compareArrays(args, retrieveArgs);

        HashMap<String, String> newArgsMap =
            new HashMap<String, String>(argsMap);
        newArgsMap.put("-z", "new");
        String[] newArgs = mapToArray(newArgsMap);

        // Second backup
        syncConfigParser.backupConfig(tempDir, newArgs);

        // Check config file (should be new args)
        backupFile = getBackupFile();
        retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
        compareArrays(newArgs, retrieveArgs);

        // Check previous config backup (should be old args)
        backupFile = getPrevBackupFile();
        retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
        compareArrays(args, retrieveArgs);
    }

    private File getBackupFile() {
        File backupFile =
            new File(tempDir, SyncToolConfigParser.BACKUP_FILE_NAME);
        assertTrue(backupFile.exists());
        return backupFile;
    }

    private File getPrevBackupFile() {
        File prevBackupFile =
            new File(tempDir, SyncToolConfigParser.PREV_BACKUP_FILE_NAME);
        assertTrue(prevBackupFile.exists());
        return prevBackupFile;
    }

    /** Asserts element-wise equality; assertEquals gives useful diffs. */
    private void compareArrays(String[] arr1, String[] arr2) {
        assertEquals(arr1.length, arr2.length);
        for (int i = 0; i < arr1.length; i++) {
            assertEquals(arr1[i], arr2[i]);
        }
    }

    /**
     * Verifies that a backed-up config file can drive parsing via the -g
     * option and reproduces the original configuration.
     */
    @Test
    public void testConfigFileOptions() throws Exception {
        HashMap<String, String> argsMap = getArgsMap();

        // Process standard options
        String[] args = mapToArray(argsMap);
        SyncToolConfig syncConfig =
            syncConfigParser.processStandardOptions(args);

        // Create config backup file
        syncConfigParser.backupConfig(syncConfig.getWorkDir(), args);
        File backupFile = getBackupFile();

        // Create arg map including only -g option, pointing to config file
        argsMap = new HashMap<String, String>();
        argsMap.put("-g", backupFile.getAbsolutePath());

        // Process using config file
        syncConfigParser = createSyncToolParser();
        syncConfig =
            syncConfigParser.processConfigFileOptions(mapToArray(argsMap));
        checkStandardOptions(getArgsMap(), syncConfig);
    }

    /** Parser that resolves the password from a stubbed env variable. */
    private SyncToolConfigParser createSyncToolParser() {
        return new SyncToolConfigParser() {
            protected String getPasswordEnvVariable() {
                return "password";
            }
        };
    }

    /** Mutually exclusive options must cause a parse failure. */
    @Test
    public void testInvalidCombinations() {
        HashMap<String, String> argsMap = getArgsMap();
        argsMap.put("-n", "");
        argsMap.put("-o", "");
        verifyInvalidCombos(argsMap);

        argsMap = getArgsMap();
        argsMap.put("-n", "");
        argsMap.put("-d", "");
        verifyInvalidCombos(argsMap);

        argsMap = getArgsMap();
        argsMap.put("-n", "");
        argsMap.put("-j", "");
        verifyInvalidCombos(argsMap);

        argsMap = getArgsMap();
        argsMap.put("-o", "");
        argsMap.put("-j", "");
        verifyInvalidCombos(argsMap);
    }

    private void verifyInvalidCombos(HashMap<String, String> argsMap) {
        try {
            syncConfigParser.processStandardOptions(mapToArray(argsMap));
            fail("Should fail when attempting to parse invalid arg combination");
        } catch (ParseException e) {
            assertNotNull(e.getMessage());
        }
    }
}
/*
 * #%L
 * Protempa Test Suite
 * %%
 * Copyright (C) 2012 - 2013 Emory University
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.protempa.test;

import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

/**
 * Inserts sample data into a database
 *
 *
 * @author Himanshu Rathod, Michel Mansour
 *
 */
final class DataInserter {

    private static final String PATIENT = "patient";
    private static final String ENCOUNTER = "encounter";
    private static final String PROVIDER = "provider";
    private static final String ICD9D = "icd9d_event";
    private static final String ICD9P = "icd9p_event";
    private static final String LABS = "labs_event";
    private static final String MEDS = "meds_event";
    private static final String VITALS = "vitals_event";
    private static final String SCHEMA = "TEST";
    private static final String TABLES[] = new String[]{PATIENT, ENCOUNTER,
        PROVIDER, ICD9D, ICD9P, LABS, MEDS, VITALS};
    // Rows accumulated per JDBC batch before executing and committing.
    private static final int BATCH_SIZE = 128;

    private final Connection connection;

    /**
     * Initializes with a database driver.
     *
     * @param connectionString the connection string describing the connection
     *
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    DataInserter(String connectionString) throws SQLException {
        this.connection = DriverManager.getConnection(connectionString);
        // Commits are issued explicitly after each batch.
        this.connection.setAutoCommit(false);
    }

    /**
     * Closes the connection. Must be called when data insertion is complete.
     *
     * @throws SQLException
     */
    void close() throws SQLException {
        this.connection.close();
    }

    /**
     * Truncate all the tables that we will be inserting into later.
     *
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void truncateTables() throws SQLException {
        final List<String> sqlStatements = new ArrayList<>();
        for (String table : TABLES) {
            sqlStatements.add("truncate table " + table);
        }
        // NOTE(review): the schema is dropped after truncation; presumably
        // a later step recreates it -- confirm against the test harness.
        sqlStatements.add("drop schema " + SCHEMA);
        try {
            for (String sql : sqlStatements) {
                try (Statement statement = this.connection.createStatement()) {
                    statement.executeUpdate(sql);
                }
            }
            this.connection.commit();
        } catch (SQLException ex) {
            rollbackQuietly();
            throw ex;
        }
    }

    /**
     * Insert the given stream of ICD9 diagnosis codes to a target database
     * using the given connection.
     *
     * @param diagnoses The diagnosis codes to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertIcd9Diagnoses(Stream<Icd9Diagnosis> diagnoses)
            throws SQLException {
        this.insertObservations(diagnoses, ICD9D);
    }

    /**
     * Insert the given stream of ICD9 procedure codes to a target database
     * using the given connection.
     *
     * @param procedures The procedure codes to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertIcd9Procedures(Stream<Icd9Procedure> procedures)
            throws SQLException {
        this.insertObservations(procedures, ICD9P);
    }

    /**
     * Insert the given stream of medications to a target database using the
     * given connection.
     *
     * @param medications The medications to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertMedications(Stream<Medication> medications)
            throws SQLException {
        this.insertObservations(medications, MEDS);
    }

    /**
     * Insert the given stream of lab results to a target database using the
     * given connection.
     *
     * @param labs The lab results to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertLabs(Stream<Lab> labs) throws SQLException {
        this.insertObservationsWithResult(labs, LABS);
    }

    /**
     * Insert the given list of vital signs to a target database using the
     * given connection.
     *
     * @param vitals The vitals to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertVitals(Stream<Vital> vitals) throws SQLException {
        this.insertObservationsWithResult(vitals, VITALS);
    }

    /**
     * Add the given list of observation objects to a target database using
     * the given connection.
     *
     * @param observations The list of observations to insert.
     * @param table The table in which the observations should be inserted.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    private <T extends Observation> void insertObservations(
            Stream<T> observations, String table) throws SQLException {
        int counter = 0;
        StringBuilder sqlBuilder = new StringBuilder();
        sqlBuilder.append("insert into ").append(table)
                .append(" values (?,?,?,?,?,?,?)");
        try (PreparedStatement preparedStatement = connection
                .prepareStatement(sqlBuilder.toString())) {
            for (T observation : (Iterable<T>) observations::iterator) {
                preparedStatement.setString(1, observation.getId());
                preparedStatement.setLong(2, observation.getEncounterId()
                        .longValue());
                preparedStatement.setTimestamp(3, new Timestamp(observation
                        .getTimestamp().getTime()));
                preparedStatement.setString(4, observation.getEntityId());
                preparedStatement.setTimestamp(5,
                        toTimestamp(observation.getCreateDate()));
                preparedStatement.setTimestamp(6,
                        toTimestamp(observation.getUpdateDate()));
                preparedStatement.setTimestamp(7,
                        toTimestamp(observation.getDeleteDate()));
                preparedStatement.addBatch();
                counter++;
                if (counter >= BATCH_SIZE) {
                    flushBatch(preparedStatement);
                    counter = 0;
                }
            }
            flushBatch(preparedStatement);
        } catch (SQLException ex) {
            rollbackQuietly();
            throw ex;
        }
    }

    /**
     * Insert a stream of observations and their related results to a target
     * database using the given connection.
     *
     * @param obsx The observations to insert.
     * @param table The table in which the observations should be inserted.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    private <T extends ObservationWithResult> void insertObservationsWithResult(
            Stream<T> obsx, String table) throws SQLException {
        int counter = 0;
        StringBuilder sqlBuilder = new StringBuilder();
        sqlBuilder.append("insert into ").append(table)
                .append(" values (?,?,?,?,?,?,?,?,?,?,?)");
        try (PreparedStatement preparedStmt = connection
                .prepareStatement(sqlBuilder.toString())) {
            for (T obx : (Iterable<T>) obsx::iterator) {
                preparedStmt.setString(1, obx.getId());
                preparedStmt.setLong(2, obx.getEncounterId()
                        .longValue());
                preparedStmt.setTimestamp(3, new Timestamp(obx
                        .getTimestamp().getTime()));
                preparedStmt.setString(4, obx.getEntityId());
                preparedStmt.setString(5, obx.getResultAsStr());
                // NOTE(review): assumes getResultAsNum() is never null for
                // result-bearing observations -- confirm; a null here throws
                // NullPointerException, not SQLException.
                preparedStmt.setDouble(6, obx.getResultAsNum()
                        .doubleValue());
                preparedStmt.setString(7, obx.getUnits());
                preparedStmt.setString(8, obx.getFlag());
                preparedStmt.setTimestamp(9,
                        toTimestamp(obx.getCreateDate()));
                preparedStmt.setTimestamp(10,
                        toTimestamp(obx.getUpdateDate()));
                preparedStmt.setTimestamp(11,
                        toTimestamp(obx.getDeleteDate()));
                preparedStmt.addBatch();
                counter++;
                if (counter >= BATCH_SIZE) {
                    flushBatch(preparedStmt);
                    counter = 0;
                }
            }
            flushBatch(preparedStmt);
        } catch (SQLException ex) {
            rollbackQuietly();
            throw ex;
        }
    }

    /**
     * Insert a stream of patients to the data base using the given
     * connection.
     *
     * @param patients The stream of patients to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertPatients(Stream<Patient> patients) throws SQLException {
        int counter = 0;
        try (PreparedStatement preparedStatement = connection
                .prepareStatement("insert into patient values (?,?,?,?,?,?,?,?,?,?,?)")) {
            for (Patient patient : (Iterable<Patient>) patients::iterator) {
                Date dateOfBirth;
                if (patient.getDateOfBirth() == null) {
                    dateOfBirth = null;
                } else {
                    dateOfBirth = new Date(patient.getDateOfBirth().getTime());
                }
                preparedStatement.setLong(1, patient.getId().longValue());
                preparedStatement.setString(2, patient.getFirstName());
                preparedStatement.setString(3, patient.getLastName());
                preparedStatement.setDate(4, dateOfBirth);
                preparedStatement.setString(5, patient.getLanguage());
                preparedStatement.setString(6, patient.getMaritalStatus());
                preparedStatement.setString(7, patient.getRace());
                preparedStatement.setString(8, patient.getGender());
                preparedStatement.setTimestamp(9,
                        toTimestamp(patient.getCreateDate()));
                preparedStatement.setTimestamp(10,
                        toTimestamp(patient.getUpdateDate()));
                preparedStatement.setTimestamp(11,
                        toTimestamp(patient.getDeleteDate()));
                preparedStatement.addBatch();
                counter++;
                if (counter >= BATCH_SIZE) {
                    flushBatch(preparedStatement);
                    counter = 0;
                }
            }
            flushBatch(preparedStatement);
        } catch (SQLException ex) {
            rollbackQuietly();
            throw ex;
        }
    }

    /**
     * Insert the given stream of encounters to a target database using the
     * given connection.
     *
     * @param encounters The stream of encounters to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertEncounters(Stream<Encounter> encounters) throws SQLException {
        int counter = 0;
        try (PreparedStatement preparedStatement = connection
                .prepareStatement("insert into encounter values (?,?,?,?,?,?,?,?,?,?)")) {
            for (Encounter encounter : (Iterable<Encounter>) encounters::iterator) {
                preparedStatement.setLong(1, encounter.getId().longValue());
                preparedStatement.setLong(2, encounter.getPatientId().longValue());
                preparedStatement.setLong(3, encounter.getProviderId().longValue());
                preparedStatement.setTimestamp(4, new Timestamp(encounter
                        .getStart().getTime()));
                preparedStatement.setTimestamp(5, new Timestamp(encounter.getEnd()
                        .getTime()));
                preparedStatement.setString(6, encounter.getType());
                preparedStatement.setString(7, encounter.getDischargeDisposition());
                preparedStatement.setTimestamp(8,
                        toTimestamp(encounter.getCreateDate()));
                preparedStatement.setTimestamp(9,
                        toTimestamp(encounter.getUpdateDate()));
                preparedStatement.setTimestamp(10,
                        toTimestamp(encounter.getDeleteDate()));
                preparedStatement.addBatch();
                counter++;
                if (counter >= BATCH_SIZE) {
                    flushBatch(preparedStatement);
                    counter = 0;
                }
            }
            flushBatch(preparedStatement);
        } catch (SQLException ex) {
            rollbackQuietly();
            throw ex;
        }
    }

    /**
     * Insert the given stream of providers to a target database using the
     * given connection.
     *
     * @param providers The stream of providers to insert.
     * @throws SQLException Thrown if there are any JDBC errors.
     */
    void insertProviders(Stream<Provider> providers) throws SQLException {
        int counter = 0;
        try (PreparedStatement preparedStatement = connection
                .prepareStatement("insert into provider values (?,?,?,?,?,?)")) {
            for (Provider provider : (Iterable<Provider>) providers::iterator) {
                preparedStatement.setLong(1, provider.getId().longValue());
                preparedStatement.setString(2, provider.getFirstName());
                preparedStatement.setString(3, provider.getLastName());
                preparedStatement.setTimestamp(4,
                        toTimestamp(provider.getCreateDate()));
                preparedStatement.setTimestamp(5,
                        toTimestamp(provider.getUpdateDate()));
                preparedStatement.setTimestamp(6,
                        toTimestamp(provider.getDeleteDate()));
                preparedStatement.addBatch();
                counter++;
                if (counter >= BATCH_SIZE) {
                    flushBatch(preparedStatement);
                    counter = 0;
                }
            }
            flushBatch(preparedStatement);
        } catch (SQLException ex) {
            rollbackQuietly();
            throw ex;
        }
    }

    /**
     * Executes and clears the statement's pending batch, committing the
     * transaction. Safe to call with an empty batch.
     */
    private void flushBatch(PreparedStatement statement) throws SQLException {
        statement.executeBatch();
        this.connection.commit();
        statement.clearBatch();
    }

    /**
     * Best-effort rollback; a rollback failure is intentionally suppressed so
     * the original SQLException can be rethrown by the caller.
     */
    private void rollbackQuietly() {
        try {
            this.connection.rollback();
        } catch (SQLException ignore) {
            // suppressed: the triggering exception is more informative
        }
    }

    /** Null-safe conversion from java.util.Date to java.sql.Timestamp. */
    private static Timestamp toTimestamp(java.util.Date date) {
        if (date != null) {
            return new Timestamp(date.getTime());
        } else {
            return null;
        }
    }
}
package com.bitdubai.fermat_dmp_plugin.layer.world.blockchain_info.developer.bitdubai.version_1.structure.api_v_1.blockexplorer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import com.bitdubai.fermat_dmp_plugin.layer.world.blockchain_info.developer.bitdubai.version_1.structure.api_v_1.APIException;
import com.bitdubai.fermat_dmp_plugin.layer.world.blockchain_info.developer.bitdubai.version_1.structure.api_v_1.HttpClient;

/**
 * The BlockExplorer class reflects the functionality documented at
 * https://blockchain.info/api/blockchain_api. It can be used to query the
 * block chain, fetch block, transaction and address data, get unspent
 * outputs for an address etc.
 *
 * All requests carry the optional API code (when configured) as the
 * {@code api_code} query parameter; previously some methods concatenated it
 * unconditionally, producing a literal {@code api_code=null} when no code
 * was set.
 */
public class BlockExplorer {

    private String apiCode;
    // Value of the "testnet" query parameter sent on block and address
    // lookups.
    private String testnet = "true";

    public BlockExplorer() {
        this(null);
    }

    /**
     *
     * @param apiCode Blockchain.info API code (optional, nullable)
     */
    public BlockExplorer(String apiCode) {
        this.apiCode = apiCode;
    }

    /**
     * Builds the base query parameters shared by all requests: the API code,
     * only when one has been configured.
     */
    private Map<String, String> baseParams() {
        Map<String, String> params = new HashMap<String, String>();
        if (apiCode != null) {
            params.put("api_code", apiCode);
        }
        return params;
    }

    /**
     * Gets a single transaction based on a transaction index.
     * @param txIndex Transaction index
     * @return An instance of the {@link Transaction} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public Transaction getTransaction(long txIndex)
            throws APIException, IOException {
        return getTransaction(String.valueOf(txIndex));
    }

    /**
     * Gets a single transaction based on a transaction hash.
     * @param txHash Transaction hash
     * @return An instance of the {@link Transaction} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public Transaction getTransaction(String txHash)
            throws APIException, IOException {
        String response = HttpClient.get("rawtx/" + txHash, baseParams());
        JsonObject txJson = new JsonParser().parse(response).getAsJsonObject();
        return new Transaction(txJson);
    }

    /**
     * Gets a single block based on a block index.
     * @param blockIndex Block index
     * @return An instance of the {@link Block} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public Block getBlock(long blockIndex) throws APIException, IOException {
        return getBlock(String.valueOf(blockIndex));
    }

    /**
     * Gets a single block based on a block hash.
     * @param blockHash Block hash
     * @return An instance of the {@link Block} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public Block getBlock(String blockHash) throws APIException, IOException {
        Map<String, String> params = baseParams();
        params.put("testnet", testnet);
        String response = HttpClient.get("rawblock/" + blockHash, params);
        JsonObject blockJson =
            new JsonParser().parse(response).getAsJsonObject();
        return new Block(blockJson);
    }

    /**
     * Gets data for a single address.
     * @param address Base58check or hash160 address string
     * @return An instance of the {@link Address} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public Address getAddress(String address)
            throws APIException, IOException {
        Map<String, String> params = baseParams();
        params.put("testnet", testnet);
        String response = HttpClient.get("rawaddr/" + address, params);
        JsonObject addrJson =
            new JsonParser().parse(response).getAsJsonObject();
        return new Address(addrJson);
    }

    /**
     * Gets a list of blocks at the specified height. Normally, only one
     * block will be returned, but in case of a chain fork, multiple blocks
     * may be present.
     * @param height Block height
     * @return A list of blocks at the specified height
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public List<Block> getBlocksAtHeight(long height)
            throws APIException, IOException {
        List<Block> blocks = new ArrayList<Block>();

        Map<String, String> params = baseParams();
        params.put("format", "json");

        String response = HttpClient.get("block-height/" + height, params);
        JsonObject blocksJson =
            new JsonParser().parse(response).getAsJsonObject();
        for (JsonElement blockElem : blocksJson.get("blocks").getAsJsonArray()) {
            blocks.add(new Block(blockElem.getAsJsonObject()));
        }

        return blocks;
    }

    /**
     * Gets unspent outputs for a single address.
     * @param address Base58check or hash160 address string
     * @return A list of unspent outputs for the specified address
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public List<UnspentOutput> getUnspentOutputs(String address)
            throws APIException, IOException {
        List<UnspentOutput> outputs = new ArrayList<UnspentOutput>();

        Map<String, String> params = baseParams();
        params.put("active", address);

        String response = null;
        try {
            response = HttpClient.get("unspent", params);
        } catch (APIException e) {
            // the API isn't supposed to return an error code here. No free
            // outputs is a legitimate situation. We are circumventing that
            // by returning an empty list
            if (e.getMessage().equals("No free outputs to spend")) {
                return outputs;
            } else {
                throw e;
            }
        }

        JsonObject outsJson =
            new JsonParser().parse(response).getAsJsonObject();
        for (JsonElement outElem : outsJson.get("unspent_outputs").getAsJsonArray()) {
            outputs.add(new UnspentOutput(outElem.getAsJsonObject()));
        }

        return outputs;
    }

    /**
     * Gets the latest block on the main chain (simplified representation).
     * @return An instance of the {@link LatestBlock} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public LatestBlock getLatestBlock() throws APIException, IOException {
        String response = HttpClient.get("latestblock", baseParams());
        JsonObject blockObj =
            new JsonParser().parse(response).getAsJsonObject();
        return new LatestBlock(blockObj);
    }

    /**
     * Gets a list of currently unconfirmed transactions.
     * @return A list of unconfirmed {@link Transaction} objects
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public List<Transaction> getUnconfirmedTransactions()
            throws APIException, IOException {
        List<Transaction> transactions = new ArrayList<Transaction>();

        Map<String, String> params = baseParams();
        params.put("format", "json");

        String response = HttpClient.get("unconfirmed-transactions", params);
        JsonObject txList = new JsonParser().parse(response).getAsJsonObject();
        for (JsonElement txElem : txList.get("txs").getAsJsonArray()) {
            JsonObject txObj = txElem.getAsJsonObject();
            // Unconfirmed transactions have no block height (-1).
            transactions.add(new Transaction(txObj, -1,
                    txObj.get("double_spend").getAsBoolean()));
        }

        return transactions;
    }

    /**
     * Gets a list of blocks mined today by all pools since 00:00 UTC.
     * @return A list of {@link SimpleBlock} objects
     * @throws APIException APIException If the server returns an error
     * @throws IOException
     */
    public List<SimpleBlock> getBlocks() throws APIException, IOException {
        return getBlocks(null);
    }

    /**
     * Gets a list of blocks mined on a specific day.
     * @param timestamp Unix timestamp (without milliseconds) that falls
     * between 00:00 UTC and 23:59 UTC of the desired day.
     * @return A list of {@link SimpleBlock} objects
     * @throws APIException
     * @throws IOException
     */
    public List<SimpleBlock> getBlocks(long timestamp)
            throws APIException, IOException {
        // The API expects milliseconds.
        return getBlocks(String.valueOf(timestamp * 1000));
    }

    /**
     * Gets a list of recent blocks by a specific mining pool.
     * @param poolName Name of the mining pool
     * @return A list of {@link SimpleBlock} objects
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public List<SimpleBlock> getBlocks(String poolName)
            throws APIException, IOException {
        List<SimpleBlock> blocks = new ArrayList<SimpleBlock>();

        poolName = poolName == null ? "" : poolName;

        Map<String, String> params = baseParams();
        params.put("format", "json");

        String response = HttpClient.get("blocks/" + poolName, params);
        JsonObject blockList =
            new JsonParser().parse(response).getAsJsonObject();
        for (JsonElement blockElem : blockList.get("blocks").getAsJsonArray()) {
            blocks.add(new SimpleBlock(blockElem.getAsJsonObject()));
        }

        return blocks;
    }

    /**
     * Gets inventory data for an object.
     * @param hash Object hash
     * @return An instance of the {@link InventoryData} class
     * @throws APIException If the server returns an error
     * @throws IOException
     */
    public InventoryData getInventoryData(String hash)
            throws APIException, IOException {
        Map<String, String> params = baseParams();
        params.put("format", "json");

        String response = HttpClient.get("inv/" + hash, params);
        JsonObject invObj = new JsonParser().parse(response).getAsJsonObject();
        return new InventoryData(invObj);
    }
}
/*
 * Copyright 2014 Guidewire Software, Inc.
 */
package gw.internal.gosu.runtime;

import gw.config.CommonServices;
import gw.internal.gosu.ir.transform.AbstractElementTransformer;
import gw.internal.gosu.parser.IGosuClassInternal;
import gw.internal.gosu.parser.TypeLord;
import gw.lang.function.IBlock;
import gw.lang.parser.StandardCoercionManager;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.reflect.IExpando;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.IPlaceholder;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IRelativeTypeInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeInfo;
import gw.lang.reflect.ReflectUtil;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.java.IJavaType;
import gw.lang.reflect.java.JavaTypes;
import gw.util.GosuExceptionUtil;

import javax.script.Bindings;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

/**
 * Static helpers invoked by generated Gosu bytecode at runtime: dynamic
 * property access, method dispatch, construction, and misc language support
 * (typeof, print, lock/unlock hooks, etc.).
 */
public class GosuRuntimeMethods
{
  /**
   * Reads a property reflectively. Resolution order: {@link Bindings} map lookup,
   * the {@code $getProperty} dispatch hook, a statically declared property, and
   * finally the {@code $getMissingProperty} hook.
   *
   * @throws IllegalArgumentException if no property or hook handles the name
   */
  public static Object getProperty( Object root, IType type, String propertyName )
  {
    if( root instanceof Bindings )
    {
      return ((Bindings)root).get( propertyName );
    }
    if( isDynamic( type ) )
    {
      // For dynamic/structural types, dispatch on the object's runtime type
      type = TypeSystem.getFromObject( root );
    }
    Object ret = invokePropertyGetter( "$getProperty", root, type, propertyName );
    if( ret != IPlaceholder.UNHANDLED )
    {
      return ret;
    }
    IPropertyInfo propertyInfo = getPropertyInfo( root, type, propertyName );
    if( propertyInfo == null )
    {
      ret = invokePropertyGetter( "$getMissingProperty", root, type, propertyName );
      if( ret == IPlaceholder.UNHANDLED )
      {
        throw new IllegalArgumentException( "No property named " + propertyName + " found on type " + type.getName() );
      }
      return ret;
    }
    return propertyInfo.getAccessor().getValue( root );
  }

  /** A type needs runtime-type dispatch if it is dynamic or a Gosu structure. */
  private static boolean isDynamic( IType type )
  {
    return type != null && (type.isDynamic() || (type instanceof IGosuClass && ((IGosuClass)type).isStructure()));
  }

  /**
   * Invokes the named one-String-arg property-dispatch hook ({@code $getProperty} /
   * {@code $getMissingProperty}), or returns {@link IPlaceholder#UNHANDLED} if the
   * type does not declare it.
   */
  private static Object invokePropertyGetter( String dispatchName, Object root, IType type, String propertyName )
  {
    ITypeInfo typeInfo = type.getTypeInfo();
    IMethodInfo method;
    if( typeInfo instanceof IRelativeTypeInfo )
    {
      method = ((IRelativeTypeInfo)typeInfo).getMethod( type, dispatchName, JavaTypes.STRING() );
    }
    else
    {
      method = typeInfo.getMethod( dispatchName, JavaTypes.STRING() );
    }
    return method == null ? IPlaceholder.UNHANDLED : method.getCallHandler().handleCall( root, propertyName );
  }

  /** Reads a property using the receiver's runtime type for resolution. */
  public static Object getPropertyDynamically( Object rootObject, String propertyName )
  {
    if( rootObject == null )
    {
      throw new NullPointerException();
    }
    return getProperty( rootObject, TypeSystem.getFromObject( rootObject ), propertyName );
  }

  /**
   * Writes a property reflectively; mirror image of {@link #getProperty} using the
   * {@code $setProperty} / {@code $setMissingProperty} hooks.
   *
   * @throws IllegalArgumentException if no property or hook handles the name
   */
  public static void setProperty( Object root, IType type, String propertyName, Object value )
  {
    if( root instanceof Bindings )
    {
      ((Bindings)root).put( propertyName, value );
      return;
    }
    if( isDynamic( type ) )
    {
      type = TypeSystem.getFromObject( root );
    }
    // BUG FIX: value was not being forwarded to the dispatch hooks -- the hook is
    // looked up with signature (String, Object), so the value must be passed along.
    Object ret = invokePropertySetter( "$setProperty", root, type, propertyName, value );
    if( ret != IPlaceholder.UNHANDLED )
    {
      return;
    }
    IPropertyInfo propertyInfo = getPropertyInfo( root, type, propertyName );
    if( propertyInfo == null )
    {
      ret = invokePropertySetter( "$setMissingProperty", root, type, propertyName, value );
      if( ret == IPlaceholder.UNHANDLED )
      {
        throw new IllegalArgumentException( "No property named " + propertyName + " found on type " + type.getName() );
      }
      return;
    }
    propertyInfo.getAccessor().setValue( root, value );
  }

  /**
   * Invokes the named (String, Object) property-setter dispatch hook, or returns
   * {@link IPlaceholder#UNHANDLED} if the type does not declare it.
   */
  private static Object invokePropertySetter( String dispatchName, Object root, IType type, String propertyName, Object... args )
  {
    ITypeInfo typeInfo = type.getTypeInfo();
    IMethodInfo method;
    if( typeInfo instanceof IRelativeTypeInfo )
    {
      method = ((IRelativeTypeInfo)typeInfo).getMethod( type, dispatchName, JavaTypes.STRING(), JavaTypes.OBJECT() );
    }
    else
    {
      method = typeInfo.getMethod( dispatchName, JavaTypes.STRING(), JavaTypes.OBJECT() );
    }
    return method == null ? IPlaceholder.UNHANDLED : method.getCallHandler().handleCall( root, propertyName, args );
  }

  /** Writes a property using the receiver's runtime type for resolution. */
  public static void setPropertyDynamically( Object rootObject, String propertyName, Object value )
  {
    if( rootObject == null )
    {
      throw new NullPointerException();
    }
    setProperty( rootObject, TypeSystem.getFromObject( rootObject ), propertyName, value );
  }

  /**
   * Finds a property first on the statically known type, then falls back to the
   * object's runtime type; returns null if neither declares it.
   */
  private static IPropertyInfo getPropertyInfo( Object rootObject, IType type, String propertyName )
  {
    IPropertyInfo propertyInfo = ReflectUtil.findProperty( type, propertyName );
    if( propertyInfo == null )
    {
      propertyInfo = ReflectUtil.findProperty( TypeSystem.getFromObject( rootObject ), propertyName );
      if( propertyInfo == null )
      {
        return null;
      }
    }
    return propertyInfo;
  }

  /**
   * Recursively populates dimension {@code iDimension} (1-based) of a
   * multi-dimensional array with sub-arrays sized per {@code sizes}.
   *
   * @return {@code instance}, for chaining
   */
  public static Object initMultiArray( IType componentType, Object instance, int iDimension, int[] sizes )
  {
    if( sizes.length <= iDimension-1 )
    {
      return instance;
    }
    int iLength = componentType.getArrayLength( instance );
    componentType = componentType.getComponentType();
    for( int i = 0; i < iLength; i++ )
    {
      Object component = componentType.makeArrayInstance( sizes[iDimension-1] );
      initMultiArray( componentType, component, iDimension + 1, sizes );
      componentType.setArrayComponent( instance, i, component );
    }
    return instance;
  }

  /** Returns the type-system type corresponding to the object's Java class. */
  public static IType getType( Object obj )
  {
    return TypeSystem.get( obj.getClass() );
  }

  /**
   * Constructs an instance of {@code type}, coercing args to the chosen
   * constructor's parameter types and prepending the enclosing instance for
   * non-static inner classes (see {@link #maybeAddOuter}).
   */
  public static Object newInstance( IType type, Object ctx, Object[] args )
  {
    ITypeInfo typeInfo = type.getTypeInfo();
    IConstructorInfo method;
    IType[] runtimeTypes = ReflectUtil.extractRuntimeTypes( args );
    method = typeInfo.getCallableConstructor( runtimeTypes );
    args = ReflectUtil.coerceArgsIfNecessary( method.getParameters(), args );
    args = maybeAddOuter( type, ctx, args );
    return method.getConstructor().newInstance( args );
  }

  /**
   * For a non-static inner class, prepends the appropriate enclosing instance to
   * the constructor args, walking {@code this$0} fields outward from {@code ctx}
   * until the enclosing type matches. Returns args unchanged when no outer
   * instance is needed or one cannot be found.
   */
  private static Object[] maybeAddOuter( IType type, Object ctx, Object[] args )
  {
    if( ctx == null )
    {
      return args;
    }
    if( type instanceof IGosuClassInternal && ((IGosuClassInternal)type).isStatic() )
    {
      return args;
    }
    IType enclosingType = type.getEnclosingType();
    if( enclosingType == null )
    {
      return args;
    }
    IType outerType = TypeLord.getPureGenericType( TypeSystem.getFromObject( ctx ) );
    enclosingType = TypeLord.getPureGenericType( enclosingType );
    while( outerType != enclosingType )
    {
      try
      {
        Field outerThis = ctx.getClass().getDeclaredField( "this$0" );
        outerThis.setAccessible( true );
        ctx = outerThis.get( ctx );
      }
      catch( Exception e )
      {
        // No (further) synthetic outer ref; give up and use the args as-is
        return args;
      }
      outerType = TypeLord.getPureGenericType( TypeSystem.getFromObject( ctx ) );
    }
    Object[] args2 = new Object[args.length + 1];
    args2[0] = ctx;
    System.arraycopy( args, 0, args2, 1, args.length );
    return args2;
  }

  /** Invokes a declared Java method reflectively, unwrapping target exceptions. */
  public static Object invokeMethod( Class c, String methodName, Class[] argTypes, Object root, Object[] args )
  {
    Method declaredMethod = AbstractElementTransformer.getDeclaredMethod( c, methodName, argTypes );
    try
    {
      return declaredMethod.invoke( root, args );
    }
    catch( IllegalAccessException e )
    {
      throw GosuExceptionUtil.forceThrow( e );
    }
    catch( InvocationTargetException e )
    {
      // Re-throw the underlying cause, not the reflective wrapper
      throw GosuExceptionUtil.forceThrow( e.getTargetException() );
    }
  }

  /**
   * Dispatches a method call. Resolution order: expando, Bindings-held block,
   * {@code $invokeMethod} hook, declared method, {@code $invokeMissingMethod} hook.
   *
   * @throws IllegalStateException if nothing handles the call
   */
  public static Object invokeMethodInfo( IType type, String methodName, IType[] parameterTypes, Object root, Object[] args )
  {
    if( root instanceof IExpando )
    {
      Object ret = ((IExpando)root).invoke( methodName, args );
      if( ret != IPlaceholder.UNHANDLED )
      {
        return ret;
      }
    }
    if( root instanceof Bindings )
    {
      Object ret = invoke( ((Bindings)root).get( methodName ), args );
      if( ret != IPlaceholder.UNHANDLED )
      {
        return ret;
      }
    }
    boolean bDynamicType = isDynamic( type );
    if( bDynamicType )
    {
      type = TypeSystem.getFromObject( root );
    }
    Object ret = invokeMethodInvoker( "$invokeMethod", root, type, methodName, args );
    if( ret != IPlaceholder.UNHANDLED )
    {
      return ret;
    }
    ITypeInfo typeInfo = type.getTypeInfo();
    IMethodInfo method;
    if( bDynamicType )
    {
      IType[] runtimeTypes = ReflectUtil.extractRuntimeTypes( args );
      method = ReflectUtil.findCallableMethod( methodName, runtimeTypes, type );
    }
    else
    {
      parameterTypes = replaceDynamicTypesWithRuntimeTypes( parameterTypes, args );
      if( typeInfo instanceof IRelativeTypeInfo )
      {
        method = ((IRelativeTypeInfo)typeInfo).getMethod( type, methodName, parameterTypes );
      }
      else
      {
        method = typeInfo.getMethod( methodName, parameterTypes );
      }
    }
    if( method == null )
    {
      ret = invokeMethodInvoker( "$invokeMissingMethod", root, type, methodName, args );
      if( ret == IPlaceholder.UNHANDLED )
      {
        throw new IllegalStateException( "Could not find method for " + methodName + " on " + type.getName() + " with specified param types" );
      }
      return ret;
    }
    if( bDynamicType )
    {
      args = ReflectUtil.coerceArgsIfNecessary( method.getParameters(), args );
    }
    return method.getCallHandler().handleCall( root, args );
  }

  /** Invokes {@code o} as a block if it is one; otherwise UNHANDLED. */
  private static Object invoke( Object o, Object[] args )
  {
    if( o instanceof IBlock )
    {
      return ((IBlock)o).invokeWithArgs( args );
    }
    return IPlaceholder.UNHANDLED;
  }

  /**
   * Replaces placeholder (dynamic) parameter types with the runtime types of the
   * corresponding args. Copies lazily; returns the original array when nothing
   * needed replacing.
   */
  private static IType[] replaceDynamicTypesWithRuntimeTypes( IType[] parameterTypes, Object[] args )
  {
    if( parameterTypes == null )
    {
      return null;
    }
    IType[] ret = null;
    for( int i = 0; i < parameterTypes.length; i++ )
    {
      IType type = parameterTypes[i];
      if( type instanceof IPlaceholder && ((IPlaceholder)type).isPlaceholder() )
      {
        if( ret == null )
        {
          ret = new IType[parameterTypes.length];
          System.arraycopy( parameterTypes, 0, ret, 0, ret.length );
        }
        // A null arg carries no runtime type; keep the placeholder
        ret[i] = args[i] == null ? ret[i] : TypeSystem.getFromObject( args[i] );
      }
    }
    return ret == null ? parameterTypes : ret;
  }

  /**
   * Invokes the named (String, Object[]) method-dispatch hook, or returns
   * {@link IPlaceholder#UNHANDLED} if the type does not declare it.
   */
  private static Object invokeMethodInvoker( String dispatchName, Object root, IType type, String methodName, Object... args )
  {
    ITypeInfo typeInfo = type.getTypeInfo();
    IMethodInfo method;
    if( typeInfo instanceof IRelativeTypeInfo )
    {
      method = ((IRelativeTypeInfo)typeInfo).getMethod( type, dispatchName, JavaTypes.STRING(), JavaTypes.OBJECT().getArrayType() );
    }
    else
    {
      method = typeInfo.getMethod( dispatchName, JavaTypes.STRING(), JavaTypes.OBJECT().getArrayType() );
    }
    return method == null ? IPlaceholder.UNHANDLED : method.getCallHandler().handleCall( root, methodName, args );
  }

  /**
   * Loads a class by name, accepting JVM descriptor form ({@code Lfoo/Bar;}) as
   * well as dotted names; the class is not initialized.
   */
  public static Class lookUpClass( String className )
  {
    if( className.startsWith( "L" ) && className.endsWith( ";" ) )
    {
      className = className.substring( 1, className.length() - 1 );
    }
    className = className.replaceAll( "/", "." );
    try
    {
      return Class.forName( className, false, GosuRuntimeMethods.class.getClassLoader() );
    }
    catch( ClassNotFoundException e )
    {
      throw GosuExceptionUtil.forceThrow( e );
    }
  }

  /** Calls a no-arg {@code lock()} method on the object, if it declares one. */
  public static void invokeLockMethod( Object o )
  {
    if( o != null )
    {
      IMethodInfo iMethodInfo = TypeSystem.getFromObject( o ).getTypeInfo().getMethod( "lock" );
      if( iMethodInfo != null )
      {
        iMethodInfo.getCallHandler().handleCall( o );
      }
    }
  }

  /** Runtime implementation of the {@code typeof} operator. */
  public static IType typeof( Object o )
  {
    IType type = TypeSystem.getFromObject( o );
    if( type instanceof IJavaType && type.isGenericType() )
    {
      // Never return a generic type resulting from Java's generic type erasure.
      // Instead return the "erased" or default type.
      type = TypeLord.getDefaultParameterizedType( type );
    }
    return type;
  }

  /** Logical NOT with boolean coercion for non-Boolean operands. */
  public static boolean logicalNot( Object o )
  {
    if( o instanceof Boolean )
    {
      return !((Boolean)o).booleanValue();
    }
    return !CommonServices.getCoercionManager().makePrimitiveBooleanFrom( o );
  }

  /**
   * Calls the first of {@code unlock()}, {@code dispose()}, {@code close()} that
   * the object declares; silently does nothing if none exist or o is null.
   */
  public static void invokeUnlockOrDisposeOrCloseMethod( Object o )
  {
    if( o != null )
    {
      ITypeInfo ti = TypeSystem.getFromObject( o ).getTypeInfo();
      IMethodInfo mi = ti.getMethod( "unlock" );
      if( mi != null )
      {
        mi.getCallHandler().handleCall( o );
      }
      else
      {
        mi = ti.getMethod( "dispose" );
        if( mi != null )
        {
          mi.getCallHandler().handleCall( o );
        }
        else
        {
          mi = ti.getMethod( "close" );
          if( mi != null )
          {
            mi.getCallHandler().handleCall( o );
          }
        }
      }
    }
  }

  /** True if fromType is nominally or structurally assignable to toType. */
  @SuppressWarnings("UnusedDeclaration")
  public static boolean isStructurallyAssignable( IType toType, IType fromType )
  {
    if( toType == null || fromType == null )
    {
      return false;
    }
    //noinspection SimplifiableIfStatement
    if( toType.isAssignableFrom( fromType ) )
    {
      // Nominal assignability check first
      return true;
    }
    return StandardCoercionManager.isStructurallyAssignable( toType, fromType );
  }

  /** Runtime implementation of {@code print}. */
  public static void print( Object obj )
  {
    System.out.println( toString( obj ) );
  }

  /**
   * Gosu-style string conversion: "null" for null, unsigned hex for Byte,
   * bracketed element list for arrays, otherwise {@code Object#toString}.
   */
  public static String toString( Object obj )
  {
    if( obj == null )
    {
      return "null";
    }
    if( obj instanceof Byte )
    {
      int value = (Byte)obj;
      if( value < 0 )
      {
        // Render the byte as its unsigned 0..255 value
        value = 256 + value;
      }
      return "0x" + Integer.toHexString( value );
    }
    IType type = TypeSystem.getFromObject( obj );
    if( type.isArray() )
    {
      StringBuilder sb = new StringBuilder();
      sb.append( '[' );
      int arrayLength = type.getArrayLength( obj );
      for( int idx = 0; idx < arrayLength; idx++ )
      {
        if( idx > 0 )
        {
          sb.append( ", " );
        }
        sb.append( toString( type.getArrayComponent( obj, idx ) ) );
      }
      sb.append( ']' );
      return sb.toString();
    }
    return obj.toString();
  }

  /** Prints the message, then throws an {@link Error} carrying it. */
  public static void error( Object strError )
  {
    System.out.println( strError );
    throw new Error( String.valueOf( strError ) );
  }

  /**
   * Resolves the concrete type bound to the {@code iIndex}-th type variable of
   * {@code iface} as parameterized by {@code ref}'s runtime type.
   */
  public static IType getTypeForTypeVar( Object ref, IType iface, int iIndex )
  {
    IType rtType = TypeSystem.getFromObject( ref );
    IType rtIface = TypeSystem.findParameterizedType( rtType, iface );
    return rtIface.getTypeParameters()[iIndex];
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.olingo.server.core.deserializer.batch;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.List;

import org.apache.olingo.commons.api.http.HttpHeader;
import org.apache.olingo.server.api.deserializer.batch.BatchDeserializerException;
import org.junit.Assert;
import org.junit.Test;

/**
 * Unit tests for {@code BatchParserCommon}: header consumption/merging
 * ({@code consumeHeaders}), boundary extraction from a multipart Content-Type
 * ({@code getBoundary}), and trailing-CRLF trimming ({@code removeEndingCRLF}).
 */
public class BatchParserCommonTest {

  private static final String CRLF = "\r\n";
  private static final String MULTIPART_MIXED = "multipart/mixed";

  // Repeated headers with distinct values are collected as separate entries.
  @Test
  public void multipleHeaders() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Content-Id: 1" + CRLF,
        "Content-Id: 2" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> contentIdHeaders = header.getHeaders(HttpHeader.CONTENT_ID);
    assertNotNull(contentIdHeaders);
    assertEquals(2, contentIdHeaders.size());
    assertEquals("1", contentIdHeaders.get(0));
    assertEquals("2", contentIdHeaders.get(1));
  }

  // Repeated headers with the same value are de-duplicated into one entry.
  @Test
  public void multipleHeadersSameValue() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Content-Id: 1" + CRLF,
        "Content-Id: 1" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> contentIdHeaders = header.getHeaders(HttpHeader.CONTENT_ID);
    assertNotNull(contentIdHeaders);
    assertEquals(1, contentIdHeaders.size());
    assertEquals("1", contentIdHeaders.get(0));
  }

  // A comma-separated header value is split into individual values;
  // lookup is case-insensitive ("Upgrade" queried as "upgrade").
  @Test
  public void headersSeparatedByComma() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Content-Id: 1" + CRLF,
        "Upgrade: HTTP/2.0, SHTTP/1.3, IRC/6.9, RTA/x11" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> upgradeHeader = header.getHeaders("upgrade");
    assertNotNull(upgradeHeader);
    assertEquals(4, upgradeHeader.size());
    assertEquals("HTTP/2.0", upgradeHeader.get(0));
    assertEquals("SHTTP/1.3", upgradeHeader.get(1));
    assertEquals("IRC/6.9", upgradeHeader.get(2));
    assertEquals("RTA/x11", upgradeHeader.get(3));
  }

  // Accept values from multiple lines are merged; commas inside a line split,
  // but q/odata parameters (semicolon-separated) stay attached to their value.
  @Test
  public void multipleAcceptHeaders() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Accept: application/atomsvc+xml;q=0.8, application/json;odata=verbose;q=0.5, */*;q=0.1" + CRLF,
        "Accept: text/plain;q=0.3" + CRLF,
        "Accept-Language:en-US,en;q=0.7,en-UK;q=0.9" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> acceptHeader = header.getHeaders(HttpHeader.ACCEPT);
    assertNotNull(acceptHeader);
    assertEquals(4, acceptHeader.size());
  }

  // A duplicated Accept value across lines counts only once (3, not 4).
  @Test
  public void multipleAcceptHeadersSameValue() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Accept: application/atomsvc+xml;q=0.8, application/json;odata=verbose;q=0.5, */*;q=0.1" + CRLF,
        "Accept: application/atomsvc+xml;q=0.8" + CRLF,
        "Accept-Language:en-US,en;q=0.7,en-UK;q=0.9" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> acceptHeader = header.getHeaders(HttpHeader.ACCEPT);
    assertNotNull(acceptHeader);
    assertEquals(3, acceptHeader.size());
  }

  // Accept-Language values from multiple lines are merged (3 + 1 = 4).
  @Test
  public void multipleAcceptLanguageHeaders() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Accept-Language:en-US,en;q=0.7,en-UK;q=0.9" + CRLF,
        "Accept-Language: de-DE;q=0.3" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> acceptLanguageHeader = header.getHeaders(HttpHeader.ACCEPT_LANGUAGE);
    assertNotNull(acceptLanguageHeader);
    assertEquals(4, acceptLanguageHeader.size());
  }

  // Overlapping Accept-Language values across lines are de-duplicated.
  @Test
  public void multipleAcceptLanguageHeadersSameValue() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Accept-Language:en-US,en;q=0.7,en-UK;q=0.9" + CRLF,
        "Accept-Language:en-US,en;q=0.7" + CRLF,
        "content-type: Application/http" + CRLF,
        "content-transfer-encoding: Binary" + CRLF));

    assertNotNull(header);
    final List<String> acceptLanguageHeader = header.getHeaders(HttpHeader.ACCEPT_LANGUAGE);
    assertNotNull(acceptLanguageHeader);
    assertEquals(3, acceptLanguageHeader.size());
  }

  // Header names may use any RFC 7230 token characters, not just letters.
  @Test
  public void headersWithSpecialNames() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList(
        "Test0123456789: 42" + CRLF,
        "a_b: c/d" + CRLF,
        "!#$%&'*+-.^_`|~: weird" + CRLF));

    assertNotNull(header);
    assertTrue(header.exists("Test0123456789"));
    assertTrue(header.exists("a_b"));
    assertTrue(header.exists("!#$%&'*+-.^_`|~"));
    assertEquals("weird", header.getHeader("!#$%&'*+-.^_`|~"));
  }

  // A comma is not a valid token character, so "a,b" is rejected as a name
  // and the resulting Header is empty.
  @Test
  public void headerWithWrongName() throws Exception {
    final Header header = BatchParserCommon.consumeHeaders(toLineList("a,b: c/d" + CRLF));

    assertNotNull(header);
    assertFalse(header.iterator().hasNext());
  }

  // Unquoted boundary parameter; trailing whitespace is tolerated.
  @Test
  public void boundaryParameter() throws Exception {
    final String boundary = "boundary";
    final String contentType = MULTIPART_MIXED + "; boundary=" + boundary + " ";

    Assert.assertEquals(boundary, BatchParserCommon.getBoundary(contentType, 0));
  }

  // Quoting allows characters that are illegal in an unquoted boundary.
  @Test
  public void boundaryParameterWithQuotes() throws Exception {
    final String boundary = "batch_1.2+34:2j)0?";
    final String contentType = MULTIPART_MIXED + "; boundary=\"" + boundary + "\"";

    Assert.assertEquals(boundary, BatchParserCommon.getBoundary(contentType, 0));
  }

  // Whitespace inside a quoted boundary is preserved verbatim.
  @Test
  public void boundaryParameterWithSpaces() throws Exception {
    final String boundary = " boundary";
    final String contentType = MULTIPART_MIXED + "; boundary=\"" + boundary + "\" ";

    Assert.assertEquals(boundary, BatchParserCommon.getBoundary(contentType, 0));
  }

  // A Content-Type other than multipart/mixed must be rejected.
  @Test
  public void invalidContentType() throws Exception {
    invalidBoundary("multipart;boundary=BOUNDARY", BatchDeserializerException.MessageKeys.INVALID_CONTENT_TYPE);
  }

  // Additional parameters (charset) before boundary do not disturb extraction.
  @Test
  public void contentTypeCharset() throws Exception {
    final String contentType = MULTIPART_MIXED + "; charset=UTF-8;boundary=" + BatchParserCommon.BOUNDARY;

    final String boundary = BatchParserCommon.getBoundary(contentType, 0);

    Assert.assertEquals(BatchParserCommon.BOUNDARY, boundary);
  }

  // multipart/mixed without any boundary parameter must be rejected.
  @Test
  public void withoutBoundaryParameter() throws Exception {
    invalidBoundary(MULTIPART_MIXED, BatchDeserializerException.MessageKeys.MISSING_BOUNDARY_DELIMITER);
  }

  // An unquoted boundary containing a ':' is invalid (colon needs quoting).
  @Test
  public void boundaryParameterWithoutQuote() throws Exception {
    invalidBoundary(MULTIPART_MIXED + ";boundary=batch_1740-bb:84-2f7f",
        BatchDeserializerException.MessageKeys.INVALID_BOUNDARY);
  }

  // An empty quoted boundary is invalid.
  @Test
  public void boundaryEmpty() throws Exception {
    invalidBoundary(MULTIPART_MIXED + ";boundary=\"\"", BatchDeserializerException.MessageKeys.INVALID_BOUNDARY);
  }

  // A whitespace-only quoted boundary is invalid.
  @Test
  public void boundarySpace() throws Exception {
    invalidBoundary(MULTIPART_MIXED + ";boundary=\" \"", BatchDeserializerException.MessageKeys.INVALID_BOUNDARY);
  }

  // One trailing CRLF is stripped.
  @Test
  public void removeEndingCRLF() {
    String line = "Test" + CRLF;
    assertEquals("Test", BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // Only the LAST CRLF is stripped; earlier ones remain.
  @Test
  public void removeLastEndingCRLF() {
    String line = "Test" + CRLF + CRLF;
    assertEquals("Test" + CRLF, BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // Whitespace after the final CRLF is removed along with it.
  @Test
  public void removeEndingCRLFWithWS() {
    String line = "Test" + CRLF + " ";
    assertEquals("Test", BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // A CRLF in the middle of the line (not at the end) is untouched.
  @Test
  public void removeEndingCRLFNothingToRemove() {
    String line = "Hallo" + CRLF + "Bla";
    assertEquals(line, BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // A line consisting only of CRLF becomes empty.
  @Test
  public void removeEndingCRLFAll() {
    String line = CRLF;
    assertEquals("", BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // CRLF plus trailing whitespace also collapses to empty.
  @Test
  public void removeEndingCRLFSpace() {
    String line = CRLF + " ";
    assertEquals("", BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // Whitespace BEFORE the trailing CRLF is preserved.
  @Test
  public void removeLastEndingCRLFWithWS() {
    String line = "Test " + CRLF;
    assertEquals("Test ", BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // Multi-segment content keeps interior CRLFs; only the final one is removed.
  @Test
  public void removeLastEndingCRLFWithWSLong() {
    String line = "Test " + CRLF + "Test2 " + CRLF;
    assertEquals("Test " + CRLF + "Test2 ", BatchParserCommon.removeEndingCRLF(new Line(line, 1)).toString());
  }

  // Wraps raw strings into Line objects with 1-based line numbers.
  private List<Line> toLineList(final String... messageRaw) {
    final List<Line> lineList = new ArrayList<Line>();
    int counter = 1;

    for (final String currentLine : messageRaw) {
      lineList.add(new Line(currentLine, counter++));
    }

    return lineList;
  }

  // Asserts that getBoundary rejects the content type with the expected
  // BatchDeserializerException message key.
  private void invalidBoundary(final String contentType, final BatchDeserializerException.MessageKeys messageKey)
      throws Exception {
    try {
      BatchParserCommon.getBoundary(contentType, 0);
      Assert.fail("Expected exception not thrown.");
    } catch (final BatchDeserializerException e) {
      Assert.assertEquals(messageKey, e.getMessageKey());
    }
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package tech.hobbs.hlfdocmgmntsystem.model.student;

import java.io.Serializable;
import java.util.Date;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.springframework.context.annotation.Profile;
import tech.hobbs.hlfdocmgmntsystem.model.activity.ActivityImage;
import tech.hobbs.hlfdocmgmntsystem.model.activity.ActivityVideo;
import tech.hobbs.hlfdocmgmntsystem.model.documents.ActivityProposal;
import tech.hobbs.hlfdocmgmntsystem.model.documents.ActivityReport;
import tech.hobbs.hlfdocmgmntsystem.model.documents.ProgressReport;
import tech.hobbs.hlfdocmgmntsystem.model.documents.Quotation;
import tech.hobbs.hlfdocmgmntsystem.model.security.User;
import tech.hobbs.hlfdocmgmntsystem.model.security.UserProfile;

/**
 * JPA entity for the {@code students} table. The primary key is the file
 * number ({@code fileno}), not the numeric {@code id} column; equality and
 * hash code are based on {@code fileno} alone. All child-document collections
 * are mapped lazily and cascade all operations.
 *
 * @author Wilson Chiviti
 */
@Entity
@Table(name = "students")
@NamedQueries({
    @NamedQuery(name = "Student.findAll", query = "SELECT s FROM Student s"),
    @NamedQuery(name = "Student.findById", query = "SELECT s FROM Student s WHERE s.id = :id"),
    @NamedQuery(name = "Student.findByFileno", query = "SELECT s FROM Student s WHERE s.fileno = :fileno"),
    @NamedQuery(name = "Student.findByName", query = "SELECT s FROM Student s WHERE s.name = :name"),
    @NamedQuery(name = "Student.findBySurname", query = "SELECT s FROM Student s WHERE s.surname = :surname"),
    @NamedQuery(name = "Student.findByDob", query = "SELECT s FROM Student s WHERE s.dob = :dob"),
    @NamedQuery(name = "Student.findByCellNumber", query = "SELECT s FROM Student s WHERE s.cellNumber = :cellNumber"),
    @NamedQuery(name = "Student.findByEmailAddress", query = "SELECT s FROM Student s WHERE s.emailAddress = :emailAddress"),
    @NamedQuery(name = "Student.findByAge", query = "SELECT s FROM Student s WHERE s.age = :age")})
public class Student implements Serializable {

    private static final long serialVersionUID = 1L;

    // NOTE(review): "id" is NOT the @Id of this entity -- fileno below is.
    // Presumably a legacy/display identifier; confirm against the schema.
    @Basic(optional = false)
    @NotNull
    @Column(name = "id")
    private int id;

    // Primary key: the student's file number.
    @Id
    @Basic(optional = false)
    @NotNull
    @Size(min = 1, max = 50)
    @Column(name = "fileno")
    private String fileno;

    @Basic(optional = false)
    @NotNull
    @Size(min = 1, max = 300)
    @Column(name = "name")
    private String name;

    @Basic(optional = false)
    @NotNull
    @Size(min = 1, max = 300)
    @Column(name = "surname")
    private String surname;

    @Basic(optional = false)
    @NotNull
    @Column(name = "dob")
    @Temporal(TemporalType.DATE)
    private Date dob;

    @Size(max = 50)
    @Column(name = "cell_number")
    private String cellNumber;

    @Size(max = 100)
    @Column(name = "email_address")
    private String emailAddress;

    @Basic(optional = false)
    @NotNull
    @Column(name = "age")
    private int age;

    @ManyToMany(mappedBy = "studentList", fetch = FetchType.LAZY)
    private List<UserProfile> profileList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<ActivityImage> activityImageList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<ProgressReport> progressReportList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<TertiaryStudent> tertiaryStudentList;

    // FIX: mappedBy was "fileo" -- inconsistent with every other inverse
    // mapping in this entity, which all reference "fileno". Verify that the
    // Quotation entity's back-reference field is indeed named "fileno".
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<Quotation> quotationList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<ActivityVideo> activityVideoList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<ActivityProposal> activityProposalList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<StudentResult> studentResultList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<User> userList;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "fileno", fetch = FetchType.LAZY)
    private List<ActivityReport> activityReportList;

    /** Required no-arg constructor for JPA. */
    public Student() {
    }

    /** Creates a student identified only by its primary key. */
    public Student(String fileno) {
        this.fileno = fileno;
    }

    /** Creates a student with all mandatory (non-null) columns populated. */
    public Student(String fileno, int id, String name, String surname, Date dob, int age) {
        this.fileno = fileno;
        this.id = id;
        this.name = name;
        this.surname = surname;
        this.dob = dob;
        this.age = age;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getFileno() {
        return fileno;
    }

    public void setFileno(String fileno) {
        this.fileno = fileno;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getSurname() {
        return surname;
    }

    public void setSurname(String surname) {
        this.surname = surname;
    }

    public Date getDob() {
        return dob;
    }

    public void setDob(Date dob) {
        this.dob = dob;
    }

    public String getCellNumber() {
        return cellNumber;
    }

    public void setCellNumber(String cellNumber) {
        this.cellNumber = cellNumber;
    }

    public String getEmailAddress() {
        return emailAddress;
    }

    public void setEmailAddress(String emailAddress) {
        this.emailAddress = emailAddress;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public List<UserProfile> getProfileList() {
        return profileList;
    }

    public void setProfileList(List<UserProfile> profileList) {
        this.profileList = profileList;
    }

    public List<ActivityImage> getActivityImageList() {
        return activityImageList;
    }

    public void setActivityImageList(List<ActivityImage> activityImageList) {
        this.activityImageList = activityImageList;
    }

    public List<ProgressReport> getProgressReportList() {
        return progressReportList;
    }

    public void setProgressReportList(List<ProgressReport> progressReportList) {
        this.progressReportList = progressReportList;
    }

    public List<TertiaryStudent> getTertiaryStudentList() {
        return tertiaryStudentList;
    }

    public void setTertiaryStudentList(List<TertiaryStudent> tertiaryStudentList) {
        this.tertiaryStudentList = tertiaryStudentList;
    }

    public List<Quotation> getQuotationList() {
        return quotationList;
    }

    public void setQuotationList(List<Quotation> quotationList) {
        this.quotationList = quotationList;
    }

    public List<ActivityVideo> getActivityVideoList() {
        return activityVideoList;
    }

    public void setActivityVideoList(List<ActivityVideo> activityVideoList) {
        this.activityVideoList = activityVideoList;
    }

    public List<ActivityProposal> getActivityProposalList() {
        return activityProposalList;
    }

    public void setActivityProposalList(List<ActivityProposal> activityProposalList) {
        this.activityProposalList = activityProposalList;
    }

    public List<StudentResult> getStudentResultList() {
        return studentResultList;
    }

    public void setStudentResultList(List<StudentResult> studentResultList) {
        this.studentResultList = studentResultList;
    }

    public List<User> getUserList() {
        return userList;
    }

    public void setUserList(List<User> userList) {
        this.userList = userList;
    }

    public List<ActivityReport> getActivityReportList() {
        return activityReportList;
    }

    public void setActivityReportList(List<ActivityReport> activityReportList) {
        this.activityReportList = activityReportList;
    }

    /** Hash code derived solely from the primary key (fileno). */
    @Override
    public int hashCode() {
        int hash = 0;
        hash += (fileno != null ? fileno.hashCode() : 0);
        return hash;
    }

    /** Equality based solely on the primary key (fileno). */
    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Student)) {
            return false;
        }
        Student other = (Student) object;
        if ((this.fileno == null && other.fileno != null)
                || (this.fileno != null && !this.fileno.equals(other.fileno))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "tech.hobbs.hlfdocmgmntsystem.model.Student[ fileno=" + fileno + " ]";
    }
}
/* * Copyright 2014-2016 Samsung Research America, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.samsung.sjs.constraintsolver; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Consumer; import org.mozilla.javascript.ast.AstNode; import org.mozilla.javascript.ast.NewExpression; import org.mozilla.javascript.ast.NumberLiteral; import org.mozilla.javascript.ast.StringLiteral; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.ibm.wala.util.CancelException; import com.ibm.wala.util.collections.HashMapFactory; import com.ibm.wala.util.collections.HashSetFactory; import com.ibm.wala.util.collections.Pair; import com.samsung.sjs.BasicSourceLocation; import com.samsung.sjs.SourceLocation; import com.samsung.sjs.constraintgenerator.ConstraintFactory; import com.samsung.sjs.constraintgenerator.ConstraintGenUtil; import com.samsung.sjs.constraintgenerator.ConstraintGenerator; import com.samsung.sjs.typeconstraints.ArrayLiteralTerm; import com.samsung.sjs.typeconstraints.ExpressionTerm; import com.samsung.sjs.typeconstraints.FunctionCallTerm; import com.samsung.sjs.typeconstraints.FunctionParamTerm; import com.samsung.sjs.typeconstraints.FunctionReturnTerm; import com.samsung.sjs.typeconstraints.IConstraint; import com.samsung.sjs.typeconstraints.ITypeConstraint; import 
com.samsung.sjs.typeconstraints.ITypeTerm; import com.samsung.sjs.typeconstraints.IndexedTerm; import com.samsung.sjs.typeconstraints.InheritPropsConstraint; import com.samsung.sjs.typeconstraints.MROMRWConstraint; import com.samsung.sjs.typeconstraints.MapLiteralTerm; import com.samsung.sjs.typeconstraints.MethodReceiverTerm; import com.samsung.sjs.typeconstraints.ObjectLiteralTerm; import com.samsung.sjs.typeconstraints.PropertyAccessTerm; import com.samsung.sjs.typeconstraints.ProtoConstraint; import com.samsung.sjs.typeconstraints.ProtoParentTerm; import com.samsung.sjs.typeconstraints.ProtoTerm; import com.samsung.sjs.typeconstraints.SubTypeConstraint; import com.samsung.sjs.typeconstraints.TypeConstantTerm; import com.samsung.sjs.typeconstraints.TypeEqualityConstraint; import com.samsung.sjs.typeconstraints.TypeVariableTerm; import com.samsung.sjs.typeconstraints.UpperBoundConstraint; import com.samsung.sjs.types.AnyType; import com.samsung.sjs.types.ArrayType; import com.samsung.sjs.types.CodeType; import com.samsung.sjs.types.ConstructorType; import com.samsung.sjs.types.FunctionType; import com.samsung.sjs.types.IntegerType; import com.samsung.sjs.types.IntersectionType; import com.samsung.sjs.types.MapType; import com.samsung.sjs.types.ObjectType; import com.samsung.sjs.types.Property; import com.samsung.sjs.types.Type; import com.samsung.sjs.types.TypeVar; import com.samsung.sjs.types.UnattachedMethodType; import com.samsung.sjs.types.UnknownIndexableType; /** * Directional solver for type constraints, based on WALA's constraint solving engine. * * Created by schandra on 3/11/15. 
*/
public class DirectionalConstraintSolver {

    /** Initial type constraints to solve; augmented in place by {@link #augmentConstraints()}. */
    private final Set<ITypeConstraint> constraints;

    /** MRO/MRW constraints collected during augmentation; fed to the fixed-point solver. */
    private final Set<MROMRWConstraint> mroMRWConstraints = HashSetFactory.make();

    /** The underlying WALA-based fixed-point engine that actually solves the constraints. */
    private final TypeConstraintFixedPointSolver fixedpointSolver;

    /** Factory used to create fresh type variables and find-or-create constraint terms. */
    private final ConstraintFactory factory;

    /** Maps each constraint to the source line number(s) it was generated from. */
    private final Map<IConstraint,Set<Integer>> sourceMapping;

    // NOTE(review): currently unused, per the annotation; kept around presumably for
    // debugging/diagnostics — confirm before removing.
    @SuppressWarnings("unused")
    private final Map<ITypeTerm,Set<Integer>> termMapping;

    /** The cause (source constraint) attributed to constraints added while processing it. */
    private Cause currentCause;

    /** For each augmented constraint, the original constraint that caused its creation. */
    public final Map<ITypeConstraint, Cause> causesByConstraint = new LinkedHashMap<>();

    private static Logger logger = LoggerFactory.getLogger(DirectionalConstraintSolver.class);

    /**
     * Creates a solver over the generator's constraints.
     *
     * @param constraints the constraint set to solve (mutated by augmentation)
     * @param factory factory for terms and fresh type variables
     * @param generator supplies the constraint-to-source and term-to-source mappings
     */
    public DirectionalConstraintSolver(Set<ITypeConstraint> constraints,
                                       ConstraintFactory factory,
                                       ConstraintGenerator generator) {
        this.constraints = constraints;
        this.factory = factory;
        this.sourceMapping = HashMapFactory.make(generator.getSourceMapping());
        this.termMapping = HashMapFactory.make(generator.getTermMapping());
        this.fixedpointSolver = new TypeConstraintFixedPointSolver(constraints,
                causesByConstraint, mroMRWConstraints, factory, sourceMapping);
    }

    /**
     * Augments the constraints and runs the fixed-point solver.
     *
     * @return the computed type assignment
     * @throws RuntimeException wrapping WALA's {@link CancelException} if solving is cancelled
     */
    public TypeAssignment solve() {
        augmentConstraints();
        try {
            return fixedpointSolver.solve();
        } catch (CancelException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Augment the initial constraints with the derived constraints the fixed-point
     * engine needs (upper bounds for property accesses, function/constructor shapes,
     * prototype handling, etc.).  This should move into the previous passes.
     */
    public void augmentConstraints() {
        Set<ITypeConstraint> moreConstraints = new LinkedHashSet<ITypeConstraint>();
        // Sink for newly-generated constraints: type constraints are buffered (and tagged
        // with currentCause) to avoid mutating 'constraints' while iterating it;
        // MRO/MRW constraints go straight into their own set.
        Consumer<IConstraint> constraintAdder = (IConstraint c) -> {
            logger.debug("adding {}", c);
            if (c instanceof ITypeConstraint) {
                moreConstraints.add((ITypeConstraint) c);
                causesByConstraint.put((ITypeConstraint) c, currentCause);
            } else {
                mroMRWConstraints.add((MROMRWConstraint) c);
            }
        };
        // terms for which we have already added the appropriate index type constraints
        Set<ITypeTerm> constrainedIndexTerms = HashSetFactory.make();
        // terms for which we have already added the appropriate prop term constraints
        Map<PropertyAccessTerm,TypeVariableTerm> propTerm2TypeVar = HashMapFactory.make();
        // terms for which we have already added the appropriate function type constraints
        // we need a term *and* an arity, due to intersection types
        Set<Pair<ITypeTerm,Integer>> constrainedFunctionTerms = HashSetFactory.make();
        Set<ProtoTerm> handledProtoTerms = HashSetFactory.make();
        Set<ProtoParentTerm> handledProtoParentTerms = HashSetFactory.make();
        logger.debug("Adding constraints ...");
        // HACK do ProtoTerms first.  We do this since we do not want to automatically
        // generate a prototype variable for all constructors, since some come from
        // the library and are not prototypable
        // TODO clean this up
        for (ITypeConstraint constraint: constraints) {
            currentCause = Cause.src(constraint);
            if (constraint.getLeft() instanceof ProtoTerm) {
                handleProtoTerm((ProtoTerm)constraint.getLeft(), constraintAdder, handledProtoTerms);
            }
            if (constraint.getRight() instanceof ProtoTerm) {
                handleProtoTerm((ProtoTerm)constraint.getRight(), constraintAdder, handledProtoTerms);
            }
        }
        // Main pass: dispatch on the shape of each constraint's sides.  A single
        // constraint may trigger several handlers (the 'if's are deliberately not
        // mutually exclusive).
        for (ITypeConstraint constraint : constraints) {
            currentCause = Cause.src(constraint);
            logger.debug("Checking constraint {}", constraint);
            if (constraint.getLeft() instanceof ProtoParentTerm) {
                handleProtoParentTerm((ProtoParentTerm)constraint.getLeft(), constraintAdder,
                        handledProtoParentTerms);
            }
            if (constraint.getRight() instanceof ProtoParentTerm) {
                handleProtoParentTerm((ProtoParentTerm)constraint.getRight(), constraintAdder,
                        handledProtoParentTerms);
            }
            if (constraint.getLeft() instanceof IndexedTerm
                    || constraint.getRight() instanceof IndexedTerm) {
                handleIndexedTerm(constraintAdder, constrainedIndexTerms,
                        constrainedFunctionTerms, constraint);
            }
            if (constraint.getLeft() instanceof MapLiteralTerm) {
                handleMapLiteralTerm(constraintAdder, constraint);
            }
            if (constraint.getLeft() instanceof FunctionParamTerm
                    || constraint.getRight() instanceof FunctionParamTerm) {
                handleFunctionParamTerm(constraintAdder, constrainedFunctionTerms, constraint);
            }
            if (constraint.getLeft() instanceof FunctionReturnTerm
                    || constraint.getRight() instanceof FunctionReturnTerm) {
                handleFunctionReturnTerm(constraintAdder, constrainedFunctionTerms, constraint);
            }
            if (constraint.getRight() instanceof MethodReceiverTerm) {
                // receiver terms are only ever generated inside equality constraints
                assert constraint instanceof TypeEqualityConstraint;
                handleReceiverTerm((MethodReceiverTerm) constraint.getRight(), constraintAdder);
            }
            if (constraint.getLeft().getType() instanceof IntersectionType) {
                handleIntersectionType((IntersectionType)constraint.getLeft().getType(),
                        constraintAdder);
            }
            if (constraint.getRight().getType() instanceof IntersectionType) {
                handleIntersectionType((IntersectionType)constraint.getRight().getType(),
                        constraintAdder);
            }
            if (constraint instanceof SubTypeConstraint) {
                handleSubtypeConstraint(constraintAdder, constraint, propTerm2TypeVar);
            }
            if (constraint instanceof TypeEqualityConstraint
                    && ConstraintGenUtil.isNullUndefinedLitOrVoidOp(constraint.getLeft())) {
                // this is a type equality, but it was generated for assigning null/undefined
                // into some location. for the purposes of augmenting constraints, treat
                // it as a subtype constraint
                handleSubtypeConstraint(constraintAdder, constraint, propTerm2TypeVar);
            }
        }
        // Second pass over equality constraints: must run after the subtype pass so
        // that propTerm2TypeVar is fully populated (handleTypeEqualityConstraint only
        // introduces a fresh variable when none was recorded above).
        for (ITypeConstraint constraint: constraints) {
            currentCause = Cause.src(constraint);
            if (constraint instanceof TypeEqualityConstraint) {
                logger.debug("Re-checking constraint {}", constraint);
                handleTypeEqualityConstraint(constraintAdder, constraint, propTerm2TypeVar);
            }
        }
        constraints.addAll(moreConstraints);
    }

    /**
     * Introduces a fresh type variable for an unattached method's receiver and
     * equates the receiver term with it.  Mutates the method's type in place.
     */
    private void handleReceiverTerm(MethodReceiverTerm receiverTerm,
            Consumer<IConstraint> constraintAdder) {
        UnattachedMethodType methodType =
                (UnattachedMethodType) receiverTerm.getFunctionTerm().getType();
        TypeVar receiverTypeVar = makeFreshTypeVar();
        methodType.setReceiverType(receiverTypeVar);
        constraintAdder.accept(new TypeEqualityConstraint(receiverTerm,
                factory.getTermForType(receiverTypeVar)));
    }

    /**
     * For a subtype constraint whose left or right side is a property access
     * prop(e', foo), introduces a variable X for the field's type, bounds the base
     * object by an object type containing {foo: X} (read-only when the access is on
     * the LHS, i.e. a read; read-write when on the RHS, i.e. a write), and relates
     * X to the property-access term.  Also records an MRO/MRW upper bound when a
     * possibly-method value is written into a property.
     */
    private void handleSubtypeConstraint(
            Consumer<IConstraint> constraintAdder,
            ITypeConstraint constraint,
            Map<PropertyAccessTerm,TypeVariableTerm> propTerm2TypeVar) {
        /* handle e <= prop(e', foo)
         * we need to introduce a var x for the type of object's foo field, and
         * then x = prop(e', foo) */
        // we have some serious code duplication now, but
        // these cases will diverge.
        // TODO extract the common code
        SourceLocation loc = getSourceLoc(sourceMapping.get(constraint));
        if (constraint.getLeft() instanceof PropertyAccessTerm) {
            PropertyAccessTerm propTerm = (PropertyAccessTerm) constraint.getLeft();
            if (!propTerm2TypeVar.containsKey(propTerm)) {
                ITypeTerm baseTerm = propTerm.getBase();
                TypeVar tyVar = makeFreshTypeVar();
                TypeVariableTerm tvTerm = factory.findOrCreateTypeVariableTerm(tyVar);
                assert !(baseTerm instanceof ObjectLiteralTerm);
                // create a fresh object type as the upper bound
                // here, since the property is being read (since it is on the LHS
                // of the subtype constraint), make the property read-only
                ObjectType ot = new ObjectType(null, Collections.emptyList(),
                        Collections.singletonList(new Property(propTerm
                                .getPropertyName(), tyVar, true, loc)));
                ITypeConstraint c = new SubTypeConstraint(baseTerm, factory.getTermForType(ot));
                constraintAdder.accept(c);
                ITypeConstraint c1 = new SubTypeConstraint(tvTerm, propTerm);
                propTerm2TypeVar.put(propTerm, tvTerm);
                constraintAdder.accept(c1);
            }
        }
        if (constraint.getRight() instanceof PropertyAccessTerm) {
            PropertyAccessTerm propTerm = (PropertyAccessTerm) constraint.getRight();
            ITypeTerm baseTerm = propTerm.getBase();
            if (!propTerm2TypeVar.containsKey(propTerm)) {
                TypeVar tyVar = makeFreshTypeVar();
                TypeVariableTerm tvTerm = factory.findOrCreateTypeVariableTerm(tyVar);
                if (baseTerm instanceof ObjectLiteralTerm) {
                    // base is a literal: mutate its object type directly instead of
                    // generating an upper bound
                    ObjectLiteralTerm olTerm = (ObjectLiteralTerm) propTerm.getBase();
                    ObjectType ot = (ObjectType) olTerm.getType();
                    ot.setProperty(propTerm.getPropertyName(), tyVar);
                } else {
                    // create a fresh object type as the upper bound
                    // since this is a write, add the property as RW
                    ObjectType ot = new ObjectType(null,
                            Collections.singletonList(new Property(propTerm
                                    .getPropertyName(), tyVar, false, loc)),
                            Collections.emptyList());
                    ITypeConstraint c = new SubTypeConstraint(baseTerm,
                            factory.getTermForType(ot));
                    constraintAdder.accept(c);
                }
                ITypeConstraint c1 = new TypeEqualityConstraint(tvTerm, propTerm);
                propTerm2TypeVar.put(propTerm, tvTerm);
                constraintAdder.accept(c1);
            }
            // generate MRO-MRW upper bound constraint
            ITypeTerm leftTerm = constraint.getLeft();
            if (possiblyAMethodTerm(leftTerm)) {
                constraintAdder.accept(new UpperBoundConstraint(leftTerm,
                        baseTerm, Cause.src(constraint)));
            }
        }
    }

    /**
     * conservative check that returns false only for terms that obviously do not
     * represent methods (null/undefined, number/string literals, array/map/object
     * literals, type constants)
     *
     * TODO move this code inside ITypeTerm??
     * @param t the term to test
     * @return false only when t provably cannot be a method
     */
    private boolean possiblyAMethodTerm(ITypeTerm t) {
        if (ConstraintGenUtil.isNullUndefinedLitOrVoidOp(t)) {
            return false;
        }
        if (t instanceof ExpressionTerm) {
            ExpressionTerm et = (ExpressionTerm) t;
            AstNode node = et.getNode();
            if (node != null) {
                return !(node instanceof NumberLiteral || node instanceof StringLiteral);
            }
        }
        return !(t instanceof ArrayLiteralTerm || t instanceof MapLiteralTerm
                || t instanceof ObjectLiteralTerm || t instanceof TypeConstantTerm);
    }

    /**
     * Converts a recorded set of source lines into a {@link SourceLocation}.
     * A null set yields line -1 (unknown location); otherwise the set is expected
     * to hold exactly one line number.
     */
    private SourceLocation getSourceLoc(Set<Integer> sourceLines) {
        int lineNum = -1;
        if (sourceLines != null) {
            assert sourceLines.size() == 1;
            lineNum = sourceLines.iterator().next();
        }
        SourceLocation loc = new BasicSourceLocation(lineNum);
        return loc;
    }

    /**
     * Handles equality constraints involving a property access that never appeared
     * in a subtype constraint (and thus got no variable in the first pass).
     */
    private void handleTypeEqualityConstraint(
            Consumer<IConstraint> constraintAdder,
            ITypeConstraint constraint,
            Map<PropertyAccessTerm, TypeVariableTerm> propTerm2TypeVar) {
        /*
         * We only deal with |a.foo| = prop( |a|, foo) here
         *
         * We add
         * |a| <: ObjTy( { foo: X0 } )
         * |a.foo| = X0
         * where X0 is new
         */
        ITypeTerm dotTerm = null;
        PropertyAccessTerm propTerm = null;
        if (constraint.getLeft() instanceof PropertyAccessTerm) {
            dotTerm = constraint.getRight();
            propTerm = (PropertyAccessTerm) constraint.getLeft();
        } else if (constraint.getRight() instanceof PropertyAccessTerm) {
            dotTerm = constraint.getLeft();
            propTerm = (PropertyAccessTerm) constraint.getRight();
        }
        if (propTerm != null) {
            ITypeTerm baseTerm = propTerm.getBase();
            TypeVariableTerm tvTerm = propTerm2TypeVar.get(propTerm);
            if (tvTerm == null) {
                // this occurs when the prop term never appears in a subtype constraint,
                // e.g., for reads of length from arrays.  Since it must be a read, upper
                // bound has a read-only property
                SourceLocation loc = getSourceLoc(sourceMapping.get(constraint));
                TypeVar tyVar = makeFreshTypeVar();
                ObjectType ot = new ObjectType(null, Collections.emptyList(),
                        Collections.singletonList(new Property(propTerm
                                .getPropertyName(), tyVar, true, loc)));
                ITypeConstraint c1 = new SubTypeConstraint(baseTerm,
                        factory.getTermForType(ot));
                constraintAdder.accept(c1);
                ITypeConstraint c2 = new TypeEqualityConstraint(dotTerm,
                        factory.findOrCreateTypeVariableTerm(tyVar));
                constraintAdder.accept(c2);
                if (baseTerm instanceof ObjectLiteralTerm) {
                    // get rid of lingering any types
                    ObjectLiteralTerm baseOLT = (ObjectLiteralTerm) baseTerm;
                    ObjectType baseOLTType = (ObjectType) baseOLT.getType();
                    for (Property p: baseOLTType.properties()) {
                        if (p.getType() instanceof AnyType) {
                            baseOLTType.setProperty(p.getName(), makeFreshTypeVar(), p.isRO());
                        }
                    }
                }
            }
        }
    }

    /**
     * For a constraint mentioning ret(f), ensures f's function/constructor shape
     * constraints exist.  A constructor call is recognized when the other side is a
     * call term whose AST node is a {@code new} expression.
     */
    private void handleFunctionReturnTerm(
            Consumer<IConstraint> constraintAdder,
            Set<Pair<ITypeTerm, Integer>> constrainedFunctionTerms,
            ITypeConstraint constraint) {
        FunctionReturnTerm returnTerm =
                (FunctionReturnTerm)(constraint.getLeft() instanceof FunctionReturnTerm
                        ? constraint.getLeft() : constraint.getRight());
        ITypeTerm otherTerm = constraint.getLeft() instanceof FunctionReturnTerm
                ? constraint.getRight() : constraint.getLeft();
        ITypeTerm functionTerm = returnTerm.getFunctionTerm();
        int nrParams = returnTerm.getNrParams();
        // TODO make sure we handle constructors with parameters
        boolean isConstructorCall = otherTerm instanceof FunctionCallTerm
                && ((FunctionCallTerm)otherTerm).getFunctionCall() instanceof NewExpression;
        doConstraintsForFunctionTerm(constraintAdder, constrainedFunctionTerms,
                functionTerm, nrParams, isConstructorCall);
    }

    /**
     * For a constraint mentioning param(f, i), ensures f's function shape
     * constraints exist (never treated as a constructor here).
     */
    private void handleFunctionParamTerm(
            Consumer<IConstraint> constraintAdder,
            Set<Pair<ITypeTerm, Integer>> constrainedFunctionTerms,
            ITypeConstraint constraint) {
        FunctionParamTerm paramTerm =
                (FunctionParamTerm)(constraint.getLeft() instanceof FunctionParamTerm
                        ? constraint.getLeft() : constraint.getRight());
        // create type variables for return type and parameters
        ITypeTerm functionTerm = paramTerm.getFunctionTerm();
        int nrParams = paramTerm.getNrParams();
        doConstraintsForFunctionTerm(constraintAdder, constrainedFunctionTerms,
                functionTerm, nrParams, false);
    }

    /**
     * Replaces a map literal's 'any' element type with a fresh variable and equates
     * that variable with the literal's indexed term.  Mutates the map type in place.
     */
    private void handleMapLiteralTerm(
            Consumer<IConstraint> constraintAdder,
            ITypeConstraint constraint) {
        MapLiteralTerm mlt = (MapLiteralTerm) constraint.getLeft();
        MapType mapType = (MapType) mlt.getType();
        if (mapType.elemType() instanceof AnyType) {
            // introduce a fresh type variable
            TypeVar elemTypeVar = makeFreshTypeVar();
            TypeVariableTerm elemTypeVarTerm = factory.findOrCreateTypeVariableTerm(elemTypeVar);
            mapType.setElemType(elemTypeVar);
            constraintAdder.accept(new TypeEqualityConstraint(elemTypeVarTerm,
                    factory.findOrCreateIndexedTerm(mlt)));
        }
    }

    /**
     * For a constraint mentioning an indexed term base[·], introduces an element
     * type variable and the appropriate bound on the base: mutating literal types
     * directly, assuming an array when the other side is a type-parameter term
     * (string-match hack), and otherwise bounding by an unknown map-or-array type
     * with a fresh key variable.  Each base is processed at most once.
     */
    private void handleIndexedTerm(Consumer<IConstraint> constraintAdder,
            Set<ITypeTerm> constrainedIndexTerms,
            Set<Pair<ITypeTerm, Integer>> constrainedFunctionTerms,
            ITypeConstraint constraint) {
        IndexedTerm indexedTerm = (IndexedTerm)(constraint.getLeft() instanceof IndexedTerm
                ? constraint.getLeft() : constraint.getRight());
        ITypeTerm base = indexedTerm.getBase();
        // TODO temporary hack until we handle function types; remove this condition!
        // if (base instanceof FunctionReturnTerm) continue;
        if (!constrainedIndexTerms.contains(base)) {
            ITypeTerm other = constraint.getLeft() instanceof IndexedTerm
                    ? constraint.getRight() : constraint.getLeft();
            TypeVar elemTypeVar = makeFreshTypeVar();
            TypeVariableTerm elemTypeVarTerm = factory.findOrCreateTypeVariableTerm(elemTypeVar);
            if (base instanceof ArrayLiteralTerm) {
                // mutate the array type to use our elements type variable
                ArrayType arrType = (ArrayType)((ArrayLiteralTerm)base).getType();
                arrType.setElemType(elemTypeVar);
            } else if (base instanceof MapLiteralTerm) {
                // mutate to use our elements type variable
                MapType mapType = (MapType)((MapLiteralTerm)base).getType();
                mapType.setElemType(elemTypeVar);
            } else if (other.toString().startsWith("TP(")) {
                // TODO HACK!  detecting a type-parameter term by its printed form.
                // we know we're talking about an array in this case, so
                // don't introduce a type variable for the key
                ArrayType arrayType = new ArrayType(elemTypeVar);
                constraintAdder.accept(new SubTypeConstraint(base,
                        factory.getTermForType(arrayType)));
                // hack! make sure return type of "push" is an int
                PropertyAccessTerm pushTerm =
                        factory.findOrCreatePropertyAccessTerm(base, "push", null);
                FunctionReturnTerm pushRetTerm =
                        factory.findOrCreateFunctionReturnTerm(pushTerm, 1);
                constraintAdder.accept(new TypeEqualityConstraint(
                        pushRetTerm, factory.getTermForType(IntegerType
                                .make())));
                // another hack!  need this to handle Array function
                if (base instanceof FunctionReturnTerm
                        && base.toString().equals("ret(|Array|)")) {
                    doConstraintsForFunctionTerm(constraintAdder,
                            constrainedFunctionTerms, ((FunctionReturnTerm) base)
                                    .getFunctionTerm(),
                            ((FunctionReturnTerm) base).getNrParams(), false);
                }
            } else {
                // unknown base: could be an array or a map, so bound it by a type
                // with fresh key and element variables
                TypeVar keyTypeVar = makeFreshTypeVar();
                TypeVariableTerm keyTypeVarTerm =
                        factory.findOrCreateTypeVariableTerm(keyTypeVar);
                UnknownIndexableType mapOrArrayType =
                        new UnknownIndexableType(keyTypeVar, elemTypeVar);
                constraintAdder.accept(new SubTypeConstraint(base,
                        factory.getTermForType(mapOrArrayType)));
                constraintAdder.accept(new TypeEqualityConstraint(keyTypeVarTerm,
                        factory.findOrCreateKeyTerm(base)));
            }
            // always constrain the element type
            constraintAdder.accept(new TypeEqualityConstraint(elemTypeVarTerm, indexedTerm));
            constrainedIndexTerms.add(base);
        }
    }

    /** Adds an InheritPropsConstraint for a proto-parent term, once per term. */
    private void handleProtoParentTerm(ProtoParentTerm protoParentTerm,
            Consumer<IConstraint> constraintAdder, Set<ProtoParentTerm> handled) {
        if (handled.add(protoParentTerm)) {
            constraintAdder.accept(new InheritPropsConstraint(protoParentTerm));
        }
    }

    /**
     * Handles proto(C) terms, once per term.  When C's type is already a
     * ConstructorType, introduces a fresh prototype variable, ties any declared
     * prototype properties to it, installs it on the constructor, and equates the
     * constructor's return term accordingly.  Otherwise the arity is unknown, so a
     * ProtoConstraint defers the work to solving time.
     */
    private void handleProtoTerm(ProtoTerm protoTerm,
            Consumer<IConstraint> constraintAdder, Set<ProtoTerm> handled) {
        if (handled.add(protoTerm)) {
            logger.debug("handling proto term {}", protoTerm);
            ITypeTerm baseTerm = protoTerm.getTerm();
            if (baseTerm.getType() instanceof ConstructorType) {
                ConstructorType cType = (ConstructorType) baseTerm.getType();
                TypeVar prototypeVar = makeFreshTypeVar();
                ITypeTerm protoVarTerm = factory.getTermForType(prototypeVar);
                if (cType.getPrototype() != null) {
                    // this must be due to initialization of individual prototype properties
                    ObjectType protoType = (ObjectType) cType.getPrototype();
                    for (Property p : protoType.properties()) {
                        if (p.getType() instanceof AnyType) {
                            protoType.setProperty(p.getName(), makeFreshTypeVar());
                        }
                    }
                    constraintAdder.accept(new TypeEqualityConstraint(
                            protoVarTerm, factory.getTermForType(protoType)));
                }
                cType.setPrototype(prototypeVar);
                ProtoTerm consProto = factory.findOrCreateProtoTerm(baseTerm);
                constraintAdder.accept(new TypeEqualityConstraint(consProto, protoVarTerm));
                // also put it on the return type
                Type returnType = typeVarsForReturnType(constraintAdder, cType, prototypeVar);
                FunctionReturnTerm returnTerm = factory
                        .findOrCreateFunctionReturnTerm(baseTerm, cType.nrParams());
                constraintAdder.accept(new TypeEqualityConstraint(returnTerm,
                        factory.getTermForType(returnType)));
            } else {
                // the upper bound of the base term should be a constructor
                // type, but we don't know its arity! so we can't
                // immediately generate an upper bound. instead, handle this
                // case during solving with a ProtoConstraint
                constraintAdder.accept(new ProtoConstraint(protoTerm));
            }
        }
    }

    /**
     * special-case hack to get rid of AnyType inside the type of Array:
     * replaces 'any' parameter types and 'any' array-element return types of the
     * function members of an intersection type with fresh variables (in place).
     * TODO don't generate AnyType any more, and use type variables instead
     */
    private void handleIntersectionType(IntersectionType isectType,
            Consumer<IConstraint> constraintAdder) {
        for (Type t: isectType.getTypes()) {
            if (t instanceof FunctionType) {
                FunctionType ft = (FunctionType) t;
                // check for Array<any> in return type, and for any in parameter types
                List<Type> paramTypes = ft.paramTypes();
                for (int i = 0; i < paramTypes.size(); i++) {
                    if (paramTypes.get(i) instanceof AnyType) {
                        // replace with fresh type variable
                        ft.setParamType(factory.freshTypeVar(), i);
                    }
                }
                Type returnType = ft.returnType();
                if (returnType instanceof ArrayType) {
                    ArrayType arrType = (ArrayType) returnType;
                    if (arrType.elemType() instanceof AnyType) {
                        arrType.setElemType(factory.freshTypeVar());
                        // TODO constrain indexed term?
                    }
                }
            }
        }
    }

    /**
     * Ensures, once per (term, arity) pair, that a function term has the full set
     * of shape constraints: fresh variables for unknown parameter/return types, a
     * function- or constructor-type upper bound when the term's type isn't already
     * a CodeType, and equalities binding each param(f,i) / ret(f) term to its type.
     */
    private void doConstraintsForFunctionTerm(
            Consumer<IConstraint> constraintAdder,
            Set<Pair<ITypeTerm, Integer>> constrainedFunctionTerms,
            ITypeTerm functionTerm, int nrParams, boolean isConstructor) {
        Pair<ITypeTerm, Integer> key = Pair.make(functionTerm, nrParams);
        if (!constrainedFunctionTerms.contains(key)) {
            constrainedFunctionTerms.add(key);
            // this call ensures that we have an entry for the function in
            // the final type mapping. see endtoend test iife.js
            fixedpointSolver.initBounds(functionTerm, currentCause);
            List<Type> paramTypes = null;
            Type returnType = null;
            Type type = functionTerm.getType();
            if (type instanceof CodeType) {
                // update fnType with new type variables as needed
                CodeType fnType = (CodeType) type;
                paramTypes = fnType.paramTypes();
                for (int i = 0; i < nrParams; i++) {
                    Type curParamType = paramTypes.get(i);
                    if (curParamType instanceof AnyType) {
                        fnType.setParamType(makeFreshTypeVar(), i);
                    }
                }
                returnType = typeVarsForReturnType(constraintAdder, fnType, null);
                logger.debug("updated type of {} to {}", functionTerm, fnType);
            } else {
                paramTypes = new ArrayList<>(nrParams);
                for (int i = 0; i < nrParams; i++) {
                    paramTypes.add(makeFreshTypeVar());
                }
                returnType = makeFreshTypeVar();
                if (!isConstructor) {
                    // create a fresh function type, and make type a subtype
                    // TODO passing bogus names in here; hopefully they won't be
                    // used.  clean this up
                    FunctionType fnType = new FunctionType(paramTypes,
                            Collections.<String> emptyList(), returnType);
                    constraintAdder.accept(new SubTypeConstraint(functionTerm,
                            factory.getTermForType(fnType)));
                } else {
                    TypeVar protoType = makeFreshTypeVar();
                    ConstructorType cType = new ConstructorType(paramTypes,
                            Collections.<String>emptyList(), returnType, protoType);
                    constraintAdder.accept(new SubTypeConstraint(functionTerm,
                            factory.getTermForType(cType)));
                    // also equate protoType appropriately to a term
                    ProtoTerm protoTerm = factory.findOrCreateProtoTerm(functionTerm);
                    constraintAdder.accept(new TypeEqualityConstraint(protoTerm,
                            factory.getTermForType(protoType)));
                }
            }
            // add equality constraints for FunctionParamTerms and
            // FunctionReturnTerm
            for (int i = 0; i < nrParams; i++) {
                FunctionParamTerm curParamTerm = factory
                        .findOrCreateFunctionParamTerm(functionTerm, i, nrParams);
                constraintAdder
                        .accept(new TypeEqualityConstraint(
                                curParamTerm, factory.getTermForType(paramTypes.get(i))));
            }
            FunctionReturnTerm returnTerm = factory
                    .findOrCreateFunctionReturnTerm(functionTerm, nrParams);
            constraintAdder.accept(new TypeEqualityConstraint(returnTerm,
                    factory.getTermForType(returnType)));
        }
    }

    /**
     * Introduce type variables for the return type of fnType as needed: 'any' and
     * object return types are replaced (in place, via setReturnType) by a fresh
     * variable; object returns additionally get their 'any' properties freshened
     * and, when prototypeVar is non-null (constructor case), are rebuilt with that
     * prototype before being equated to the fresh variable.
     * @param constraintAdder sink for generated constraints
     * @param fnType the function/constructor type to update
     * @param prototypeVar prototype variable for constructor returns, or null
     * @return the (possibly fresh) return type
     */
    private Type typeVarsForReturnType(
            Consumer<IConstraint> constraintAdder,
            CodeType fnType, TypeVar prototypeVar) {
        Type returnType = fnType.returnType();
        if (!((returnType instanceof AnyType) || (returnType instanceof ObjectType))) {
            return returnType;
        }
        TypeVar returnTypeVar = makeFreshTypeVar();
        if (returnType instanceof ObjectType) {
            ObjectType retObj = (ObjectType) returnType;
            for (Property p : retObj.properties()) {
                if (p.getType() instanceof AnyType) {
                    retObj.setProperty(p.getName(), makeFreshTypeVar());
                }
            }
            if (prototypeVar != null) {
                retObj = new ObjectType(prototypeVar, retObj.ownProperties(),
                        Collections.emptyList());
            }
            constraintAdder.accept(new TypeEqualityConstraint(factory
                    .getTermForType(returnTypeVar), factory
                    .getTermForType(retObj)));
        }
        fnType.setReturnType(returnTypeVar);
        return returnTypeVar;
    }

    /** Allocates a fresh type variable from the factory. */
    private TypeVar makeFreshTypeVar() {
        TypeVar freshVar = factory.freshTypeVar();
        return freshVar;
    }
}
/* * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.support.v7.internal.widget; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.content.res.Resources; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.drawable.Drawable; import android.support.v4.view.ActionProvider; import android.support.v7.appcompat.R; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.view.ViewTreeObserver.OnGlobalLayoutListener; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.PopupWindow; import android.widget.TextView; /** * This class is a view for choosing an activity for handling a given {@link android.content.Intent}. * <p> * The view is composed of two adjacent buttons: * <ul> * <li> * The left button is an immediate action and allows one click activity choosing. * Tapping this button immediately executes the intent without requiring any further * user input. Long press on this button shows a popup for changing the default * activity. 
* </li> * <li> * The right button is an overflow action and provides an optimized menu * of additional activities. Tapping this button shows a popup anchored to this * view, listing the most frequently used activities. This list is initially * limited to a small number of items in frequency used order. The last item, * "Show all..." serves as an affordance to display all available activities. * </li> * </ul> * </p> * * @hide */ public class ActivityChooserView extends ViewGroup implements ActivityChooserModel.ActivityChooserModelClient { /** * An adapter for displaying the activities in an {@link android.widget.AdapterView}. */ private final ActivityChooserViewAdapter mAdapter; /** * Implementation of various interfaces to avoid publishing them in the APIs. */ private final Callbacks mCallbacks; /** * The content of this view. */ private final LinearLayout mActivityChooserContent; /** * Stores the background drawable to allow hiding and latter showing. */ private final Drawable mActivityChooserContentBackground; /** * The expand activities action button; */ private final FrameLayout mExpandActivityOverflowButton; /** * The image for the expand activities action button; */ private final ImageView mExpandActivityOverflowButtonImage; /** * The default activities action button; */ private final FrameLayout mDefaultActivityButton; /** * The image for the default activities action button; */ private final ImageView mDefaultActivityButtonImage; /** * The maximal width of the list popup. */ private final int mListPopupMaxWidth; /** * The ActionProvider hosting this view, if applicable. */ ActionProvider mProvider; /** * Observer for the model data. 
*/ private final DataSetObserver mModelDataSetOberver = new DataSetObserver() { @Override public void onChanged() { super.onChanged(); mAdapter.notifyDataSetChanged(); } @Override public void onInvalidated() { super.onInvalidated(); mAdapter.notifyDataSetInvalidated(); } }; private final OnGlobalLayoutListener mOnGlobalLayoutListener = new OnGlobalLayoutListener() { @Override public void onGlobalLayout() { if (isShowingPopup()) { if (!isShown()) { getListPopupWindow().dismiss(); } else { getListPopupWindow().show(); if (mProvider != null) { mProvider.subUiVisibilityChanged(true); } } } } }; /** * Popup window for showing the activity overflow list. */ private ListPopupWindow mListPopupWindow; /** * Listener for the dismissal of the popup/alert. */ private PopupWindow.OnDismissListener mOnDismissListener; /** * Flag whether a default activity currently being selected. */ private boolean mIsSelectingDefaultActivity; /** * The count of activities in the popup. */ private int mInitialActivityCount = ActivityChooserViewAdapter.MAX_ACTIVITY_COUNT_DEFAULT; /** * Flag whether this view is attached to a window. */ private boolean mIsAttachedToWindow; /** * String resource for formatting content description of the default target. */ private int mDefaultActionButtonContentDescription; /** * Create a new instance. * * @param context The application environment. */ public ActivityChooserView(Context context) { this(context, null); } /** * Create a new instance. * * @param context The application environment. * @param attrs A collection of attributes. */ public ActivityChooserView(Context context, AttributeSet attrs) { this(context, attrs, 0); } /** * Create a new instance. * * @param context The application environment. * @param attrs A collection of attributes. * @param defStyle The default style to apply to this view. 
*/ public ActivityChooserView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); TypedArray attributesArray = context.obtainStyledAttributes(attrs, R.styleable.ActivityChooserView, defStyle, 0); mInitialActivityCount = attributesArray.getInt( R.styleable.ActivityChooserView_initialActivityCount, ActivityChooserViewAdapter.MAX_ACTIVITY_COUNT_DEFAULT); Drawable expandActivityOverflowButtonDrawable = attributesArray.getDrawable( R.styleable.ActivityChooserView_expandActivityOverflowButtonDrawable); attributesArray.recycle(); LayoutInflater inflater = LayoutInflater.from(getContext()); inflater.inflate(R.layout.abc_activity_chooser_view, this, true); mCallbacks = new Callbacks(); mActivityChooserContent = (LinearLayout) findViewById(R.id.activity_chooser_view_content); mActivityChooserContentBackground = mActivityChooserContent.getBackground(); mDefaultActivityButton = (FrameLayout) findViewById(R.id.default_activity_button); mDefaultActivityButton.setOnClickListener(mCallbacks); mDefaultActivityButton.setOnLongClickListener(mCallbacks); mDefaultActivityButtonImage = (ImageView) mDefaultActivityButton.findViewById(R.id.image); mExpandActivityOverflowButton = (FrameLayout) findViewById(R.id.expand_activities_button); mExpandActivityOverflowButton.setOnClickListener(mCallbacks); mExpandActivityOverflowButtonImage = (ImageView) mExpandActivityOverflowButton.findViewById(R.id.image); mExpandActivityOverflowButtonImage.setImageDrawable(expandActivityOverflowButtonDrawable); mAdapter = new ActivityChooserViewAdapter(); mAdapter.registerDataSetObserver(new DataSetObserver() { @Override public void onChanged() { super.onChanged(); updateAppearance(); } }); Resources resources = context.getResources(); mListPopupMaxWidth = Math.max(resources.getDisplayMetrics().widthPixels / 2, resources.getDimensionPixelSize(R.dimen.abc_config_prefDialogWidth)); } /** * {@inheritDoc} */ public void setActivityChooserModel(ActivityChooserModel dataModel) { 
// NOTE(review): this chunk begins inside the tail of an enclosing method (the one that
// installs a new data model on mAdapter); its signature and the class header are above
// this view of the file, so only the visible statements are documented here.
        mAdapter.setDataModel(dataModel);
        // A visible popup is re-opened so its list reflects the newly installed model.
        if (isShowingPopup()) {
            dismissPopup();
            showPopup();
        }
    }

    /**
     * Sets the background for the button that expands the activity
     * overflow list.
     *
     * <strong>Note:</strong> Clients would like to set this drawable
     * as a clue about the action the chosen activity will perform. For
     * example, if a share activity is to be chosen the drawable should
     * give a clue that sharing is to be performed.
     *
     * @param drawable The drawable.
     */
    public void setExpandActivityOverflowButtonDrawable(Drawable drawable) {
        mExpandActivityOverflowButtonImage.setImageDrawable(drawable);
    }

    /**
     * Sets the content description for the button that expands the activity
     * overflow list. Clients should use the description as a clue about the
     * action performed by the button. For example, if a share activity is to
     * be chosen the content description should be something like "Share with".
     *
     * @param resourceId The content description resource id.
     */
    public void setExpandActivityOverflowButtonContentDescription(int resourceId) {
        CharSequence contentDescription = getContext().getString(resourceId);
        mExpandActivityOverflowButtonImage.setContentDescription(contentDescription);
    }

    /**
     * Set the provider hosting this view, if applicable.
     *
     * @hide Internal use only
     */
    public void setProvider(ActionProvider provider) {
        mProvider = provider;
    }

    /**
     * Shows the popup window with activities.
     *
     * @return True if the popup was shown, false if already showing.
     */
    public boolean showPopup() {
        // No-op while already visible or while detached (nothing to anchor to).
        if (isShowingPopup() || !mIsAttachedToWindow) {
            return false;
        }
        mIsSelectingDefaultActivity = false;
        showPopupUnchecked(mInitialActivityCount);
        return true;
    }

    /**
     * Shows the popup no matter if it was already showing.
     *
     * @param maxActivityCount The max number of activities to display.
     * @throws IllegalStateException if no data model has been installed yet.
     */
    private void showPopupUnchecked(int maxActivityCount) {
        if (mAdapter.getDataModel() == null) {
            throw new IllegalStateException("No data model. Did you call #setDataModel?");
        }
        // Track global layout passes so the popup can follow this anchor view.
        getViewTreeObserver().addOnGlobalLayoutListener(mOnGlobalLayoutListener);
        final boolean defaultActivityButtonShown =
                mDefaultActivityButton.getVisibility() == VISIBLE;
        final int activityCount = mAdapter.getActivityCount();
        final int maxActivityCountOffset = defaultActivityButtonShown ? 1 : 0;
        // Show a "see all" footer only when the list would be truncated; the footer
        // consumes one slot, hence the cap of maxActivityCount - 1 activity rows.
        if (maxActivityCount != ActivityChooserViewAdapter.MAX_ACTIVITY_COUNT_UNLIMITED
                && activityCount > maxActivityCount + maxActivityCountOffset) {
            mAdapter.setShowFooterView(true);
            mAdapter.setMaxActivityCount(maxActivityCount - 1);
        } else {
            mAdapter.setShowFooterView(false);
            mAdapter.setMaxActivityCount(maxActivityCount);
        }
        ListPopupWindow popupWindow = getListPopupWindow();
        if (!popupWindow.isShowing()) {
            // While the user is picking a new default, or when no dedicated default
            // button is visible, the default activity stays in the popup list.
            if (mIsSelectingDefaultActivity || !defaultActivityButtonShown) {
                mAdapter.setShowDefaultActivity(true, defaultActivityButtonShown);
            } else {
                mAdapter.setShowDefaultActivity(false, false);
            }
            final int contentWidth = Math.min(mAdapter.measureContentWidth(), mListPopupMaxWidth);
            popupWindow.setContentWidth(contentWidth);
            popupWindow.show();
            if (mProvider != null) {
                mProvider.subUiVisibilityChanged(true);
            }
            popupWindow.getListView().setContentDescription(getContext().getString(
                    R.string.abc_activitychooserview_choose_application));
        }
    }

    /**
     * Dismisses the popup window with activities.
     *
     * @return True if dismissed, false if already dismissed.
     */
    public boolean dismissPopup() {
        if (isShowingPopup()) {
            getListPopupWindow().dismiss();
            ViewTreeObserver viewTreeObserver = getViewTreeObserver();
            if (viewTreeObserver.isAlive()) {
                viewTreeObserver.removeGlobalOnLayoutListener(mOnGlobalLayoutListener);
            }
        }
        // NOTE(review): always reports true, even when nothing was dismissed, so the
        // @return contract above is not strictly honored — confirm before relying on it.
        return true;
    }

    /**
     * Gets whether the popup window with activities is shown.
     *
     * @return True if the popup is shown.
     */
    public boolean isShowingPopup() {
        return getListPopupWindow().isShowing();
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Start observing model changes only while attached to a window.
        ActivityChooserModel dataModel = mAdapter.getDataModel();
        if (dataModel != null) {
            dataModel.registerObserver(mModelDataSetOberver);
        }
        mIsAttachedToWindow = true;
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Mirror onAttachedToWindow: drop the observer and any layout listener, and
        // close the popup so it does not outlive its anchor.
        ActivityChooserModel dataModel = mAdapter.getDataModel();
        if (dataModel != null) {
            dataModel.unregisterObserver(mModelDataSetOberver);
        }
        ViewTreeObserver viewTreeObserver = getViewTreeObserver();
        if (viewTreeObserver.isAlive()) {
            viewTreeObserver.removeGlobalOnLayoutListener(mOnGlobalLayoutListener);
        }
        if (isShowingPopup()) {
            dismissPopup();
        }
        mIsAttachedToWindow = false;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        View child = mActivityChooserContent;
        // If the default action is not visible we want to be as tall as the
        // ActionBar so if this widget is used in the latter it will look as
        // a normal action button.
        if (mDefaultActivityButton.getVisibility() != VISIBLE) {
            heightMeasureSpec = MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(heightMeasureSpec),
                    MeasureSpec.EXACTLY);
        }
        measureChild(child, widthMeasureSpec, heightMeasureSpec);
        setMeasuredDimension(child.getMeasuredWidth(), child.getMeasuredHeight());
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        mActivityChooserContent.layout(0, 0, right - left, bottom - top);
        // NOTE(review): dismissPopup() is called when the popup is NOT showing, at
        // which point it is effectively a no-op — confirm the condition's intent.
        if (!isShowingPopup()) {
            dismissPopup();
        }
    }

    /**
     * Gets the data model backing this view's adapter.
     *
     * @return The model, or null if none has been set.
     */
    public ActivityChooserModel getDataModel() {
        return mAdapter.getDataModel();
    }

    /**
     * Sets a listener to receive a callback when the popup is dismissed.
     *
     * @param listener The listener to be notified.
     */
    public void setOnDismissListener(PopupWindow.OnDismissListener listener) {
        mOnDismissListener = listener;
    }

    /**
     * Sets the initial count of items shown in the activities popup
     * i.e. the items before the popup is expanded. This is an upper
     * bound since it is not guaranteed that such number of intent
     * handlers exist.
     *
     * @param itemCount The initial popup item count.
     */
    public void setInitialActivityCount(int itemCount) {
        mInitialActivityCount = itemCount;
    }

    /**
     * Sets a content description of the default action button. This
     * resource should be a string taking one formatting argument and
     * will be used for formatting the content description of the button
     * dynamically as the default target changes. For example, a resource
     * pointing to the string "share with %1$s" will result in a content
     * description "share with Bluetooth" for the Bluetooth activity.
     *
     * @param resourceId The resource id.
     */
    public void setDefaultActionButtonContentDescription(int resourceId) {
        mDefaultActionButtonContentDescription = resourceId;
    }

    /**
     * Gets the list popup window which is lazily initialized.
     *
     * @return The popup.
     */
    private ListPopupWindow getListPopupWindow() {
        if (mListPopupWindow == null) {
            mListPopupWindow = new ListPopupWindow(getContext());
            mListPopupWindow.setAdapter(mAdapter);
            mListPopupWindow.setAnchorView(ActivityChooserView.this);
            mListPopupWindow.setModal(true);
            // mCallbacks handles both item clicks and dismissal.
            mListPopupWindow.setOnItemClickListener(mCallbacks);
            mListPopupWindow.setOnDismissListener(mCallbacks);
        }
        return mListPopupWindow;
    }

    /**
     * Updates the buttons state.
     */
    private void updateAppearance() {
        // Expand overflow button.
        if (mAdapter.getCount() > 0) {
            mExpandActivityOverflowButton.setEnabled(true);
        } else {
            mExpandActivityOverflowButton.setEnabled(false);
        }
        // Default activity button: shown when there is exactly one activity, or when
        // usage history exists to pick a meaningful default among several.
        final int activityCount = mAdapter.getActivityCount();
        final int historySize = mAdapter.getHistorySize();
        if (activityCount == 1 || activityCount > 1 && historySize > 0) {
            mDefaultActivityButton.setVisibility(VISIBLE);
            ResolveInfo activity = mAdapter.getDefaultActivity();
            PackageManager packageManager = getContext().getPackageManager();
            mDefaultActivityButtonImage.setImageDrawable(activity.loadIcon(packageManager));
            if (mDefaultActionButtonContentDescription != 0) {
                CharSequence label = activity.loadLabel(packageManager);
                String contentDescription = getContext().getString(
                        mDefaultActionButtonContentDescription, label);
                mDefaultActivityButton.setContentDescription(contentDescription);
            }
        } else {
            mDefaultActivityButton.setVisibility(View.GONE);
        }
        // Activity chooser content.
        if (mDefaultActivityButton.getVisibility() == VISIBLE) {
            mActivityChooserContent.setBackgroundDrawable(mActivityChooserContentBackground);
        } else {
            mActivityChooserContent.setBackgroundDrawable(null);
        }
    }

    /**
     * Interface implementation to avoid publishing them in the APIs.
     */
    private class Callbacks implements AdapterView.OnItemClickListener,
            OnClickListener, OnLongClickListener, PopupWindow.OnDismissListener {

        // AdapterView#OnItemClickListener
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            ActivityChooserViewAdapter adapter = (ActivityChooserViewAdapter) parent.getAdapter();
            final int itemViewType = adapter.getItemViewType(position);
            switch (itemViewType) {
                case ActivityChooserViewAdapter.ITEM_VIEW_TYPE_FOOTER: {
                    // "See all" footer tapped: re-open the popup without a length cap.
                    showPopupUnchecked(ActivityChooserViewAdapter.MAX_ACTIVITY_COUNT_UNLIMITED);
                } break;
                case ActivityChooserViewAdapter.ITEM_VIEW_TYPE_ACTIVITY: {
                    dismissPopup();
                    if (mIsSelectingDefaultActivity) {
                        // The item at position zero is the default already.
                        if (position > 0) {
                            mAdapter.getDataModel().setDefaultActivity(position);
                        }
                    } else {
                        // If the default target is not shown in the list, the first
                        // item in the model is default action => adjust index
                        position = mAdapter.getShowDefaultActivity() ? position : position + 1;
                        Intent launchIntent = mAdapter.getDataModel().chooseActivity(position);
                        if (launchIntent != null) {
                            launchIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                            getContext().startActivity(launchIntent);
                        }
                    }
                } break;
                default:
                    throw new IllegalArgumentException();
            }
        }

        // View.OnClickListener
        public void onClick(View view) {
            if (view == mDefaultActivityButton) {
                // Launch the promoted default activity directly.
                dismissPopup();
                ResolveInfo defaultActivity = mAdapter.getDefaultActivity();
                final int index = mAdapter.getDataModel().getActivityIndex(defaultActivity);
                Intent launchIntent = mAdapter.getDataModel().chooseActivity(index);
                if (launchIntent != null) {
                    launchIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                    getContext().startActivity(launchIntent);
                }
            } else if (view == mExpandActivityOverflowButton) {
                mIsSelectingDefaultActivity = false;
                showPopupUnchecked(mInitialActivityCount);
            } else {
                throw new IllegalArgumentException();
            }
        }

        // OnLongClickListener#onLongClick
        @Override
        public boolean onLongClick(View view) {
            if (view == mDefaultActivityButton) {
                // Long-press on the default button enters "pick a new default" mode.
                if (mAdapter.getCount() > 0) {
                    mIsSelectingDefaultActivity = true;
                    showPopupUnchecked(mInitialActivityCount);
                }
            } else {
                throw new IllegalArgumentException();
            }
            return true;
        }

        // PopUpWindow.OnDismissListener#onDismiss
        public void onDismiss() {
            notifyOnDismissListener();
            if (mProvider != null) {
                mProvider.subUiVisibilityChanged(false);
            }
        }

        private void notifyOnDismissListener() {
            if (mOnDismissListener != null) {
                mOnDismissListener.onDismiss();
            }
        }
    }

    /**
     * Adapter for backing the list of activities shown in the popup.
     */
    private class ActivityChooserViewAdapter extends BaseAdapter {

        public static final int MAX_ACTIVITY_COUNT_UNLIMITED = Integer.MAX_VALUE;

        public static final int MAX_ACTIVITY_COUNT_DEFAULT = 4;

        private static final int ITEM_VIEW_TYPE_ACTIVITY = 0;

        private static final int ITEM_VIEW_TYPE_FOOTER = 1;

        private static final int ITEM_VIEW_TYPE_COUNT = 3;

        // The model feeding this adapter; may be null until installed.
        private ActivityChooserModel mDataModel;

        // Cap on how many activity rows are exposed (the footer is extra).
        private int mMaxActivityCount = MAX_ACTIVITY_COUNT_DEFAULT;

        private boolean mShowDefaultActivity;

        private boolean mHighlightDefaultActivity;

        private boolean mShowFooterView;

        public void setDataModel(ActivityChooserModel dataModel) {
            ActivityChooserModel oldDataModel = mAdapter.getDataModel();
            // Swap the observer registration only while the view is visible.
            if (oldDataModel != null && isShown()) {
                oldDataModel.unregisterObserver(mModelDataSetOberver);
            }
            mDataModel = dataModel;
            if (dataModel != null && isShown()) {
                dataModel.registerObserver(mModelDataSetOberver);
            }
            notifyDataSetChanged();
        }

        @Override
        public int getItemViewType(int position) {
            if (mShowFooterView && position == getCount() - 1) {
                return ITEM_VIEW_TYPE_FOOTER;
            } else {
                return ITEM_VIEW_TYPE_ACTIVITY;
            }
        }

        @Override
        public int getViewTypeCount() {
            return ITEM_VIEW_TYPE_COUNT;
        }

        public int getCount() {
            // NOTE(review): throws NPE if called before a data model is installed.
            int count = 0;
            int activityCount = mDataModel.getActivityCount();
            // When the default is promoted to its own button it is hidden from the list.
            if (!mShowDefaultActivity && mDataModel.getDefaultActivity() != null) {
                activityCount--;
            }
            count = Math.min(activityCount, mMaxActivityCount);
            if (mShowFooterView) {
                count++;
            }
            return count;
        }

        public Object getItem(int position) {
            final int itemViewType = getItemViewType(position);
            switch (itemViewType) {
                case ITEM_VIEW_TYPE_FOOTER:
                    return null;
                case ITEM_VIEW_TYPE_ACTIVITY:
                    // Skip over the hidden default entry at model index 0.
                    if (!mShowDefaultActivity && mDataModel.getDefaultActivity() != null) {
                        position++;
                    }
                    return mDataModel.getActivity(position);
                default:
                    throw new IllegalArgumentException();
            }
        }

        public long getItemId(int position) {
            return position;
        }

        public View getView(int position, View convertView, ViewGroup parent) {
            final int itemViewType = getItemViewType(position);
            switch (itemViewType) {
                case ITEM_VIEW_TYPE_FOOTER:
                    // The footer reuses the regular row layout, tagged via its view id.
                    if (convertView == null || convertView.getId() != ITEM_VIEW_TYPE_FOOTER) {
                        convertView = LayoutInflater.from(getContext()).inflate(
                                R.layout.abc_activity_chooser_view_list_item, parent, false);
                        convertView.setId(ITEM_VIEW_TYPE_FOOTER);
                        TextView titleView = (TextView) convertView.findViewById(R.id.title);
                        titleView.setText(getContext().getString(
                                R.string.abc_activity_chooser_view_see_all));
                    }
                    return convertView;
                case ITEM_VIEW_TYPE_ACTIVITY:
                    if (convertView == null || convertView.getId() != R.id.list_item) {
                        convertView = LayoutInflater.from(getContext()).inflate(
                                R.layout.abc_activity_chooser_view_list_item, parent, false);
                    }
                    PackageManager packageManager = getContext().getPackageManager();
                    // Set the icon
                    ImageView iconView = (ImageView) convertView.findViewById(R.id.icon);
                    ResolveInfo activity = (ResolveInfo) getItem(position);
                    iconView.setImageDrawable(activity.loadIcon(packageManager));
                    // Set the title.
                    TextView titleView = (TextView) convertView.findViewById(R.id.title);
                    titleView.setText(activity.loadLabel(packageManager));
                    // Highlight the default.
                    if (mShowDefaultActivity && position == 0 && mHighlightDefaultActivity) {
                        //TODO convertView.setActivated(true);
                    } else {
                        //TODO convertView.setActivated(false);
                    }
                    return convertView;
                default:
                    throw new IllegalArgumentException();
            }
        }

        public int measureContentWidth() {
            // The user may have specified some of the target not to be shown but we
            // want to measure all of them since after expansion they should fit.
            final int oldMaxActivityCount = mMaxActivityCount;
            mMaxActivityCount = MAX_ACTIVITY_COUNT_UNLIMITED;
            int contentWidth = 0;
            View itemView = null;
            final int widthMeasureSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
            final int heightMeasureSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
            final int count = getCount();
            for (int i = 0; i < count; i++) {
                itemView = getView(i, itemView, null);
                itemView.measure(widthMeasureSpec, heightMeasureSpec);
                contentWidth = Math.max(contentWidth, itemView.getMeasuredWidth());
            }
            // Restore the caller-visible cap after measuring.
            mMaxActivityCount = oldMaxActivityCount;
            return contentWidth;
        }

        public void setMaxActivityCount(int maxActivityCount) {
            if (mMaxActivityCount != maxActivityCount) {
                mMaxActivityCount = maxActivityCount;
                notifyDataSetChanged();
            }
        }

        public ResolveInfo getDefaultActivity() {
            return mDataModel.getDefaultActivity();
        }

        public void setShowFooterView(boolean showFooterView) {
            if (mShowFooterView != showFooterView) {
                mShowFooterView = showFooterView;
                notifyDataSetChanged();
            }
        }

        public int getActivityCount() {
            return mDataModel.getActivityCount();
        }

        public int getHistorySize() {
            return mDataModel.getHistorySize();
        }

        public int getMaxActivityCount() {
            return mMaxActivityCount;
        }

        public ActivityChooserModel getDataModel() {
            return mDataModel;
        }

        public void setShowDefaultActivity(boolean showDefaultActivity,
                boolean highlightDefaultActivity) {
            if (mShowDefaultActivity != showDefaultActivity
                    || mHighlightDefaultActivity != highlightDefaultActivity) {
                mShowDefaultActivity = showDefaultActivity;
                mHighlightDefaultActivity = highlightDefaultActivity;
                notifyDataSetChanged();
            }
        }

        public boolean getShowDefaultActivity() {
            return mShowDefaultActivity;
        }
    }
}
/*
 * Copyright (c) 2013-2017 Cinchapi Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cinchapi.concourse.test;

import java.io.File;

import org.apache.thrift.TException;
import org.apache.thrift.transport.TTransportException;
import org.junit.Rule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

import com.cinchapi.common.io.ByteBuffers;
import com.cinchapi.concourse.Concourse;
import com.cinchapi.concourse.server.ConcourseServer;
import com.cinchapi.concourse.server.io.FileSystem;
import com.cinchapi.concourse.test.Variables;
import com.cinchapi.concourse.thrift.AccessToken;
import com.cinchapi.concourse.time.Time;
import com.google.common.base.Throwables;

/**
 * This is the base class for all integration tests. This class contains logic
 * to setup a new {@link #server} and a corresponding {@link #client} connection
 * before every test. At the end of each test, those resources are cleaned up.
 * <p>
 * Interaction with the server goes through the {@link #client} variable.
 * </p>
 *
 * @author Jeff Nelson
 */
public abstract class ConcourseIntegrationTest {

    // Initialization for all tests
    static {
        System.setProperty("test", "true");
    }

    /**
     * The tests run against a local server.
     */
    protected static final String SERVER_HOST = "localhost";

    /**
     * The default server port is 1717, so we use 1718 as to avoid interfering
     * with any real servers that might be running.
     */
    protected static final int SERVER_PORT = 1718;

    /**
     * The test server stores data in a distinct folder under the user's home
     * directory. This directory is deleted after each test.
     */
    private static final String SERVER_DATA_HOME = System
            .getProperty("user.home") + File.separator + "concourse_"
            + Long.toString(Time.now());

    /**
     * The directory where the test server's database files are stored.
     */
    private static final String SERVER_DATABASE_DIRECTORY = SERVER_DATA_HOME
            + File.separator + "db";

    /**
     * The directory where the test server's buffer files are stored.
     */
    private static final String SERVER_BUFFER_DIRECTORY = SERVER_DATA_HOME
            + File.separator + "buffer";

    /**
     * The instance of the local server that is running. The subclass should not
     * need to access this directly because all calls should be funneled through
     * the {@link #client}.
     */
    private ConcourseServer server;

    /**
     * The client that is used to interact with the server.
     */
    protected Concourse client;

    /**
     * JUnit rule that brackets every test with server/client setup and
     * teardown, and dumps the {@link Variables} registry on failure.
     * NOTE(review): JUnit invokes {@code finished} after {@code failed}, so on
     * a failing test {@code stop()} and {@code afterEachTest()} run twice; the
     * original behavior is preserved here — confirm whether that is intended.
     */
    @Rule
    public TestWatcher __watcher = new TestWatcher() {

        @Override
        protected void failed(Throwable t, Description description) {
            // Surface the failure context before tearing anything down.
            System.err.println("TEST FAILURE in " + description.getMethodName()
                    + ": " + t.getMessage());
            System.err.println("---");
            System.err.println(Variables.dump());
            System.err.println("");
            stop();
            afterEachTest();
        }

        @Override
        protected void finished(Description description) {
            stop();
            afterEachTest();
        }

        @Override
        protected void starting(Description description) {
            Variables.clear();
            start();
            beforeEachTest();
        }
    };

    /**
     * This method is provided for the subclass to specify additional behaviour
     * to be run after each test is done. The subclass should define such logic
     * in this method as opposed to a test watcher.
     */
    protected void afterEachTest() {}

    /**
     * This method is provided for the subclass to specify additional behaviour
     * to be run before each test begins. The subclass should define such logic
     * in this method as opposed to a test watcher.
     */
    protected void beforeEachTest() {}

    /**
     * Grant access to the server for a user identified by {@code username} and
     * {@code password}.
     *
     * @param username the username to create/authorize
     * @param password the password for the new user
     */
    protected final void grantAccess(String username, String password) {
        try {
            // Authenticate as the built-in admin before provisioning the user.
            AccessToken token = server.login(
                    ByteBuffers.fromUtf8String("admin"),
                    ByteBuffers.fromUtf8String("admin"));
            server.grant(ByteBuffers.fromUtf8String(username),
                    ByteBuffers.fromUtf8String(password), token);
        }
        catch (TException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Disable access to the server for the user identified by {@code username}.
     *
     * @param username the username for which access should be disabled
     */
    protected final void disableAccess(String username) {
        try {
            AccessToken token = server.login(
                    ByteBuffers.fromUtf8String("admin"),
                    ByteBuffers.fromUtf8String("admin"));
            server.disableUser(ByteBuffers.fromUtf8String(username), token);
        }
        catch (TException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Reset the test by stopping the server, deleting any stored data, and
     * starting a new server.
     */
    protected void reset() {
        stop();
        start();
    }

    /**
     * Restart the embedded server. This method will preserve stored data.
     */
    protected void restartServer() {
        server.stop();
        start();
    }

    /**
     * Startup a new {@link ConcourseServer} and grab a new client connection.
     */
    private void start() {
        startServer();
        client = Concourse.connect(SERVER_HOST, SERVER_PORT, "admin", "admin");
    }

    /**
     * Start an embedded server on a background daemon-less thread.
     */
    private void startServer() {
        try {
            server = ConcourseServer.create(SERVER_PORT,
                    SERVER_BUFFER_DIRECTORY, SERVER_DATABASE_DIRECTORY);
        }
        catch (TTransportException e) {
            throw Throwables.propagate(e);
        }
        // The server blocks its hosting thread, so run it off the test thread.
        Thread t = new Thread(new Runnable() {

            @Override
            public void run() {
                try {
                    server.start();
                }
                catch (TTransportException e) {
                    throw Throwables.propagate(e);
                }
            }
        });
        t.start();
    }

    /**
     * Exit the client. Stop the server. Delete any stored data.
     */
    private void stop() {
        client.exit();
        server.stop();
        FileSystem.deleteDirectory(SERVER_DATA_HOME);
        FileSystem.deleteFile(".access"); // delete the creds in case there were
                                          // any changes made during a test
    }

}
package jaist.echonet.sampledevices;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;
import jaist.echonet.AbstractEchonetObject;
import jaist.echonet.EchoEventListener;
import jaist.echonet.WriteEventAdapter;
import jaist.echonet.EchonetAnswer;
import jaist.echonet.EchonetCharacterProperty;
import jaist.echonet.EchonetDateProperty;
import jaist.echonet.EchonetNode;
import jaist.echonet.EchonetProperty;
import jaist.echonet.EchonetTimeProperty;
import jaist.echonet.LocalEchonetObject;
import jaist.echonet.PropertyMemento;
import jaist.echonet.RemoteEchonetObject;
import jaist.echonet.config.VirtualAirconditionInfo;
import jaist.echonet.gui.SupportsAppend;
import jaist.echonet.wrappers.Aircondition;
import jaist.echonet.wrappers.LocalWriter;
import jaist.echonet.util.Utils;

/**
 * Implementation of a virtual air condition unit. This air condition unit
 * outputs the results of its operation on an "appendable" object (implement the
 * {@link SupportsAppend} interface and pass it as an argument to the
 * constructor).
 * <p>
 * What this airconditon does, is it intercepts commands and prints out on the
 * appendable the actions it is going to take. It uses a mix of:
 * <ul>
 * <li> overriding the read operation of custom properties</li>
 * <li> overriding the write operation of custom properties</li>
 * <li> write listeners </li>
 * <li> use of wrapper functions </li>
 * </ul>
 * to implement its device logic. Usually, the write listener will intercept
 * the command, check its validity and apply the requested setting. However, at
 * a second level, the written property is also backed up by an
 * EchonetCharacterProperty, that will be written for consistency. When a write
 * request fails, a Set*_SNA is returned to the originator of the request.
 * <p>
 * This air condition supports save states, by using {@link PropertyMemento}s as
 * a serialization mechanism.
 * <p>
 * Setting the temperature property of the aircondition will also end up setting
 * the corresponding "mode" temperature property, i.e. if the temperature is set
 * when the operation mode is in COOL, then also the "cooling" temperature will
 * be set at the same time. However, the other way around is not happening.
 * <p>
 * As final remarks, this code was developed in a short amount of time for demo
 * purposes, so its quality is at best throwaway. Do not consider it as "best
 * practice" or as a reference for correctness, just use it as a hint to see
 * what is possible.
 *
 * @author Sioutis Marios
 */
public class VirtualAirCondition extends Aircondition {

    /**
     * Operation modes, each with a rough power weight (percent) that feeds the
     * virtual power-consumption property.
     */
    enum OpMode {

        AUTO(100), COOLING(120), HEATING(150), DEHUMIDIFY(80), FAN(50), OTHER(100);
        int power; //percentage

        OpMode(int powerpercent) {
            this.power = powerpercent;
        }

        int getPower() {
            return power;
        }
    }

    /**
     * Air flow amounts, also weighted (percent) for the power-consumption
     * calculation.
     */
    enum FlowAmount {

        LEAST(50), LESS(75), MEDIUM(100), HIGH(120), HIGHEST(150), AUTO(100);
        int power; //percentage

        FlowAmount(int powerpercent) {
            this.power = powerpercent;
        }

        int getPower() {
            return power;
        }
    }

    /**
     * Air flow directions.
     */
    enum FlowDirection {

        UP, DOWN, CENTER, CENTERUP, CENTERDOWN,
    }

    // Output sink on which the device narrates every action it takes.
    private SupportsAppend appendable;
    // True when automatic temperature control (EPC 0xB1) is enabled.
    private boolean autotemp = false;
    // Last requested temperature (EPC 0xB3), widened from byte.
    private int temperature;
    private byte dehumidtemperature;
    private byte heattemperature;
    private byte cooltemperature;
    private FlowAmount flowAmount = FlowAmount.AUTO;
    private FlowDirection flowDirection = FlowDirection.CENTER;
    private OpMode opMode = OpMode.AUTO;
    private boolean autodirection = true;
    // NOTE(review): never assigned; the constructor schedules a fresh TimeKeeper
    // on a local Timer instead — confirm whether this field is still needed.
    private TimeKeeper timer = null;
    // EPCs whose state is persisted by saveState()/loadState().
    final byte[] proplist = {
        (byte) 0xb0, (byte) 0xb1, (byte) 0xb3, (byte) 0xb5,
        (byte) 0xb6, (byte) 0xb7, (byte) 0xa0, (byte) 0xa1, (byte) 0xa4,
        (byte) 0x90, (byte) 0x92, (byte) 0x94, (byte) 0x96
    };
    // File used to persist the device state between on/off cycles.
    final String filename = "aircon.state";

    // First-level listener: logs every incoming write and lets processing continue.
    EchoEventListener listener = new EchoEventListener() {

        @Override
        public boolean processWriteEvent(EchonetProperty property) {
            appendable.append("Received property: "
                    + Utils.toHexString(property.getPropertyCode())
                    + " data: " + Utils.toHexString(property.read()) + "\n");
            return true;
        }

        @Override
        public boolean processNotificationEvent(RemoteEchonetObject robject,
                EchonetProperty property) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public void processAnswer(EchonetAnswer answer) {
            throw new UnsupportedOperationException("Not supported yet.");
        }
    };

    // Second-level listener: validates and applies the supported EPC writes.
    // A false return rejects the write (a Set*_SNA goes back to the requester).
    EchoEventListener writelistener = new WriteEventAdapter() {

        @Override
        public boolean processWriteEvent(EchonetProperty property) {
            byte[] data = property.read();
            if (data == null || data.length == 0) {
                return true;
            }
            switch (property.getPropertyCode()) {
                case (byte) 0xb0:
                    //mode setting.
                    if (data.length != 1) {
                        return false;
                    }
                    return setOpMode(data[0]);
                case (byte) 0xb1:
                    //auto temp control
                    if (data.length != 1) {
                        return false;
                    }
                    return setAutoTemp(data[0]);
                case (byte) 0xb3:
                    //temp setting
                    if (data.length != 1) {
                        return false;
                    }
                    return setTemp(data[0]);
                case (byte) 0xA0:
                    if (data.length != 1) {
                        return false;
                    }
                    return setAirAmount(data[0]);
                //generic check for the temperatures
                case (byte) 0xB5:
                case (byte) 0xB6:
                case (byte) 0xB7:
                    if (data.length != 1 || data[0] < 18 || data[0] > 31) {
                        appendable.appendln("Requested temperature out of bounds");
                        return false;
                    }
                    return true;
                case (byte) 0xA1:
                    if (data.length != 1) {
                        return false;
                    }
                    return setAutoDirection(data[0]);
                case (byte) 0xA4:
                    if (data.length != 1) {
                        return false;
                    }
                    return setAirDirection(data[0]);
                case (byte) 0x90:
                    if (data.length != 1) {
                        return false;
                    }
                    return startOnTimer(data[0]);
                case (byte) 0x94:
                    if (data.length != 1) {
                        return false;
                    }
                    return startOffTimer(data[0]);
                default:
                    //this is necessary!
                    //we want to allow events we don't handle be handled by anybody
                    //else that can.
                    return true;
            }
        }
    };

    /**
     * Creates the virtual air conditioner, registers it with the given node and
     * starts the 4-second timer tick that drives the ON/OFF relative timers.
     *
     * @param node the ECHONET node to register this device with
     * @param appendable sink that receives the device's textual output
     */
    public VirtualAirCondition(EchonetNode node, SupportsAppend appendable) {
        setAppendable(appendable);
        setup();
        this.getLocalEchonetObject().registerListener(listener);
        this.getLocalEchonetObject().registerListener(writelistener);
        this.registerSelfWithNode(node);
        Timer t = new Timer(true);
        t.scheduleAtFixedRate(new TimeKeeper(), 4000, 4000);
    }

    /**
     * Periodic task that counts down the ON/OFF relative timers and flips the
     * power state when one of them expires.
     */
    private class TimeKeeper extends TimerTask {

        final int ONTIMER = 1;
        final int OFFTIMER = 0;

        @Override
        public void run() {
            //appendable.appendln("Time keeper run");
            String out = "Timer Expired: Air condition will ";
            if (isTimerEnabled(getTimerOnStatus())) {
                //on timer is enabled.
                if (decreaseTimer(ONTIMER)) {
                    //aircondition must turn on
                    appendable.appendln(out + "TURN ON");
                    setTimerOnStatus((byte) 0x42);
                    setStatus(true);
                }
            }
            if (isTimerEnabled(getTimerOffStatus())) {
                //off timer is enabled.
                if (decreaseTimer(OFFTIMER)) {
                    //aircondition must turn off
                    appendable.appendln(out + "TURN OFF");
                    setTimerOffStatus((byte) 0x42);
                    setStatus(false);
                }
            }
        }

        // Decrements the selected relative timer by one minute per tick and
        // reports whether it has reached zero (i.e. the timer expired).
        private boolean decreaseTimer(int timer) {
            String out = "Time left: ";
            int minutes = 0;
            if (timer == ONTIMER) {
                minutes = convertRelativeTimeToMinutes(getTimerOnRelative());
                setTimerOnRelative(convertMinutesToRelativeTime(--minutes));
                appendable.appendln("ON timer: " + out + minutes + "minutes");
            }
            if (timer == OFFTIMER) {
                minutes = convertRelativeTimeToMinutes(getTimerOffRelative());
                setTimerOffRelative(convertMinutesToRelativeTime(--minutes));
                appendable.appendln("OFF timer: " + out + minutes + "minutes");
            }
            if (minutes <= 0) {
                return true;
            }
            return false;
        }
    }

    /**
     * Operation status property (EPC 0x80). Writes of 0x30/0x31 switch the
     * unit on/off and trigger load/save of the persisted state.
     */
    class OnOffProperty extends EchonetProperty {

        public OnOffProperty() {
            //super((byte) 0x80, true, true);
            super((byte) 0x80, true, true, true, 1, EchonetProperty.EXACT);
        }
        // Current status byte; 0x31 appears to mean "off" here (it triggers
        // switchOff below) — NOTE(review): confirm against the EPC 0x80 spec.
        byte abyte = 0x31;

        @Override
        public byte[] read() {
            return new byte[]{abyte};
        }

        @Override
        public boolean write(byte[] data) {
            // NOTE(review): in this class's convention a true return from a
            // property write appears to signal failure (see PowerConsumption) —
            // confirm against EchonetProperty's contract.
            if (data == null || data.length != 1) {
                return true;
            }
            if (data[0] == (byte) 0x31) {
                abyte = data[0];
                switchOff();
                return false;
            }
            if (data[0] == (byte) 0x30) {
                abyte = data[0];
                switchOn();
                return false;
            }
            return true;
        }

        @Override
        public boolean isEmpty() {
            return false;
        }
    }

    /**
     * Read-only virtual power consumption property (EPC 0x84), derived from the
     * current operation mode and air flow amount.
     */
    class PowerConsumption extends EchonetProperty {

        PowerConsumption() {
            super((byte) 0x84, false, false);
        }

        @Override
        public byte[] read() {
            short energy = 0;
            if (getStatus() == false) {
                //device is turned off. no energy consumption.
                return ShortToBytes(energy);
            }
            energy = 100; //a "base energy", nothing really meaningfull
            //simple formula: base + (base * flowamount/100 * opMode/100 )
            energy += energy * flowAmount.getPower() * opMode.getPower() / 10000;
            return ShortToBytes(energy);
        }

        @Override
        public boolean write(byte[] data) {
            return true; // this is not writeable, an "error" occured
        }

        @Override
        public boolean isEmpty() {
            return false;
        }
    }

    /**
     * Backs one of the per-mode temperature EPCs (0xB5/0xB6/0xB7). While the
     * unit is in the matching operation mode, a write is mirrored into the
     * plain temperature property (0xB3) as well.
     */
    class TemperatureHandler extends EchonetCharacterProperty {

        // Operation-mode byte that this temperature property corresponds to.
        private byte opmode;

        public TemperatureHandler(byte propcode, byte opmode) {
            super(propcode, true, false, 1);
            this.opmode = opmode;
        }

        @Override
        public boolean write(byte[] data) {
            if (VirtualAirCondition.this.getOperationMode() == this.opmode) {
                //black magic: to avoid a loop we handle the raw temperature property by bypassing any check mechanisms
                VirtualAirCondition.this.getLocalEchonetObject().getProperty((byte) 0xB3).write(data);
                String output = " set to " + data[0] + " degrees";
                switch (this.getPropertyCode()) {
                    case (byte) 0xb5:
                        appendable.appendln("Heating temperature" + output);
                        break;
                    case (byte) 0xb6:
                        appendable.appendln("Cooling temperature" + output);
                        break;
                    case (byte) 0xb7:
                        appendable.appendln("Dehumidify temperature" + output);
                        break;
                }
            }
            super.write(data);
            return false;
        }
    }

    // Serializes the mementos of all EPCs in proplist to the state file.
    private void saveState() {
        ObjectOutputStream oout;
        try {
            oout = new ObjectOutputStream(new FileOutputStream(filename));
            AbstractEchonetObject raw = this.getEchonetObject();
            for (byte opcode : proplist) {
                oout.writeObject(raw.getProperty(opcode).getMemento());
                debug("Write: ", raw.getProperty(opcode).getMemento());
            }
            oout.close();
            appendable.appendln("Save state: Successful");
        } catch (FileNotFoundException ex) {
            Logger.getLogger(VirtualAirCondition.class.getName()).log(Level.SEVERE, "The file was not found", ex);
            return;
        } catch (IOException ex) {
            Logger.getLogger(VirtualAirCondition.class.getName()).log(Level.SEVERE, "IO error occured", ex);
            appendable.appendln("IO Error: failed to save state");
            return;
        }
    }

    // Dumps a memento's EPC and payload on the appendable for tracing.
    private void debug(String string, PropertyMemento memento) {
        appendable.appendln(string + Utils.toHexString(memento.getPropertyCode())
                + " " + Utils.toHexString(memento.read()));
    }

    // Restores the persisted property mementos from the state file, writing
    // each one back into the corresponding live property.
    private void loadState() {
        ObjectInputStream oin = null;
        try {
            oin = new ObjectInputStream(new FileInputStream(filename));
            PropertyMemento state;
            for (byte opcode : proplist) {
                try {
                    state = (PropertyMemento) oin.readObject();
                    debug("Read: ", state);
                    this.getEchonetObject().getProperty(state.getPropertyCode()).write(state.read());
                } catch (ClassNotFoundException ex) {
                    Logger.getLogger(VirtualAirCondition.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
            appendable.appendln("Load state: Successful");
        } catch (FileNotFoundException ex) {
            Logger.getLogger(VirtualAirCondition.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(VirtualAirCondition.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // NOTE(review): the NullPointerException catch stands in for an
            // "oin != null" check when the stream never opened.
            try {
                oin.close();
            } catch (IOException ex) {
                Logger.getLogger(VirtualAirCondition.class.getName()).log(Level.SEVERE, null, ex);
            } catch (NullPointerException e) {
                //do nothing
            }
        }
    }

    // Builds the backing local ECHONET object and installs all custom properties.
    private void setup() {
        this.rawobject = new LocalEchonetObject(new VirtualAirconditionInfo());
        this.rawobject.addProperty(new EchonetDateProperty());
        this.rawobject.addProperty(new OnOffProperty());
        this.rawobject.addProperty(new EchonetTimeProperty());
        this.rawobject.addProperty(new PowerConsumption());
        this.rawobject.addProperty(new TemperatureHandler((byte) 0xb5, (byte) 0x42));
        this.rawobject.addProperty(new TemperatureHandler((byte) 0xb6, (byte) 0x43));
        this.rawobject.addProperty(new TemperatureHandler((byte) 0xb7, (byte) 0x44));
        this.writer = new LocalWriter((LocalEchonetObject) this.rawobject);
        this.getLocalEchonetObject().updatePropertyMap();
    }

    /**
     * @return the appendable
     */
    public SupportsAppend getAppendable() {
        return appendable;
    }

    /**
     * @param appendable the appendable to set
     */
    public void setAppendable(SupportsAppend appendable) {
        this.appendable = appendable;
        appendable.append("Virtual Aircondition: attatched to output\n");
    }

    // Announces power-on and restores the previously saved state.
    private void switchOn() {
        appendable.append("Aircondition is switched on \n");
        this.loadState();
    }

    // Announces power-off and persists the current state.
    private void switchOff() {
        appendable.append("Aircondition is switched off \n");
        this.saveState();
    }

    // Applies an already-decoded operation mode and reports it.
    private boolean setOpM(OpMode mode) {
        this.opMode = mode;
        appendable.appendln("Operation mode set to: " + mode);
        return true;
    }

    // Decodes the EPC 0xB0 mode byte into an OpMode; rejects unknown values.
    private boolean setOpMode(byte mode) {
        switch (mode) {
            case 0x41:
                return setOpM(OpMode.AUTO);
            case 0x42:
                return setOpM(OpMode.COOLING);
            case 0x43:
                return setOpM(OpMode.HEATING);
            case 0x44:
                return setOpM(OpMode.DEHUMIDIFY);
            case 0x45:
                return setOpM(OpMode.FAN);
            default:
                appendable.appendln("Operation mode: bad request");
                return false;
        }
    }

    // EPC 0xB1: 0x41 enables automatic temperature control, 0x42 disables it.
    private boolean setAutoTemp(byte auto) {
        if (auto == 0x41) {
            this.autotemp = true;
            appendable.append("Temperature mode set to: Auto\n");
            return true;
        }
        if (auto == 0x42) {
            this.autotemp = false;
            appendable.append("Temperature mode set to: Manual\n");
            return true;
        }
        appendable.appendln("Temperature mode: bad request (hex : "
                + Utils.toHexString(auto) + ")");
        return false;
    }

    // EPC 0xB3: validates the requested temperature and cascades it into the
    // per-mode temperature matching the current operation mode.
    private boolean setTemp(byte temp) {
        // NOTE(review): this accepts 18..30 while the writelistener's generic
        // 0xB5-0xB7 check accepts 18..31 — confirm which bound is intended.
        if (temp < 18 || temp > 30) {
            appendable.appendln("Requested Temperature is out of the specification (" + temp + " degrees).");
            return false;
        }
        this.temperature = temp;
        appendable.append("Temperature set to: " + temp + " degrees\n");
        switch (opMode) {
            case COOLING:
                return setCoolTemp(temp);
            case HEATING:
                return setHeatTemp(temp);
            case DEHUMIDIFY:
                return setDehumidTemp(temp);
        }
        return true;
    }

    private boolean setCoolTemp(byte temp) {
        if (temp < 0 || temp > 50) {
            appendable.appendln("Requested Cooling Temperature is out of the specification (" + temp + " degrees).");
            return false;
        }
        this.cooltemperature = temp;
        appendable.append("Cooling temperature set to: " + temp + " degrees\n");
        return true;
    }

    private boolean setHeatTemp(byte temp) {
        if (temp < 0 || temp > 50) {
            appendable.appendln("Requested Heating Temperature is out of the specification (" + temp + " degrees).");
            return false;
        }
        this.heattemperature = temp;
        appendable.append("Heating temperature set to: " + temp + " degrees\n");
        return true;
    }

    private boolean setDehumidTemp(byte temp) {
        if (temp < 0 || temp > 50) {
            appendable.appendln("Requested Dehumidify Temperature is out of the specification (" + temp + " degrees).");
            return false;
        }
        this.dehumidtemperature = temp;
        appendable.append("Dehumidify temperature set to: " + temp + " degrees\n");
        return true;
    }

    // Applies an already-decoded flow amount and reports it.
    private boolean setAirA(FlowAmount flowa) {
        flowAmount = flowa;
        appendable.appendln("Air flow has been set to: " + flowAmount);
        return true;
    }

    // EPC 0xA0: maps the 0x31-0x38/0x41 codes onto the coarser FlowAmount enum
    // (adjacent codes share a level).
    private boolean setAirAmount(byte amount) {
        switch (amount) {
            case 0x31:
                return setAirA(FlowAmount.LEAST);
            case 0x32:
            case 0x33:
                return setAirA(FlowAmount.LESS);
            case 0x34:
            case 0x35:
                return setAirA(FlowAmount.MEDIUM);
            case 0x36:
            case 0x37:
                return setAirA(FlowAmount.HIGH);
            case 0x38:
                return setAirA(FlowAmount.HIGHEST);
            case 0x41:
                return setAirA(FlowAmount.AUTO);
            default:
                appendable.appendln("Air flow: bad request");
                return false;
        }
    }

    // EPC 0xA1: 0x41 enables automatic swing direction, 0x42 disables it.
    private boolean setAutoDirection(byte autodirect) {
        final String out = "Auto flow direction set to: ";
        switch (autodirect) {
            case 0x41:
                appendable.appendln(out + "TRUE");
                this.autodirection = true;
                return true;
            case 0x42:
                appendable.appendln(out + "FALSE");
                this.autodirection = false;
                return true;
            default:
                appendable.appendln(out + "Bad request");
                return false;
        }
    }

    // Applies an already-decoded flow direction and reports it.
    private boolean setAirD(FlowDirection flowd) {
        this.flowDirection = flowd;
        appendable.appendln("Air direction set to: " + flowDirection);
        return true;
    }

    // EPC 0xA4: decodes the direction byte into a FlowDirection.
    private boolean setAirDirection(byte direction) {
        switch (direction) {
            case 0x41:
                return setAirD(FlowDirection.UP);
            case 0x42:
                return setAirD(FlowDirection.DOWN);
            case 0x43:
                return setAirD(FlowDirection.CENTER);
            case 0x44:
                return setAirD(FlowDirection.CENTERUP);
            case 0x45:
                return setAirD(FlowDirection.CENTERDOWN);
            default:
                appendable.appendln("Air direction: bad request");
                return false;
        }
    }

    // EPC 0x90: accepts an ON-timer enable request only when the stored
    // relative time is positive; 0x42 disables the timer.
    private boolean startOnTimer(byte setting) {
        String out = "On timer setting: ";
        switch (setting) {
            case 0x41:
            case 0x43:
            case 0x44:
                int minutesleft = convertRelativeTimeToMinutes(getTimerOnRelative());
                if (minutesleft == 0) {
                    appendable.appendln(out + "Request ignored, time is set to zero");
                    return false;
                }
                if (minutesleft < 0) {
                    appendable.appendln(out + "Request ignored, invalid time requested");
                    return false;
                } else {
                    appendable.appendln(out + " ON timer enabled");
                    return true;
                }
            case 0x42:
                appendable.appendln(out + "ON timer disabled");
                return true;
            default:
                appendable.appendln(out + "bad request");
                return false;
        }
    }

    // EPC 0x94: OFF-timer counterpart of startOnTimer.
    private boolean startOffTimer(byte setting) {
        String out = "Off timer setting: ";
        switch (setting) {
            case 0x41:
            case 0x43:
            case 0x44:
                int minutesleft = convertRelativeTimeToMinutes(getTimerOffRelative());
                // NOTE(review): the message below looks truncated ("set to ") and,
                // because of the <= 0 test, the following < 0 branch is dead code;
                // compare with startOnTimer which uses == 0 then < 0.
                if (minutesleft <= 0) {
                    appendable.appendln(out + "Request ignored, time is set to ");
                    return false;
                }
                if (minutesleft < 0) {
                    appendable.appendln(out + "Request ignored, invalid time requested");
                    return false;
                } else {
                    appendable.appendln(out + " OFF timer enabled");
                    return true;
                }
            case 0x42:
                appendable.appendln(out + "OFF timer disabled");
                return true;
            default:
                appendable.appendln(out + "bad request");
                return false;
        }
    }

    // True when the given timer-status byte (0x41/0x43/0x44) marks an armed timer.
    private boolean isTimerEnabled(byte timersetting) {
        switch (timersetting) {
            case 0x41:
            case 0x43:
            case 0x44:
                return true;
            default:
                return false;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.test.ESAllocationTestCase; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import java.util.ArrayList; import java.util.Collections; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; public class AddIncrementallyTests extends 
ESAllocationTestCase {

    private final ESLogger logger = Loggers.getLogger(AddIncrementallyTests.class);

    /**
     * Incrementally grows a 1-node / 3-index / 3-shard / 1-replica cluster and checks
     * shard distribution after each node add/remove and index add: shard counts per
     * node shrink as nodes are added, and newly added indices spread 2-per-node.
     */
    public void testAddNodesAndIndices() {
        Settings.Builder settings = Settings.builder();
        // ALWAYS lets the balancer rebalance even while shards are still initializing.
        settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
            ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString());
        AllocationService service = createAllocationService(settings.build());
        // 1 node, 3 indices x 3 shards x 1 replica => 9 primaries started, 9 replicas unassigned.
        ClusterState clusterState = initCluster(service, 1, 3, 3, 1);
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(9));
        int nodeOffset = 1;
        // Second node picks up all 9 replicas.
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(0));
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(3));
        // Each further node lowers the per-index-per-node shard count.
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(2));
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertNumIndexShardsPerNode(clusterState, Matchers.lessThanOrEqualTo(2));
        assertAtLeastOneIndexShardPerNode(clusterState);
        // Removing a node rebalances back to exactly 2 shards per index per node.
        clusterState = removeNodes(clusterState, service, 1);
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(2));
        // New index "test3" (2 shards x 3 replicas): 2 copies stay unassigned on 3 nodes.
        clusterState = addIndex(clusterState, service, 3, 2, 3);
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(2));
        assertNumIndexShardsPerNode(clusterState, "test3", Matchers.equalTo(2));
        assertNumIndexShardsPerNode(clusterState, Matchers.lessThanOrEqualTo(2));
        clusterState = addIndex(clusterState, service, 4, 2, 3);
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(4));
        assertNumIndexShardsPerNode(clusterState, "test4", Matchers.equalTo(2));
        assertNumIndexShardsPerNode(clusterState, Matchers.lessThanOrEqualTo(2));
        // A fourth node absorbs all unassigned copies.
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertNumIndexShardsPerNode(clusterState, Matchers.lessThanOrEqualTo(2));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(0));
        // Dropping a node strands 4 copies again; re-adding one reassigns them.
        clusterState = removeNodes(clusterState, service, 1);
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(4));
        assertNumIndexShardsPerNode(clusterState, Matchers.lessThanOrEqualTo(2));
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertNumIndexShardsPerNode(clusterState, Matchers.lessThanOrEqualTo(2));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(0));
        logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
    }

    /**
     * With node_concurrent_recoveries capped at 2, verifies that rebalancing onto a
     * freshly added third node proceeds in waves of exactly 2 relocations until the
     * node holds 6 shards and the routing table reaches a fixed point.
     */
    public void testMinimalRelocations() {
        Settings.Builder settings = Settings.builder();
        settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
                ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString())
            .put("cluster.routing.allocation.node_concurrent_recoveries", 2);
        AllocationService service = createAllocationService(settings.build());
        ClusterState clusterState = initCluster(service, 1, 3, 3, 1);
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(9));
        int nodeOffset = 1;
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(0));
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(3));
        logger.info("now, start one more node, check that rebalancing will happen because we set it to always");
        DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes());
        nodes.put(newNode("node2"));
        clusterState = ClusterState.builder(clusterState).nodes(nodes.build()).build();
        RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        // Throttle allows only 2 concurrent incoming recoveries on node2.
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        RoutingTable prev = routingTable;
        // Wave 1: first 2 shards start, next 2 begin initializing.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        prev = routingTable;
        // Wave 2: 4 started, 2 initializing.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), Matchers.equalTo(4));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        prev = routingTable;
        // Wave 3: all 6 started, nothing left to relocate.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), Matchers.equalTo(6));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        prev = routingTable;
        // Fixed point: applying started shards again must not change the table.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        assertThat(prev, Matchers.sameInstance(routingTable));
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(2));
        logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
    }

    /**
     * Same scenario as {@link #testMinimalRelocations()} but with effectively
     * unlimited recovery concurrency (both throttles at 100): relocations onto
     * node2 still proceed in waves of 2 — i.e. the balancer itself, not the
     * throttle, keeps relocations minimal.
     */
    public void testMinimalRelocationsNoLimit() {
        Settings.Builder settings = Settings.builder();
        settings.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
                ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString())
            .put("cluster.routing.allocation.node_concurrent_recoveries", 100)
            .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100);
        AllocationService service = createAllocationService(settings.build());
        ClusterState clusterState = initCluster(service, 1, 3, 3, 1);
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(9));
        int nodeOffset = 1;
        clusterState = addNodes(clusterState, service, 1, nodeOffset++);
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), Matchers.equalTo(9));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), Matchers.equalTo(0));
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(3));
        logger.info("now, start one more node, check that rebalancing will happen because we set it to always");
        DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes());
        nodes.put(newNode("node2"));
        clusterState = ClusterState.builder(clusterState).nodes(nodes.build()).build();
        RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        // Even without a throttle, only 2 shards relocate at a time.
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        RoutingTable prev = routingTable;
        // Wave 1: 2 started, 2 more initializing.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        prev = routingTable;
        // Wave 2: 4 started, 2 initializing.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), Matchers.equalTo(4));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        prev = routingTable;
        // Wave 3: all 6 started.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), Matchers.equalTo(6));
        assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node0").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(INITIALIZING).size(), Matchers.equalTo(0));
        assertThat(prev, Matchers.not(Matchers.sameInstance(routingTable)));
        prev = routingTable;
        // Fixed point reached.
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        assertThat(prev, Matchers.sameInstance(routingTable));
        assertNumIndexShardsPerNode(clusterState, Matchers.equalTo(2));
        logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
    }

    // Asserts that the per-node started-shard count of EVERY index matches the matcher.
    private void assertNumIndexShardsPerNode(ClusterState state, Matcher<Integer> matcher) {
        for (ObjectCursor<String> index : state.routingTable().indicesRouting().keys()) {
            assertNumIndexShardsPerNode(state, index.value, matcher);
        }
    }

    // Asserts that on every routing node, the started-shard count of the given index matches.
    private void assertNumIndexShardsPerNode(ClusterState state, String index, Matcher<Integer> matcher) {
        for (RoutingNode node : state.getRoutingNodes()) {
            assertThat(node.shardsWithState(index, STARTED).size(), matcher);
        }
    }

    // Asserts that every node holds at least one started shard of every index.
    private void assertAtLeastOneIndexShardPerNode(ClusterState state) {
        for (ObjectCursor<String> index : state.routingTable().indicesRouting().keys()) {
            for (RoutingNode node : state.getRoutingNodes()) {
                assertThat(node.shardsWithState(index.value, STARTED).size(), Matchers.greaterThanOrEqualTo(1));
            }
        }
    }

    /**
     * Adds {@code numNodes} nodes named node&lt;nodeOffset&gt;..., reroutes, then repeatedly
     * applies started shards until the routing table stops changing (fixed point).
     */
    private ClusterState addNodes(ClusterState clusterState, AllocationService service, int numNodes, int nodeOffset) {
        logger.info("now, start [{}] more node, check that rebalancing will happen because we set it to always", numNodes);
        DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes());
        for (int i = 0; i < numNodes; i++) {
            nodes.put(newNode("node" + (i + nodeOffset)));
        }
        clusterState = ClusterState.builder(clusterState).nodes(nodes.build()).build();
        RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        // move initializing to started
        RoutingTable prev = routingTable;
        while (true) {
            logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
            routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
            clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
            routingNodes = clusterState.getRoutingNodes();
            if (routingTable == prev)
                break;
            prev = routingTable;
        }
        return clusterState;
    }

    /**
     * Builds a fresh cluster: indices test0..test&lt;numberOfIndices-1&gt; with the given
     * shard/replica counts, nodes node0..node&lt;numberOfNodes-1&gt;, then starts primaries,
     * starts replicas, and iterates applyStartedShards to a routing-table fixed point.
     */
    private ClusterState initCluster(AllocationService service, int numberOfNodes, int numberOfIndices, int numberOfShards,
                                     int numberOfReplicas) {
        MetaData.Builder metaDataBuilder = MetaData.builder();
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
        for (int i = 0; i < numberOfIndices; i++) {
            IndexMetaData.Builder index = IndexMetaData.builder("test" + i).settings(settings(Version.CURRENT)).numberOfShards(numberOfShards).numberOfReplicas(
                    numberOfReplicas);
            metaDataBuilder = metaDataBuilder.put(index);
        }
        MetaData metaData = metaDataBuilder.build();
        for (ObjectCursor<IndexMetaData> cursor : metaData.indices().values()) {
            routingTableBuilder.addAsNew(cursor.value);
        }
        RoutingTable routingTable = routingTableBuilder.build();
        logger.info("start {} nodes", numberOfNodes);
        DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
        for (int i = 0; i < numberOfNodes; i++) {
            nodes.put(newNode("node" + i));
        }
        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).nodes(nodes).metaData(metaData).routingTable(routingTable).build();
        routingTable = service.reroute(clusterState, "reroute").routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        logger.info("restart all the primary shards, replicas will start initializing");
        routingNodes = clusterState.getRoutingNodes();
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("start the replica shards");
        routingNodes = clusterState.getRoutingNodes();
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("complete rebalancing");
        RoutingTable prev = routingTable;
        while (true) {
            logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
            routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
            clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
            routingNodes = clusterState.getRoutingNodes();
            if (routingTable == prev)
                break;
            prev = routingTable;
        }
        return clusterState;
    }

    /**
     * Adds index "test&lt;indexOrdinal&gt;" to the existing cluster state, reroutes,
     * starts primaries then replicas, and rebalances to a fixed point.
     */
    private ClusterState addIndex(ClusterState clusterState, AllocationService service, int indexOrdinal, int numberOfShards,
                                  int numberOfReplicas) {
        MetaData.Builder metaDataBuilder = MetaData.builder(clusterState.getMetaData());
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder(clusterState.routingTable());
        IndexMetaData.Builder index = IndexMetaData.builder("test" + indexOrdinal).settings(settings(Version.CURRENT)).numberOfShards(numberOfShards).numberOfReplicas(
                numberOfReplicas);
        IndexMetaData imd = index.build();
        metaDataBuilder = metaDataBuilder.put(imd, true);
        routingTableBuilder.addAsNew(imd);
        MetaData metaData = metaDataBuilder.build();
        RoutingTable routingTable = routingTableBuilder.build();
        clusterState = ClusterState.builder(clusterState).metaData(metaData).routingTable(routingTable).build();
        routingTable = service.reroute(clusterState, "reroute").routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        logger.info("restart all the primary shards, replicas will start initializing");
        routingNodes = clusterState.getRoutingNodes();
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("start the replica shards");
        routingNodes = clusterState.getRoutingNodes();
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("complete rebalancing");
        RoutingTable prev = routingTable;
        while (true) {
            logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
            routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
            clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
            routingNodes = clusterState.getRoutingNodes();
            if (routingTable == prev)
                break;
            prev = routingTable;
        }
        return clusterState;
    }

    /**
     * Removes {@code numNodes} randomly chosen nodes, then starts primaries,
     * starts replicas, reroutes, and rebalances to a routing-table fixed point.
     */
    private ClusterState removeNodes(ClusterState clusterState, AllocationService service, int numNodes) {
        logger.info("Removing [{}] nodes", numNodes);
        DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes());
        // Shuffle with the test's seeded random so the removal choice is reproducible.
        ArrayList<DiscoveryNode> discoveryNodes = CollectionUtils.iterableAsArrayList(clusterState.nodes());
        Collections.shuffle(discoveryNodes, random());
        for (DiscoveryNode node : discoveryNodes) {
            nodes.remove(node.getId());
            numNodes--;
            if (numNodes <= 0) {
                break;
            }
        }
        clusterState = ClusterState.builder(clusterState).nodes(nodes.build()).build();
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        logger.info("start all the primary shards, replicas will start initializing");
        RoutingTable routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("start the replica shards");
        routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("rebalancing");
        routingTable = service.reroute(clusterState, "reroute").routingTable();
        clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
        routingNodes = clusterState.getRoutingNodes();
        logger.info("complete rebalancing");
        RoutingTable prev = routingTable;
        while (true) {
            logger.debug("ClusterState: {}", clusterState.getRoutingNodes().prettyPrint());
            routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
            clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
            routingNodes = clusterState.getRoutingNodes();
            if (routingTable == prev)
                break;
            prev = routingTable;
        }
        return clusterState;
    }
}
/** * <copyright> * </copyright> * * $Id$ */ package org.wso2.developerstudio.eclipse.gmf.esb.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty; import org.wso2.developerstudio.eclipse.gmf.esb.RMSequenceMediator; import org.wso2.developerstudio.eclipse.gmf.esb.RMSequenceMediatorInputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.RMSequenceMediatorOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.RMSequenceType; import org.wso2.developerstudio.eclipse.gmf.esb.RMSpecVersion; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>RM Sequence Mediator</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.RMSequenceMediatorImpl#getRmSpecVersion <em>Rm Spec Version</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.RMSequenceMediatorImpl#getSequenceType <em>Sequence Type</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.RMSequenceMediatorImpl#getCorrelationXpath <em>Correlation Xpath</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.RMSequenceMediatorImpl#getLastMessageXpath <em>Last Message Xpath</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.RMSequenceMediatorImpl#getInputConnector <em>Input Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.RMSequenceMediatorImpl#getOutputConnector <em>Output Connector</em>}</li> * </ul> * * @generated */ public class RMSequenceMediatorImpl extends MediatorImpl implements RMSequenceMediator { /** * The default value of the '{@link #getRmSpecVersion() <em>Rm 
Spec Version</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getRmSpecVersion()
     * @generated
     * @ordered
     */
    protected static final RMSpecVersion RM_SPEC_VERSION_EDEFAULT = RMSpecVersion.VERSION_10;

    /**
     * The cached value of the '{@link #getRmSpecVersion() <em>Rm Spec Version</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getRmSpecVersion()
     * @generated
     * @ordered
     */
    protected RMSpecVersion rmSpecVersion = RM_SPEC_VERSION_EDEFAULT;

    /**
     * The default value of the '{@link #getSequenceType() <em>Sequence Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceType()
     * @generated
     * @ordered
     */
    protected static final RMSequenceType SEQUENCE_TYPE_EDEFAULT = RMSequenceType.SINGLE_MESSAGE;

    /**
     * The cached value of the '{@link #getSequenceType() <em>Sequence Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceType()
     * @generated
     * @ordered
     */
    protected RMSequenceType sequenceType = SEQUENCE_TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getCorrelationXpath() <em>Correlation Xpath</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getCorrelationXpath()
     * @generated
     * @ordered
     */
    protected NamespacedProperty correlationXpath;

    /**
     * The cached value of the '{@link #getLastMessageXpath() <em>Last Message Xpath</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getLastMessageXpath()
     * @generated
     * @ordered
     */
    protected NamespacedProperty lastMessageXpath;

    /**
     * The cached value of the '{@link #getInputConnector() <em>Input Connector</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getInputConnector()
     * @generated
     * @ordered
     */
    protected RMSequenceMediatorInputConnector inputConnector;

    /**
     * The cached value of the '{@link #getOutputConnector() <em>Output Connector</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOutputConnector()
     * @generated
     * @ordered
     */
    protected RMSequenceMediatorOutputConnector outputConnector;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated NOT
     */
    // NOTE(review): "@generated NOT" — this constructor is hand-written and must be
    // preserved across EMF regeneration. It pre-populates the two XPath containment
    // references with their UI defaults.
    protected RMSequenceMediatorImpl() {
        super();
        // Correlation xpath.
        NamespacedProperty correlationXpath = EsbFactoryImpl.eINSTANCE.createNamespacedProperty();
        correlationXpath.setPrettyName("Sequence XPath");
        correlationXpath.setPropertyName("correlation");
        correlationXpath.setPropertyValue(DEFAULT_XPATH_PROPERTY_VALUE);
        setCorrelationXpath(correlationXpath);
        // Last message xpath.
        NamespacedProperty lastMessageXpath = EsbFactoryImpl.eINSTANCE.createNamespacedProperty();
        lastMessageXpath.setPrettyName("Last message XPath");
        lastMessageXpath.setPropertyName("last-message");
        lastMessageXpath.setPropertyValue("");
        setLastMessageXpath(lastMessageXpath);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return EsbPackage.Literals.RM_SEQUENCE_MEDIATOR;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public RMSpecVersion getRmSpecVersion() {
        return rmSpecVersion;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setRmSpecVersion(RMSpecVersion newRmSpecVersion) {
        RMSpecVersion oldRmSpecVersion = rmSpecVersion;
        // null collapses to the enum default, per EMF convention for enum attributes.
        rmSpecVersion = newRmSpecVersion == null ? RM_SPEC_VERSION_EDEFAULT : newRmSpecVersion;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__RM_SPEC_VERSION, oldRmSpecVersion, rmSpecVersion));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public RMSequenceType getSequenceType() {
        return sequenceType;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSequenceType(RMSequenceType newSequenceType) {
        RMSequenceType oldSequenceType = sequenceType;
        sequenceType = newSequenceType == null ? SEQUENCE_TYPE_EDEFAULT : newSequenceType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__SEQUENCE_TYPE, oldSequenceType, sequenceType));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamespacedProperty getCorrelationXpath() {
        return correlationXpath;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // Swaps the containment reference and chains the SET notification; callers that
    // manage inverse references use this, plain callers use setCorrelationXpath.
    public NotificationChain basicSetCorrelationXpath(NamespacedProperty newCorrelationXpath, NotificationChain msgs) {
        NamespacedProperty oldCorrelationXpath = correlationXpath;
        correlationXpath = newCorrelationXpath;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH, oldCorrelationXpath, newCorrelationXpath);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setCorrelationXpath(NamespacedProperty newCorrelationXpath) {
        if (newCorrelationXpath != correlationXpath) {
            NotificationChain msgs = null;
            // Detach the old child and attach the new one before committing the set.
            if (correlationXpath != null)
                msgs = ((InternalEObject)correlationXpath).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH, null, msgs);
            if (newCorrelationXpath != null)
                msgs = ((InternalEObject)newCorrelationXpath).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH, null, msgs);
            msgs = basicSetCorrelationXpath(newCorrelationXpath, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH, newCorrelationXpath, newCorrelationXpath));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamespacedProperty getLastMessageXpath() {
        return lastMessageXpath;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetLastMessageXpath(NamespacedProperty newLastMessageXpath, NotificationChain msgs) {
        NamespacedProperty oldLastMessageXpath = lastMessageXpath;
        lastMessageXpath = newLastMessageXpath;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH, oldLastMessageXpath, newLastMessageXpath);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setLastMessageXpath(NamespacedProperty newLastMessageXpath) {
        if (newLastMessageXpath != lastMessageXpath) {
            NotificationChain msgs = null;
            if (lastMessageXpath != null)
                msgs = ((InternalEObject)lastMessageXpath).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH, null, msgs);
            if (newLastMessageXpath != null)
                msgs = ((InternalEObject)newLastMessageXpath).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH, null, msgs);
            msgs = basicSetLastMessageXpath(newLastMessageXpath, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH, newLastMessageXpath, newLastMessageXpath));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public RMSequenceMediatorInputConnector getInputConnector() {
        return inputConnector;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetInputConnector(RMSequenceMediatorInputConnector newInputConnector, NotificationChain msgs) {
        RMSequenceMediatorInputConnector oldInputConnector = inputConnector;
        inputConnector = newInputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR, oldInputConnector, newInputConnector);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setInputConnector(RMSequenceMediatorInputConnector newInputConnector) {
        if (newInputConnector != inputConnector) {
            NotificationChain msgs = null;
            if (inputConnector != null)
                msgs = ((InternalEObject)inputConnector).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR, null, msgs);
            if (newInputConnector != null)
                msgs = ((InternalEObject)newInputConnector).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR, null, msgs);
            msgs = basicSetInputConnector(newInputConnector, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR, newInputConnector, newInputConnector));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public RMSequenceMediatorOutputConnector getOutputConnector() {
        return outputConnector;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOutputConnector(RMSequenceMediatorOutputConnector newOutputConnector, NotificationChain msgs) {
        RMSequenceMediatorOutputConnector oldOutputConnector = outputConnector;
        outputConnector = newOutputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR, oldOutputConnector, newOutputConnector);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOutputConnector(RMSequenceMediatorOutputConnector newOutputConnector) {
        if (newOutputConnector != outputConnector) {
            NotificationChain msgs = null;
            if (outputConnector != null)
                msgs = ((InternalEObject)outputConnector).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR, null, msgs);
            if (newOutputConnector != null)
                msgs = ((InternalEObject)newOutputConnector).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR, null, msgs);
            msgs = basicSetOutputConnector(newOutputConnector, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR, newOutputConnector, newOutputConnector));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // Containment children are detached through their basicSet* counterparts so the
    // notification chain stays consistent.
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH:
                return basicSetCorrelationXpath(null, msgs);
            case EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH:
                return basicSetLastMessageXpath(null, msgs);
            case EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR:
                return basicSetInputConnector(null, msgs);
            case EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR:
                return basicSetOutputConnector(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case EsbPackage.RM_SEQUENCE_MEDIATOR__RM_SPEC_VERSION:
                return getRmSpecVersion();
            case EsbPackage.RM_SEQUENCE_MEDIATOR__SEQUENCE_TYPE:
                return getSequenceType();
            case EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH:
                return getCorrelationXpath();
            case EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH:
                return getLastMessageXpath();
            case EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR:
                return getInputConnector();
            case EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR:
                return getOutputConnector();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case EsbPackage.RM_SEQUENCE_MEDIATOR__RM_SPEC_VERSION:
                setRmSpecVersion((RMSpecVersion)newValue);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__SEQUENCE_TYPE:
                setSequenceType((RMSequenceType)newValue);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH:
                setCorrelationXpath((NamespacedProperty)newValue);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH:
                setLastMessageXpath((NamespacedProperty)newValue);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR:
                setInputConnector((RMSequenceMediatorInputConnector)newValue);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR:
                setOutputConnector((RMSequenceMediatorOutputConnector)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case EsbPackage.RM_SEQUENCE_MEDIATOR__RM_SPEC_VERSION:
                setRmSpecVersion(RM_SPEC_VERSION_EDEFAULT);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__SEQUENCE_TYPE:
                setSequenceType(SEQUENCE_TYPE_EDEFAULT);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH:
                setCorrelationXpath((NamespacedProperty)null);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH:
                setLastMessageXpath((NamespacedProperty)null);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR:
                setInputConnector((RMSequenceMediatorInputConnector)null);
                return;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR:
                setOutputConnector((RMSequenceMediatorOutputConnector)null);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case EsbPackage.RM_SEQUENCE_MEDIATOR__RM_SPEC_VERSION:
                return rmSpecVersion != RM_SPEC_VERSION_EDEFAULT;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__SEQUENCE_TYPE:
                return sequenceType != SEQUENCE_TYPE_EDEFAULT;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__CORRELATION_XPATH:
                return correlationXpath != null;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__LAST_MESSAGE_XPATH:
                return lastMessageXpath != null;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__INPUT_CONNECTOR:
                return inputConnector != null;
            case EsbPackage.RM_SEQUENCE_MEDIATOR__OUTPUT_CONNECTOR:
                return outputConnector != null;
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (rmSpecVersion: ");
        result.append(rmSpecVersion);
        result.append(", sequenceType: ");
        result.append(sequenceType);
        result.append(')');
        return result.toString();
    }

} //RMSequenceMediatorImpl
/* * Copyright (C) 2006-2013 Bitronix Software (http://www.bitronix.be) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package bitronix.tm.internal; import bitronix.tm.BitronixXid; import bitronix.tm.TransactionManagerServices; import bitronix.tm.resource.common.XAResourceHolder; import bitronix.tm.utils.Scheduler; import bitronix.tm.utils.Uid; import bitronix.tm.utils.UidGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.SortedSet; /** * Every {@link bitronix.tm.BitronixTransaction} contains an instance of this class that is used to register * and keep track of resources enlisted in a transaction. * * @author Ludovic Orban */ public class XAResourceManager { private final static Logger log = LoggerFactory.getLogger(XAResourceManager.class); private final Uid gtrid; private final Scheduler<XAResourceHolderState> resources = new Scheduler<XAResourceHolderState>(); /** * Create a resource manager for the specified GTRID. * @param gtrid the transaction's GTRID this XAResourceManager will be assigned to. */ public XAResourceManager(Uid gtrid) { this.gtrid = gtrid; } /** * Enlist the specified {@link XAResourceHolderState}. 
A XID is generated and the resource is started with * XAResource.TMNOFLAGS or XAResource.TMJOIN if it could be joined with another previously enlisted one. * <br> * There are 3 different cases that can happen when a {@link XAResourceHolderState} is enlisted: * <ul> * <li>already enlisted and not ended: do nothing</li> * <li>already enlisted and ended: try to join. if you can join, keep a reference on the passed-in * {@link XAResourceHolderState} and drop the previous one. if you cannot join, it's the same as case 3</li> * <li>not enlisted: create a new branch and keep a reference on the passed-in {@link XAResourceHolderState}</li> * </ul> * * @param xaResourceHolderState the {@link XAResourceHolderState} to be enlisted. * @throws XAException if a resource error occured. * @throws BitronixSystemException if an internal error occured. */ public void enlist(XAResourceHolderState xaResourceHolderState) throws XAException, BitronixSystemException { XAResourceHolderState alreadyEnlistedHolder = findXAResourceHolderState(xaResourceHolderState.getXAResource()); if (alreadyEnlistedHolder != null && !alreadyEnlistedHolder.isEnded()) { xaResourceHolderState.setXid(alreadyEnlistedHolder.getXid()); log.warn("ignoring enlistment of already enlisted but not ended resource " + xaResourceHolderState); return; } XAResourceHolderState toBeJoinedHolderState = null; if (alreadyEnlistedHolder != null) { if (log.isDebugEnabled()) { log.debug("resource already enlisted but has been ended eligible for join: " + alreadyEnlistedHolder); } toBeJoinedHolderState = getManagedResourceWithSameRM(xaResourceHolderState); } BitronixXid xid; int flag; if (toBeJoinedHolderState != null) { if (log.isDebugEnabled()) { log.debug("joining " + xaResourceHolderState + " with " + toBeJoinedHolderState); } xid = toBeJoinedHolderState.getXid(); flag = XAResource.TMJOIN; } else { xid = UidGenerator.generateXid(gtrid); if (log.isDebugEnabled()) { log.debug("creating new branch with " + xid); } flag = 
XAResource.TMNOFLAGS; } // check for enlistment of a 2nd LRC resource, forbid this if the 2nd resource cannot be joined with the 1st one // unless this is explicitly allowed in the config if (flag != XAResource.TMJOIN && xaResourceHolderState.getTwoPcOrderingPosition() == Scheduler.ALWAYS_LAST_POSITION && !TransactionManagerServices.getConfiguration().isAllowMultipleLrc()) { List<XAResourceHolderState> alwaysLastResources = resources.getByNaturalOrderForPosition(Scheduler.ALWAYS_LAST_POSITION); if (alwaysLastResources != null && !alwaysLastResources.isEmpty()) throw new BitronixSystemException("cannot enlist more than one non-XA resource, tried enlisting " + xaResourceHolderState + ", already enlisted: " + alwaysLastResources.get(0)); } xaResourceHolderState.setXid(xid); xaResourceHolderState.start(flag); // in case of a JOIN, the resource holder is already in the scheduler -> do not add it twice if (toBeJoinedHolderState != null) { resources.remove(toBeJoinedHolderState); } // this must be done only after start() successfully returned resources.add(xaResourceHolderState, xaResourceHolderState.getTwoPcOrderingPosition()); } /** * Delist the specified {@link XAResourceHolderState}. A reference to the resource is kept anyway. * @param xaResourceHolderState the {@link XAResourceHolderState} to be delisted. * @param flag the delistment flag. * @return true if the resource could be delisted, false otherwise. * @throws XAException if the resource threw an exception during delistment. * @throws BitronixSystemException if an internal error occured. 
*/ public boolean delist(XAResourceHolderState xaResourceHolderState, int flag) throws XAException, BitronixSystemException { if (findXAResourceHolderState(xaResourceHolderState.getXAResource()) != null) { if (log.isDebugEnabled()) { log.debug("delisting resource " + xaResourceHolderState); } xaResourceHolderState.end(flag); return true; } log.warn("trying to delist resource that has not been previously enlisted: " + xaResourceHolderState); return false; } /** * Suspend all enlisted resources from the current transaction context. * @throws XAException if the resource threw an exception during suspend. */ public void suspend() throws XAException { for (XAResourceHolderState xaResourceHolderState : resources) { if (!xaResourceHolderState.isEnded()) { if (log.isDebugEnabled()) { log.debug("suspending " + xaResourceHolderState); } xaResourceHolderState.end(XAResource.TMSUCCESS); } } // while } /** * Resume all enlisted resources in the current transaction context. * @throws XAException if the resource threw an exception during resume. */ public void resume() throws XAException { // all XAResource needs to be re-enlisted but this must happen // outside the Scheduler's iteration as enlist() can change the // collection's content and confuse the iterator. List<XAResourceHolderState> toBeReEnlisted = new ArrayList<XAResourceHolderState>(); for (XAResourceHolderState xaResourceHolderState : resources) { if (log.isDebugEnabled()) { log.debug("resuming " + xaResourceHolderState); } // If a prepared statement is (re-)used after suspend/resume is performed its XAResource needs to be // re-enlisted. This must be done outside this loop or that will confuse the iterator! 
toBeReEnlisted.add(new XAResourceHolderState(xaResourceHolderState)); } if (toBeReEnlisted.size() > 0 && log.isDebugEnabled()) log.debug("re-enlisting " + toBeReEnlisted.size() + " resource(s)"); for (XAResourceHolderState xaResourceHolderState : toBeReEnlisted) { if (log.isDebugEnabled()) { log.debug("re-enlisting resource " + xaResourceHolderState); } try { enlist(xaResourceHolderState); xaResourceHolderState.getXAResourceHolder().putXAResourceHolderState(xaResourceHolderState.getXid(), xaResourceHolderState); } catch (BitronixSystemException ex) { throw new BitronixXAException("error re-enlisting resource during resume: " + xaResourceHolderState, XAException.XAER_RMERR, ex); } } } /** * Look if an {@link XAResource} has already been enlisted. * @param xaResource the {@link XAResource} to look for. * @return the {@link XAResourceHolderState} of the enlisted resource or null if the {@link XAResource} has not * been enlisted in this {@link XAResourceManager}. * @throws BitronixSystemException if an internal error happens. */ public XAResourceHolderState findXAResourceHolderState(XAResource xaResource) throws BitronixSystemException { for (XAResourceHolderState xaResourceHolderState : resources) { if (xaResourceHolderState.getXAResource() == xaResource) return xaResourceHolderState; } return null; } /** * Search for an eventually already enlisted {@link XAResourceHolderState} that could be joined with the * {@link XAResourceHolderState} passed as parameter.<br/> * If datasource configuration property <code>bitronix.useTmJoin=false</code> is set this method always returns null. * @param xaResourceHolderState a {@link XAResourceHolderState} looking to be joined. * @return another enlisted {@link XAResourceHolderState} that can be joined with the one passed in or null if none is found. * @throws XAException if call to XAResource.isSameRM() fails. 
*/ private XAResourceHolderState getManagedResourceWithSameRM(XAResourceHolderState xaResourceHolderState) throws XAException { if (!xaResourceHolderState.getUseTmJoin()) { if (log.isDebugEnabled()) { log.debug("join disabled on resource " + xaResourceHolderState); } return null; } for (XAResourceHolderState alreadyEnlistedHolderState : resources) { if (log.isDebugEnabled()) log.debug("checking joinability of " + xaResourceHolderState + " with " + alreadyEnlistedHolderState); if (alreadyEnlistedHolderState.isEnded() && !alreadyEnlistedHolderState.isSuspended() && xaResourceHolderState.getXAResource().isSameRM(alreadyEnlistedHolderState.getXAResource())) { if (log.isDebugEnabled()) { log.debug("resources are joinable"); } return alreadyEnlistedHolderState; } if (log.isDebugEnabled()) { log.debug("resources are not joinable"); } } if (log.isDebugEnabled()) { log.debug("no joinable resource found for " + xaResourceHolderState); } return null; } /** * Remove this transaction's {@link XAResourceHolderState} from all enlisted * {@link bitronix.tm.resource.common.XAResourceHolder}s. 
*/ public void clearXAResourceHolderStates() { if (log.isDebugEnabled()) { log.debug("clearing XAResourceHolder states on " + resources.size() + " resource(s)"); } Iterator<XAResourceHolderState> it = resources.iterator(); while (it.hasNext()) { XAResourceHolderState xaResourceHolderState = it.next(); XAResourceHolder resourceHolder = xaResourceHolderState.getXAResourceHolder(); // clear out the current state resourceHolder.removeXAResourceHolderState(xaResourceHolderState.getXid()); boolean stillExists = resourceHolder.isExistXAResourceHolderStatesForGtrid(gtrid); if (stillExists) log.warn("resource " + resourceHolder + " did not clean up " + resourceHolder.getXAResourceHolderStateCountForGtrid(gtrid) + "transaction states for GTRID [" + gtrid + "]"); else if (log.isDebugEnabled()) { log.debug("resource " + resourceHolder + " cleaned up all transaction states for GTRID [" + gtrid + "]"); } it.remove(); } } /** * Get a {@link Set} of unique names of all the enlisted {@link XAResourceHolderState}s. * @return a {@link Set} of unique names of all the enlisted {@link XAResourceHolderState}s. 
*/ public Set<String> collectUniqueNames() { Set<String> names = new HashSet<String>(resources.size()); for (XAResourceHolderState xaResourceHolderState : resources) { names.add(xaResourceHolderState.getUniqueName()); } return Collections.unmodifiableSet(names); } public SortedSet<Integer> getNaturalOrderPositions() { return Collections.unmodifiableSortedSet(resources.getNaturalOrderPositions()); } public SortedSet<Integer> getReverseOrderPositions() { return Collections.unmodifiableSortedSet(resources.getReverseOrderPositions()); } public List<XAResourceHolderState> getNaturalOrderResourcesForPosition(Integer position) { return Collections.unmodifiableList(resources.getByNaturalOrderForPosition(position)); } public List<XAResourceHolderState> getReverseOrderResourcesForPosition(Integer position) { return Collections.unmodifiableList(resources.getByReverseOrderForPosition(position)); } public List<XAResourceHolderState> getAllResources() { List<XAResourceHolderState> result = new ArrayList<XAResourceHolderState>(resources.size()); for (Integer positionKey : resources.getNaturalOrderPositions()) { result.addAll(resources.getByNaturalOrderForPosition(positionKey)); } return Collections.unmodifiableList(result); } /** * Get the enlisted resources count. * @return the enlisted resources count. */ public int size() { return resources.size(); } /** * Get the GTRID of the transaction the {@link XAResourceManager} instance is attached to. * @return the GTRID of the transaction the {@link XAResourceManager} instance is attached to. */ public Uid getGtrid() { return gtrid; } /** * Return a human-readable representation of this object. * @return a human-readable representation of this object. */ @Override public String toString() { return "a XAResourceManager with GTRID [" + gtrid + "] and " + resources; } }
/******************************************************************************* * Copyright (c) 2009 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * Zend Technologies *******************************************************************************/ package org.eclipse.php.internal.core.ast.nodes; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.eclipse.php.internal.core.PHPVersion; import org.eclipse.php.internal.core.ast.match.ASTMatcher; import org.eclipse.php.internal.core.ast.visitor.Visitor; /** * Represents namespace name: * * <pre>e.g. * * <pre> * MyNamespace; * MyProject\Sub\Level; * namespace\MyProject\Sub\Level; */ public class NamespaceName extends Identifier { protected ASTNode.NodeList<Identifier> segments = new ASTNode.NodeList<Identifier>( ELEMENTS_PROPERTY); /** * Whether the namespace name has '\' prefix, which means it relates to the * global scope */ private boolean global; /** * Whether the namespace name has 'namespace' prefix, which means it relates * to the current namespace scope */ private boolean current; /** * The "namespace" structural property of this node type. 
*/ public static final ChildListPropertyDescriptor ELEMENTS_PROPERTY = new ChildListPropertyDescriptor( NamespaceName.class, "segments", Identifier.class, NO_CYCLE_RISK); //$NON-NLS-1$ public static final SimplePropertyDescriptor GLOBAL_PROPERTY = new SimplePropertyDescriptor( UseStatementPart.class, "global", Boolean.class, MANDATORY); //$NON-NLS-1$ public static final SimplePropertyDescriptor CURRENT_PROPERTY = new SimplePropertyDescriptor( UseStatementPart.class, "current", Boolean.class, MANDATORY); //$NON-NLS-1$ /** * A list of property descriptors (element type: * {@link StructuralPropertyDescriptor}), or null if uninitialized. */ private static final List<StructuralPropertyDescriptor> PROPERTY_DESCRIPTORS; static { List<StructuralPropertyDescriptor> properyList = new ArrayList<StructuralPropertyDescriptor>( 2); properyList.add(NAME_PROPERTY); properyList.add(ELEMENTS_PROPERTY); properyList.add(GLOBAL_PROPERTY); properyList.add(CURRENT_PROPERTY); PROPERTY_DESCRIPTORS = Collections.unmodifiableList(properyList); } public NamespaceName(AST ast) { super(ast); } public NamespaceName(int start, int end, AST ast, Identifier[] segments, boolean global, boolean current) { super(start, end, ast, buildName(segments, global, current)); if (segments == null) { throw new IllegalArgumentException(); } for (Identifier name : segments) { this.segments.add(name); } this.global = global; this.current = current; } public NamespaceName(int start, int end, AST ast, List segments, boolean global, boolean current) { super(start, end, ast, buildName((Identifier[]) segments .toArray(new Identifier[segments.size()]), global, current)); if (segments == null) { throw new IllegalArgumentException(); } Iterator<Identifier> it = segments.iterator(); while (it.hasNext()) { this.segments.add(it.next()); } this.global = global; this.current = current; } protected static String buildName(Identifier[] segments, boolean global, boolean current) { StringBuilder buf = new StringBuilder(); if 
(global) { buf.append('\\'); } else if (current) { buf.append("namespace\\"); //$NON-NLS-1$ } for (int i = 0; i < segments.length; ++i) { if (i > 0) { buf.append('\\'); } buf.append(segments[i].getName()); } return buf.toString(); } public void childrenAccept(Visitor visitor) { for (ASTNode node : this.segments) { node.accept(visitor); } } public void traverseTopDown(Visitor visitor) { accept(visitor); for (ASTNode node : this.segments) { node.traverseTopDown(visitor); } } public void traverseBottomUp(Visitor visitor) { for (ASTNode node : this.segments) { node.traverseBottomUp(visitor); } accept(visitor); } public void toString(StringBuffer buffer, String tab) { buffer.append(tab).append("<NamespaceName"); //$NON-NLS-1$ appendInterval(buffer); buffer.append(" global='").append(global).append('\''); //$NON-NLS-1$ buffer.append(" current='").append(current).append('\''); //$NON-NLS-1$ buffer.append(">\n"); //$NON-NLS-1$ for (ASTNode node : this.segments) { node.toString(buffer, TAB + tab); buffer.append("\n"); //$NON-NLS-1$ } buffer.append(tab).append("</NamespaceName>"); //$NON-NLS-1$ } public void accept0(Visitor visitor) { final boolean visit = visitor.visit(this); if (visit) { childrenAccept(visitor); } visitor.endVisit(this); } public int getType() { return ASTNode.NAMESPACE_NAME; } /** * Returns whether this namespace name has global context (starts with '\') * * @return */ public boolean isGlobal() { return global; } public void setGlobal(boolean global) { preValueChange(GLOBAL_PROPERTY); this.global = global; postValueChange(GLOBAL_PROPERTY); } /** * Returns whether this namespace name has current namespace context (starts * with 'namespace') * * @return */ public boolean isCurrent() { return current; } public void setCurrent(boolean current) { preValueChange(CURRENT_PROPERTY); this.current = current; postValueChange(CURRENT_PROPERTY); } /** * Retrieves names parts of the namespace * * @return segments. 
If names list is empty, that means that this namespace * is global. */ public List<Identifier> segments() { return this.segments; } /* * (omit javadoc for this method) Method declared on ASTNode. */ public boolean subtreeMatch(ASTMatcher matcher, Object other) { // dispatch to correct overloaded match method return matcher.match(this, other); } /* * (omit javadoc for this method) Method declared on ASTNode. */ protected ASTNode clone0(AST target) { final List segments = ASTNode.copySubtrees(target, segments()); final boolean global = isGlobal(); final boolean current = isCurrent(); final NamespaceName result = new NamespaceName(this.getStart(), this .getEnd(), target, segments, global, current); return result; } @Override protected List<StructuralPropertyDescriptor> internalStructuralPropertiesForType( PHPVersion apiLevel) { return PROPERTY_DESCRIPTORS; } boolean internalGetSetBooleanProperty(SimplePropertyDescriptor property, boolean get, boolean value) { if (property == GLOBAL_PROPERTY) { if (get) { return isGlobal(); } else { setGlobal(value); return false; } } if (property == CURRENT_PROPERTY) { if (get) { return isCurrent(); } else { setCurrent(value); return false; } } return super.internalGetSetBooleanProperty(property, get, value); } /* * (omit javadoc for this method) Method declared on ASTNode. */ final List internalGetChildListProperty(ChildListPropertyDescriptor property) { if (property == ELEMENTS_PROPERTY) { return segments(); } // allow default implementation to flag the error return super.internalGetChildListProperty(property); } }
/*
 * Copyright 2019, EnMasse authors.
 * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
 */
package io.enmasse.controller.router.config;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Immutable holder for a qdrouterd router configuration.
 *
 * <p>The configuration is (de)serialized as a JSON array of
 * {@code [entityName, entityObject]} pairs, stored under the single key
 * {@code "qdrouterd.json"} when persisted as a string map.
 */
public class RouterConfig {
    // Shared, thread-safe Jackson mapper used for all (de)serialization.
    private static final ObjectMapper mapper = new ObjectMapper();

    private final Router router;
    private final List<SslProfile> sslProfiles;
    private final List<AuthServicePlugin> authServicePlugins;
    private final List<Listener> listeners;
    private final List<Policy> policies;
    private final List<Connector> connectors;
    private final List<AutoLink> autoLinks;
    private final List<LinkRoute> linkRoutes;
    private final List<Address> addresses;
    private final List<VhostPolicy> vhosts;

    /**
     * Creates a configuration from its constituent router entities.
     * The supplied lists are stored as-is (not copied).
     */
    public RouterConfig(Router router,
                        List<SslProfile> sslProfiles,
                        List<AuthServicePlugin> authServicePlugins,
                        List<Listener> listeners,
                        List<Policy> policies,
                        List<Connector> connectors,
                        List<AutoLink> autoLinks,
                        List<LinkRoute> linkRoutes,
                        List<Address> addresses,
                        List<VhostPolicy> vhosts) {
        this.router = router;
        this.sslProfiles = sslProfiles;
        this.authServicePlugins = authServicePlugins;
        this.listeners = listeners;
        this.policies = policies;
        this.connectors = connectors;
        this.autoLinks = autoLinks;
        this.linkRoutes = linkRoutes;
        this.addresses = addresses;
        this.vhosts = vhosts;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("RouterConfig{");
        text.append("router=").append(router);
        text.append(", sslProfiles=").append(sslProfiles);
        text.append(", authServicePlugins=").append(authServicePlugins);
        text.append(", listeners=").append(listeners);
        text.append(", policies=").append(policies);
        text.append(", connectors=").append(connectors);
        text.append(", autoLinks=").append(autoLinks);
        text.append(", linkRoutes=").append(linkRoutes);
        text.append(", addresses=").append(addresses);
        text.append(", vhosts=").append(vhosts);
        return text.append('}').toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || !getClass().equals(o.getClass())) {
            return false;
        }
        RouterConfig other = (RouterConfig) o;
        return Objects.equals(router, other.router)
                && Objects.equals(sslProfiles, other.sslProfiles)
                && Objects.equals(authServicePlugins, other.authServicePlugins)
                && Objects.equals(listeners, other.listeners)
                && Objects.equals(policies, other.policies)
                && Objects.equals(connectors, other.connectors)
                && Objects.equals(autoLinks, other.autoLinks)
                && Objects.equals(linkRoutes, other.linkRoutes)
                && Objects.equals(addresses, other.addresses)
                && Objects.equals(vhosts, other.vhosts);
    }

    @Override
    public int hashCode() {
        return Objects.hash(router, sslProfiles, authServicePlugins, listeners, policies,
                connectors, autoLinks, linkRoutes, addresses, vhosts);
    }

    /**
     * Serializes this configuration into a single-entry map keyed by
     * {@code "qdrouterd.json"}, suitable for storing in a ConfigMap-like store.
     */
    public Map<String, String> toMap() throws JsonProcessingException {
        String payload = new String(asJson(), StandardCharsets.UTF_8);
        return Collections.singletonMap("qdrouterd.json", payload);
    }

    /**
     * Serializes this configuration as a JSON array of
     * {@code [entityName, entity]} pairs. Entry order matches the original
     * on-disk layout (router first, vhost last).
     */
    public byte[] asJson() throws JsonProcessingException {
        List<Object> config = new ArrayList<>();
        config.add(Arrays.asList("router", router));
        appendEntries(config, "sslProfile", sslProfiles);
        appendEntries(config, "authServicePlugin", authServicePlugins);
        appendEntries(config, "listener", listeners);
        appendEntries(config, "policy", policies);
        appendEntries(config, "autoLink", autoLinks);
        appendEntries(config, "linkRoute", linkRoutes);
        appendEntries(config, "address", addresses);
        appendEntries(config, "connector", connectors);
        appendEntries(config, "vhost", vhosts);
        return mapper.writeValueAsBytes(config);
    }

    // Appends one [entryName, entry] pair per element of 'entries' to 'target'.
    private static void appendEntries(List<Object> target, String entryName, List<?> entries) {
        for (Object entry : entries) {
            target.add(Arrays.asList(entryName, entry));
        }
    }

    /**
     * Parses a configuration previously produced by {@link #toMap()}.
     * A missing {@code "qdrouterd.json"} key yields an empty configuration.
     */
    public static RouterConfig fromMap(Map<String, String> data) throws IOException {
        String payload = data.get("qdrouterd.json");
        if (payload == null) {
            payload = "[]";
        }
        return RouterConfig.fromJson(payload.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Parses a JSON array of {@code [entityName, entity]} pairs. Unrecognized
     * entity names are skipped; a missing "router" entry leaves the router null.
     */
    public static RouterConfig fromJson(byte[] json) throws IOException {
        Router router = null;
        List<SslProfile> sslProfiles = new ArrayList<>();
        List<AuthServicePlugin> authServicePlugins = new ArrayList<>();
        List<Listener> listeners = new ArrayList<>();
        List<Policy> policies = new ArrayList<>();
        List<AutoLink> autoLinks = new ArrayList<>();
        List<LinkRoute> linkRoutes = new ArrayList<>();
        List<Address> addresses = new ArrayList<>();
        List<Connector> connectors = new ArrayList<>();
        List<VhostPolicy> vhostPolicies = new ArrayList<>();

        ArrayNode entries = mapper.readValue(json, ArrayNode.class);
        for (JsonNode element : entries) {
            ArrayNode pair = (ArrayNode) element;
            String type = pair.get(0).asText();
            JsonNode value = pair.get(1);
            switch (type) {
                case "router":
                    router = mapper.treeToValue(value, Router.class);
                    break;
                case "sslProfile":
                    sslProfiles.add(mapper.treeToValue(value, SslProfile.class));
                    break;
                case "authServicePlugin":
                    authServicePlugins.add(mapper.treeToValue(value, AuthServicePlugin.class));
                    break;
                case "listener":
                    listeners.add(mapper.treeToValue(value, Listener.class));
                    break;
                case "policy":
                    policies.add(mapper.treeToValue(value, Policy.class));
                    break;
                case "autoLink":
                    autoLinks.add(mapper.treeToValue(value, AutoLink.class));
                    break;
                case "linkRoute":
                    linkRoutes.add(mapper.treeToValue(value, LinkRoute.class));
                    break;
                case "address":
                    addresses.add(mapper.treeToValue(value, Address.class));
                    break;
                case "connector":
                    connectors.add(mapper.treeToValue(value, Connector.class));
                    break;
                case "vhost":
                    vhostPolicies.add(mapper.treeToValue(value, VhostPolicy.class));
                    break;
            }
        }
        return new RouterConfig(router, sslProfiles, authServicePlugins, listeners, policies,
                connectors, autoLinks, linkRoutes, addresses, vhostPolicies);
    }

    public Router getRouter() {
        return router;
    }

    public List<SslProfile> getSslProfiles() {
        return sslProfiles;
    }

    public List<AuthServicePlugin> getAuthServicePlugins() {
        return authServicePlugins;
    }

    public List<Listener> getListeners() {
        return listeners;
    }

    public List<Policy> getPolicies() {
        return policies;
    }

    public List<Connector> getConnectors() {
        return connectors;
    }

    public List<AutoLink> getAutoLinks() {
        return autoLinks;
    }

    public List<LinkRoute> getLinkRoutes() {
        return linkRoutes;
    }

    public List<Address> getAddresses() {
        return addresses;
    }

    public List<VhostPolicy> getVhosts() {
        return vhosts;
    }
}
<%# Copyright 2013-2018 the original author or authors from the JHipster project. This file is part of the JHipster project, see http://www.jhipster.tech/ for more information. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -%> <%_ let cacheManagerIsAvailable = false; if (['ehcache', 'hazelcast', 'infinispan'].includes(cacheProvider) || applicationType === 'gateway') { cacheManagerIsAvailable = true; } _%> package <%=packageName%>.service; <%_ if (cacheManagerIsAvailable === true) { _%> import <%=packageName%>.config.CacheConfiguration; <%_ } _%> <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> import <%=packageName%>.domain.Authority;<% } %> import <%=packageName%>.domain.User;<% if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { %> import <%=packageName%>.repository.AuthorityRepository;<% if (authenticationType === 'session') { %> import <%=packageName%>.repository.PersistentTokenRepository;<% } %><% } %> import <%=packageName%>.config.Constants; import <%=packageName%>.repository.UserRepository;<% if (searchEngine === 'elasticsearch') { %> import <%=packageName%>.repository.search.UserSearchRepository;<% } %> <%_ if (authenticationType !== 'oauth2') { _%> import <%=packageName%>.security.AuthoritiesConstants; <%_ } _%> import <%=packageName%>.security.SecurityUtils; <%_ if (authenticationType !== 'oauth2') { _%> import <%=packageName%>.service.util.RandomUtil; <%_ } _%> import 
<%=packageName%>.service.dto.UserDTO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; <%_ if (cacheManagerIsAvailable === true) { _%> import org.springframework.cache.CacheManager; <%_ } _%> <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; <%_ if (authenticationType !== 'oauth2') { _%> import org.springframework.scheduling.annotation.Scheduled; <%_ } _%> <%_ } _%> <%_ if (authenticationType === 'oauth2' && applicationType === 'monolith') { _%> import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.oauth2.provider.OAuth2Authentication; <%_ } _%> <%_ if (authenticationType !== 'oauth2') { _%> import org.springframework.security.crypto.password.PasswordEncoder; <%_ } _%> import org.springframework.stereotype.Service;<% if (databaseType === 'sql') { %> import org.springframework.transaction.annotation.Transactional;<% } %> <%_ if ((databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') && authenticationType === 'session') { _%> import java.time.LocalDate; <%_ } _%> <%_ if (authenticationType !== 'oauth2' || applicationType === 'monolith') { _%> import java.time.Instant; <%_ } _%> <%_ if (authenticationType !== 'oauth2' && (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase')) { _%> import java.time.temporal.ChronoUnit; <%_ } _%> import java.util.*; import java.util.stream.Collectors; <%_ if (authenticationType === 'oauth2' && applicationType === 'monolith') { _%> import java.util.stream.Stream; <%_ } _%> /** * Service 
class for managing users. */ @Service<% if (databaseType === 'sql') { %> @Transactional<% } %> public class UserService { private final Logger log = LoggerFactory.getLogger(UserService.class); private final UserRepository userRepository; <%_ if (authenticationType !== 'oauth2') { _%> private final PasswordEncoder passwordEncoder; <%_ } _%> <%_ if (enableSocialSignIn) { _%> private final SocialService socialService; <%_ } _%> <%_ if (searchEngine === 'elasticsearch') { _%> private final UserSearchRepository userSearchRepository; <%_ } _%> <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> <%_ if (authenticationType === 'session') { _%> private final PersistentTokenRepository persistentTokenRepository; <%_ } _%> private final AuthorityRepository authorityRepository; <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> private final CacheManager cacheManager; <%_ } _%> public UserService(UserRepository userRepository<% if (authenticationType !== 'oauth2') { %>, PasswordEncoder passwordEncoder<% } %><% if (enableSocialSignIn) { %>, SocialService socialService<% } %><% if (searchEngine === 'elasticsearch') { %>, UserSearchRepository userSearchRepository<% } %><% if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { %><% if (authenticationType === 'session') { %>, PersistentTokenRepository persistentTokenRepository<% } %>, AuthorityRepository authorityRepository<% } %><% if (cacheManagerIsAvailable === true) { %>, CacheManager cacheManager<% } %>) { this.userRepository = userRepository; <%_ if (authenticationType !== 'oauth2') { _%> this.passwordEncoder = passwordEncoder; <%_ } _%> <%_ if (enableSocialSignIn) { _%> this.socialService = socialService; <%_ } _%> <%_ if (searchEngine === 'elasticsearch') { _%> this.userSearchRepository = userSearchRepository; <%_ } _%> <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> <%_ if 
(authenticationType === 'session') { _%> this.persistentTokenRepository = persistentTokenRepository; <%_ } _%> this.authorityRepository = authorityRepository; <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> this.cacheManager = cacheManager; <%_ } _%> } <%_ if (authenticationType !== 'oauth2') { _%> public Optional<User> activateRegistration(String key) { log.debug("Activating user for activation key {}", key); return userRepository.findOneByActivationKey(key) .map(user -> { // activate given user for the registration key. user.setActivated(true); user.setActivationKey(null); <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine === 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> log.debug("Activated user: {}", user); return user; }); } public Optional<User> completePasswordReset(String newPassword, String key) { log.debug("Reset user password for reset key {}", key); return userRepository.findOneByResetKey(key) .filter(user -> user.getResetDate().isAfter(Instant.now().minusSeconds(86400))) .map(user -> { user.setPassword(passwordEncoder.encode(newPassword)); user.setResetKey(null); user.setResetDate(null); <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> return user; }); } public Optional<User> requestPasswordReset(String mail) { return userRepository.findOneByEmailIgnoreCase(mail) 
.filter(User::getActivated) .map(user -> { user.setResetKey(RandomUtil.generateResetKey()); user.setResetDate(Instant.now()); <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> return user; }); } public User registerUser(UserDTO userDTO, String password) { User newUser = new User();<% if (databaseType === 'sql' || databaseType === 'mongodb') { %> Authority authority = authorityRepository.findOne(AuthoritiesConstants.USER); Set<Authority> authorities = new HashSet<>();<% } %><% if (databaseType === 'cassandra') { %> newUser.setId(UUID.randomUUID().toString());<% } %><% if (databaseType === 'cassandra' || databaseType === 'couchbase') { %> Set<String> authorities = new HashSet<>();<% } %> String encryptedPassword = passwordEncoder.encode(password); newUser.setLogin(userDTO.getLogin()); // new user gets initially a generated password newUser.setPassword(encryptedPassword); newUser.setFirstName(userDTO.getFirstName()); newUser.setLastName(userDTO.getLastName()); newUser.setEmail(userDTO.getEmail()); <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> newUser.setImageUrl(userDTO.getImageUrl()); <%_ } _%> newUser.setLangKey(userDTO.getLangKey()); // new user is not active newUser.setActivated(false); // new user gets registration key newUser.setActivationKey(RandomUtil.generateActivationKey()); <%_ if (databaseType === 'sql' || databaseType === 'mongodb') { _%> authorities.add(authority); <%_ } _%> <%_ if (databaseType === 'cassandra' || databaseType === 'couchbase') { _%> authorities.add(AuthoritiesConstants.USER); <%_ } _%> newUser.setAuthorities(authorities); userRepository.save(newUser);<% if (searchEngine === 
'elasticsearch') { %> userSearchRepository.save(newUser);<% } %> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(newUser.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(newUser.getEmail()); <%_ } _%> log.debug("Created Information for User: {}", newUser); return newUser; } public User createUser(UserDTO userDTO) { User user = new User();<% if (databaseType === 'cassandra') { %> user.setId(UUID.randomUUID().toString());<% } %> user.setLogin(userDTO.getLogin()); user.setFirstName(userDTO.getFirstName()); user.setLastName(userDTO.getLastName()); user.setEmail(userDTO.getEmail()); <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> user.setImageUrl(userDTO.getImageUrl()); <%_ } _%> if (userDTO.getLangKey() == null) { user.setLangKey(Constants.DEFAULT_LANGUAGE); // default language } else { user.setLangKey(userDTO.getLangKey()); } <%_ if (databaseType === 'sql' || databaseType === 'mongodb') { _%> if (userDTO.getAuthorities() != null) { Set<Authority> authorities = userDTO.getAuthorities().stream() .map(authorityRepository::findOne) .collect(Collectors.toSet()); user.setAuthorities(authorities); } <%_ } _%> <%_ if (databaseType === 'cassandra' || databaseType === 'couchbase') { _%> user.setAuthorities(userDTO.getAuthorities()); <%_ } _%> String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword()); user.setPassword(encryptedPassword); user.setResetKey(RandomUtil.generateResetKey()); user.setResetDate(Instant.now()); user.setActivated(true); userRepository.save(user);<% if (searchEngine === 'elasticsearch') { %> userSearchRepository.save(user);<% } %> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> log.debug("Created Information for User: 
{}", user); return user; } <%_ } _%> /** * Update basic information (first name, last name, email, language) for the current user. * * @param firstName first name of user * @param lastName last name of user * @param email email id of user * @param langKey language key <%_ if (databaseType === 'mongodb' || databaseType === 'sql' || databaseType === 'couchbase') { _%> * @param imageUrl image URL of user <%_ } _%> */ public void updateUser(String firstName, String lastName, String email, String langKey<% if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'sql') { %>, String imageUrl<% } %>) { SecurityUtils.getCurrentUserLogin() .flatMap(userRepository::findOneByLogin) .ifPresent(user -> { user.setFirstName(firstName); user.setLastName(lastName); user.setEmail(email); user.setLangKey(langKey); <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'sql') { _%> user.setImageUrl(imageUrl); <%_ } _%> <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine === 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> log.debug("Changed Information for User: {}", user); }); } /** * Update all information for a specific user, and return the modified user. 
* * @param userDTO user to update * @return updated user */ public Optional<UserDTO> updateUser(UserDTO userDTO) { return Optional.of(userRepository .findOne(userDTO.getId())) .map(user -> { <%_ if (databaseType === 'couchbase') { _%> if (!user.getLogin().equals(userDTO.getLogin())) { userRepository.delete(userDTO.getId()); } <%_ } _%> user.setLogin(userDTO.getLogin()); user.setFirstName(userDTO.getFirstName()); user.setLastName(userDTO.getLastName()); user.setEmail(userDTO.getEmail()); <%_ if (databaseType === 'sql' || databaseType === 'mongodb'|| databaseType === 'couchbase') { _%> user.setImageUrl(userDTO.getImageUrl()); <%_ } _%> user.setActivated(userDTO.isActivated()); user.setLangKey(userDTO.getLangKey()); <%_ if (databaseType === 'sql' || databaseType === 'mongodb') { _%> Set<Authority> managedAuthorities = user.getAuthorities(); managedAuthorities.clear(); userDTO.getAuthorities().stream() .map(authorityRepository::findOne) .forEach(managedAuthorities::add); <%_ } else { // Cassandra & Couchbase _%> user.setAuthorities(userDTO.getAuthorities()); <%_ } _%> <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine === 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> log.debug("Changed Information for User: {}", user); return user; }) .map(UserDTO::new); } public void deleteUser(String login) { userRepository.findOneByLogin(login).ifPresent(user -> { <%_ if (enableSocialSignIn) { _%> socialService.deleteUserSocialConnection(user.getLogin()); <%_ } _%> userRepository.delete(user); <%_ if (searchEngine === 'elasticsearch') { _%> userSearchRepository.delete(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> 
cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> log.debug("Deleted User: {}", user); }); } <%_ if (authenticationType !== 'oauth2') { _%> public void changePassword(String password) { SecurityUtils.getCurrentUserLogin() .flatMap(userRepository::findOneByLogin) .ifPresent(user -> { String encryptedPassword = passwordEncoder.encode(password); user.setPassword(encryptedPassword); <%_ if (databaseType === 'mongodb' || databaseType === 'couchbase' || databaseType === 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> log.debug("Changed password for User: {}", user); }); } <%_ } _%> <%_ if (databaseType === 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> <%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> public Page<UserDTO> getAllManagedUsers(Pageable pageable) { return userRepository.findAllByLoginNot(pageable, Constants.ANONYMOUS_USER).map(UserDTO::new); }<% } else { // Cassandra %> public List<UserDTO> getAllManagedUsers() { return userRepository.findAll().stream() .filter(user -> !Constants.ANONYMOUS_USER.equals(user.getLogin())) .map(UserDTO::new) .collect(Collectors.toList()); }<% } %> <%_ if (databaseType === 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public Optional<User> getUserWithAuthoritiesByLogin(String login) { <%_ if (databaseType === 'sql') { _%> return userRepository.findOneWithAuthoritiesByLogin(login); <%_ } else { // MongoDB, Couchbase and Cassandra _%> return userRepository.findOneByLogin(login); <%_ } _%> } <%_ if (databaseType === 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public Optional<User> 
getUserWithAuthorities(<%= pkType %> id) { <%_ if (databaseType === 'sql') { _%> return userRepository.findOneWithAuthoritiesById(id); <%_ } else { // MongoDB, Couchbase and and Cassandra _%> return Optional.ofNullable(userRepository.findOne(id)); <%_ } _%> } <%_ if (databaseType === 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public Optional<User> getUserWithAuthorities() { return SecurityUtils.getCurrentUserLogin().flatMap(userRepository::findOne<% if (databaseType === 'sql') { %>WithAuthorities<% } %>ByLogin); } <%_ if ((databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') && authenticationType === 'session') { _%> /** * Persistent Token are used for providing automatic authentication, they should be automatically deleted after * 30 days. * <p> * This is scheduled to get fired everyday, at midnight. */ @Scheduled(cron = "0 0 0 * * ?") public void removeOldPersistentTokens() { LocalDate now = LocalDate.now(); persistentTokenRepository.findByTokenDateBefore(now.minusMonths(1)).forEach(token -> { log.debug("Deleting token {}", token.getSeries());<% if (databaseType === 'sql') { %> User user = token.getUser(); user.getPersistentTokens().remove(token);<% } %> persistentTokenRepository.delete(token); }); } <%_ } _%> <%_ if (authenticationType !== 'oauth2' && (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase')) { _%> /** * Not activated users should be automatically deleted after 3 days. * <p> * This is scheduled to get fired everyday, at 01:00 (am). 
*/ @Scheduled(cron = "0 0 1 * * ?") public void removeNotActivatedUsers() { List<User> users = userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(Instant.now().minus(3, ChronoUnit.DAYS)); for (User user : users) { log.debug("Deleting not activated user {}", user.getLogin()); userRepository.delete(user); <%_ if (searchEngine === 'elasticsearch') { _%> userSearchRepository.delete(user); <%_ } _%> <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> } } <%_ } if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%> /** * @return a list of all the authorities */ public List<String> getAuthorities() { return authorityRepository.findAll().stream().map(Authority::getName).collect(Collectors.toList()); } <%_ } _%> <%_ if (authenticationType === 'oauth2' && applicationType === 'monolith') { _%> /** * Returns the user for a OAuth2 authentication. 
* Synchronizes the user in the local repository * * @param authentication OAuth2 authentication * @return the user from the authentication */ public UserDTO getUserFromAuthentication(OAuth2Authentication authentication) { Map<String, Object> details = (Map<String, Object>) authentication.getUserAuthentication().getDetails(); User user = getUser(details); Set<<% if (databaseType === 'couchbase') { %>String<% } else { %>Authority<% } %>> userAuthorities = extractAuthorities(authentication, details); user.setAuthorities(userAuthorities); // convert Authorities to GrantedAuthorities Set<GrantedAuthority> grantedAuthorities = userAuthorities.stream() <%_ if (databaseType !== 'couchbase') { _%> .map(Authority::getName) <%_ } _%> .map(SimpleGrantedAuthority::new) .collect(Collectors.toSet()); UsernamePasswordAuthenticationToken token = getToken(details, user, grantedAuthorities); authentication = new OAuth2Authentication(authentication.getOAuth2Request(), token); SecurityContextHolder.getContext().setAuthentication(authentication); return new UserDTO(syncUserWithIdP(details, user)); } private User syncUserWithIdP(Map<String, Object> details, User user) { // save account in to sync users between IdP and JHipster's local database Optional<User> existingUser = userRepository.findOneByLogin(user.getLogin()); if (existingUser.isPresent()) { // if IdP sends last updated information, use it to determine if an update should happen if (details.get("updated_at") != null) { Instant dbModifiedDate = existingUser.get().getLastModifiedDate(); Instant idpModifiedDate = new Date(Long.valueOf((Integer) details.get("updated_at"))).toInstant(); if (idpModifiedDate.isAfter(dbModifiedDate)) { log.debug("Updating user '{}' in local database...", user.getLogin()); updateUser(user.getFirstName(), user.getLastName(), user.getEmail(), user.getLangKey(), user.getImageUrl()); } // no last updated info, blindly update } else { log.debug("Updating user '{}' in local database...", user.getLogin()); 
updateUser(user.getFirstName(), user.getLastName(), user.getEmail(), user.getLangKey(), user.getImageUrl()); } } else { log.debug("Saving user '{}' in local database...", user.getLogin()); userRepository.save(user); <%_ if (cacheManagerIsAvailable === true) { _%> cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).evict(user.getLogin()); cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).evict(user.getEmail()); <%_ } _%> } return user; } private static UsernamePasswordAuthenticationToken getToken(Map<String, Object> details, User user, Set<GrantedAuthority> grantedAuthorities) { // create UserDetails so #{principal.username} works UserDetails userDetails = new org.springframework.security.core.userdetails.User(user.getLogin(), "N/A", grantedAuthorities); // update Spring Security Authorities to match groups claim from IdP UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken( userDetails, "N/A", grantedAuthorities); token.setDetails(details); return token; } private static Set<<% if (databaseType === 'couchbase') { %>String<% } else { %>Authority<% } %>> extractAuthorities(OAuth2Authentication authentication, Map<String, Object> details) { Set<<% if (databaseType === 'couchbase') { %>String<% } else { %>Authority<% } %>> userAuthorities; // get roles from details if (details.get("roles") != null) { userAuthorities = extractAuthorities((List<String>) details.get("roles")); // if roles don't exist, try groups } else if (details.get("groups") != null) { userAuthorities = extractAuthorities((List<String>) details.get("groups")); } else { userAuthorities = authoritiesFromStringStream( authentication.getAuthorities().stream() .map(GrantedAuthority::getAuthority) ); } return userAuthorities; } private static User getUser(Map<String, Object> details) { User user = new User(); user.setLogin((String) details.get("preferred_username")); if (details.get("given_name") != null) { user.setFirstName((String) details.get("given_name")); 
} if (details.get("family_name") != null) { user.setLastName((String) details.get("family_name")); } if (details.get("email_verified") != null) { user.setActivated((Boolean) details.get("email_verified")); } if (details.get("email") != null) { user.setEmail((String) details.get("email")); } if (details.get("langKey") != null) { user.setLangKey((String) details.get("langKey")); } else if (details.get("locale") != null) { String locale = (String) details.get("locale"); String langKey = locale.substring(0, locale.indexOf("-")); user.setLangKey(langKey); } if (details.get("picture") != null) { user.setImageUrl((String) details.get("picture")); } return user; } private static Set<<% if (databaseType === 'couchbase') { %>String<% } else { %>Authority<% } %>> extractAuthorities(List<String> values) { return authoritiesFromStringStream( values.stream().filter(role -> role.startsWith("ROLE_")) ); } private static Set<<% if (databaseType === 'couchbase') { %>String<% } else { %>Authority<% } %>> authoritiesFromStringStream(Stream<String> strings) { return strings<% if (databaseType !== 'couchbase') { %> .map(string -> { Authority auth = new Authority(); auth.setName(string); return auth; })<% } %>.collect(Collectors.toSet()); } <%_ } _%> }
/*
 * Copyright 2014 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaaproject.kaa.server.transports.http.transport.commands;

import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.multipart.Attribute;
import io.netty.handler.codec.http.multipart.DefaultHttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpDataFactory;
import io.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import io.netty.handler.codec.http.multipart.InterfaceHttpData;
import io.netty.handler.codec.http.multipart.InterfaceHttpData.HttpDataType;

import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.List;

import org.apache.commons.codec.binary.Base64;
import org.kaaproject.kaa.common.Constants;
import org.kaaproject.kaa.common.endpoint.CommonEPConstans;
import org.kaaproject.kaa.common.endpoint.security.MessageEncoderDecoder;
import org.kaaproject.kaa.server.common.server.BadRequestException;
import org.kaaproject.kaa.server.transport.channel.ChannelType;
import org.kaaproject.kaa.server.transports.http.transport.netty.AbstractCommand;

/**
 * Base command for HTTP sync requests. Extracts the signature, session key,
 * payload and requested platform protocol from a multipart HTTP request, and
 * builds the signed HTTP response.
 */
public abstract class AbstractHttpSyncCommand extends AbstractCommand {

    /** Signature of the request payload, taken from the multipart body. */
    private byte[] requestSignature;

    /** The encoded request session key. */
    private byte[] requestKey;

    /** The raw request payload. */
    private byte[] requestData;

    /** The response body to send back to the endpoint. */
    private byte[] responseBody;

    /** Signature of the response body; optional (header omitted when null). */
    private byte[] responseSignature;

    /** Requested platform protocol; defaults to Avro unless the request overrides it. */
    private int nextProtocol = Constants.KAA_PLATFORM_PROTOCOL_AVRO_ID;

    /**
     * Gets the type of channel that issued this command.
     *
     * @return the channel type
     */
    public abstract ChannelType getChannelType();

    /**
     * Instantiates a new abstract HTTP sync command.
     */
    public AbstractHttpSyncCommand() {
        super();
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * org.kaaproject.kaa.server.common.http.server.CommandProcessor#parse()
     */
    @Override
    public void parse() throws Exception {
        LOG.trace("CommandName: {}: Parse..", COMMAND_NAME);
        HttpDataFactory factory = new DefaultHttpDataFactory(DefaultHttpDataFactory.MINSIZE);
        HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(factory, getRequest());
        try {
            if (!decoder.isMultipart()) {
                // Nothing to extract from a non-multipart request.
                return;
            }
            LOG.trace("Chunked: {}", HttpHeaders.isTransferEncodingChunked(getRequest()));
            LOG.trace(": Multipart..");
            List<InterfaceHttpData> datas = decoder.getBodyHttpDatas();
            if (datas.isEmpty()) {
                LOG.error("Multipart.. size 0");
                // Fixed typos in the original message ("inccorect", "multiprat").
                throw new BadRequestException("HTTP request incorrect, multipart size is 0");
            }
            for (InterfaceHttpData data : datas) {
                LOG.trace("Multipart1 name {} type {}", data.getName(), data.getHttpDataType().name());
                if (data.getHttpDataType() != HttpDataType.Attribute) {
                    continue;
                }
                Attribute attribute = (Attribute) data;
                String name = data.getName();
                if (CommonEPConstans.REQUEST_SIGNATURE_ATTR_NAME.equals(name)) {
                    requestSignature = attribute.get();
                    traceAttribute(data, "Signature", requestSignature);
                } else if (CommonEPConstans.REQUEST_KEY_ATTR_NAME.equals(name)) {
                    requestKey = attribute.get();
                    traceAttribute(data, "requestKey", requestKey);
                } else if (CommonEPConstans.REQUEST_DATA_ATTR_NAME.equals(name)) {
                    requestData = attribute.get();
                    traceAttribute(data, "requestData", requestData);
                } else if (CommonEPConstans.NEXT_PROTOCOL_ATTR_NAME.equals(name)) {
                    nextProtocol = Integer.parseInt(attribute.getString());
                    LOG.trace("[{}] next protocol is {}", getSessionUuid(), nextProtocol);
                }
            }
        } finally {
            // Release pooled buffers / temp files held by the decoder; without
            // this, disk-backed multipart data accumulates as a resource leak.
            // The byte[] values extracted above remain valid after destroy().
            decoder.destroy();
        }
    }

    /** Trace-logs a decoded multipart attribute (label, size, hex dump). */
    private void traceAttribute(InterfaceHttpData data, String label, byte[] payload) {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Multipart name {} type {} {} set. size: {}", data.getName(),
                    data.getHttpDataType().name(), label, payload.length);
            LOG.trace(MessageEncoderDecoder.bytesToHex(payload));
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * org.kaaproject.kaa.server.common.http.server.CommandProcessor#Process ()
     */
    @Override
    public void process() throws BadRequestException, GeneralSecurityException, IOException {
        // No-op at this level; subclasses may override.
    }

    public byte[] getRequestSignature() {
        return requestSignature;
    }

    // NOTE: lower-case "k" kept for backward compatibility with existing callers.
    public byte[] getRequestkey() {
        return requestKey;
    }

    public byte[] getRequestData() {
        return requestData;
    }

    public byte[] getResponseBody() {
        return responseBody;
    }

    public void setResponseBody(byte[] responseBody) {
        this.responseBody = responseBody;
    }

    public byte[] getResponseSignature() {
        return responseSignature;
    }

    public void setResponseSignature(byte[] responseSignature) {
        this.responseSignature = responseSignature;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.kaaproject.kaa.server.common.http.server.CommandProcessor#
     * getHttpResponse()
     */
    @Override
    public HttpResponse getResponse() {
        LOG.trace("CommandName: {}: getHttpResponse..", COMMAND_NAME);

        FullHttpResponse httpResponse =
                new DefaultFullHttpResponse(HTTP_1_1, OK, Unpooled.copiedBuffer(responseBody));

        httpResponse.headers().set(CONTENT_TYPE, CommonEPConstans.RESPONSE_CONTENT_TYPE);
        httpResponse.headers().set(CONTENT_LENGTH, httpResponse.content().readableBytes());
        httpResponse.headers().set(CommonEPConstans.RESPONSE_TYPE, CommonEPConstans.RESPONSE_TYPE_OPERATION);
        if (responseSignature != null) {
            httpResponse.headers().set(CommonEPConstans.SIGNATURE_HEADER_NAME,
                    Base64.encodeBase64String(responseSignature));
        }
        // Close unless both sides want keep-alive (equivalent to the original
        // nested branches, flattened for readability).
        if (isNeedConnectionClose() || !HttpHeaders.isKeepAlive(getRequest())) {
            httpResponse.headers().set(CONNECTION, HttpHeaders.Values.CLOSE);
        } else {
            httpResponse.headers().set(CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
        }
        return httpResponse;
    }

    public static String getCommandName() {
        return COMMAND_NAME;
    }

    @Override
    public int getNextProtocol() {
        return nextProtocol;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.rest.history;

import static com.jayway.restassured.RestAssured.expect;
import static com.jayway.restassured.RestAssured.given;
import static com.jayway.restassured.path.json.JsonPath.from;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.ws.rs.core.Response.Status;

import org.camunda.bpm.engine.history.HistoricDetail;
import org.camunda.bpm.engine.history.HistoricDetailQuery;
import org.camunda.bpm.engine.history.HistoricFormField;
import org.camunda.bpm.engine.history.HistoricVariableUpdate;
import org.camunda.bpm.engine.impl.calendar.DateTimeUtil;
import org.camunda.bpm.engine.impl.core.variable.type.ObjectTypeImpl;
import org.camunda.bpm.engine.rest.AbstractRestServiceTest;
// FIX: previously imported javax.xml.registry.InvalidRequestException (JAXR), which only
// coincidentally shares the simple name with the exception the REST layer actually throws.
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.helper.MockHistoricVariableUpdateBuilder;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.helper.VariableTypeHelper;
import org.camunda.bpm.engine.variable.Variables;
import org.camunda.bpm.engine.variable.type.ValueType;
import org.camunda.bpm.engine.variable.value.ObjectValue;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;

import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.response.Response;

/**
 * Query tests for the historic detail REST resource, driven against a mocked
 * {@link HistoricDetailQuery}.
 *
 * @author Roman Smirnov
 */
public abstract class AbstractHistoricDetailRestServiceQueryTest extends AbstractRestServiceTest {

  protected static final String HISTORIC_DETAIL_RESOURCE_URL = TEST_RESOURCE_ROOT_PATH + "/history/detail";

  protected static final String HISTORIC_DETAIL_COUNT_RESOURCE_URL = HISTORIC_DETAIL_RESOURCE_URL + "/count";

  protected HistoricDetailQuery mockedQuery;

  protected HistoricVariableUpdate historicUpdateMock;
  protected MockHistoricVariableUpdateBuilder historicUpdateBuilder;

  protected HistoricFormField historicFormFieldMock;

  @Before
  public void setUpRuntimeData() {
    List<HistoricDetail> details = new ArrayList<HistoricDetail>();

    // one variable-update detail and one form-field detail, in that order
    historicUpdateBuilder = MockProvider.mockHistoricVariableUpdate();
    historicUpdateMock = historicUpdateBuilder.build();
    historicFormFieldMock = MockProvider.createMockHistoricFormField();

    details.add(historicUpdateMock);
    details.add(historicFormFieldMock);

    mockedQuery = setUpMockedDetailsQuery(details);
  }

  protected HistoricDetailQuery setUpMockedDetailsQuery(List<HistoricDetail> detailMocks) {
    HistoricDetailQuery mock = mock(HistoricDetailQuery.class);

    when(mock.list()).thenReturn(detailMocks);
    when(mock.count()).thenReturn((long) detailMocks.size());

    when(processEngine.getHistoryService().createHistoricDetailQuery()).thenReturn(mock);

    return mock;
  }

  @Test
  public void testEmptyQuery() {
    String queryKey = "";
    given()
      .queryParam("processInstanceId", queryKey)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);
  }

  @Test
  public void testNoParametersQuery() {
    expect()
      .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).list();
    verify(mockedQuery).disableBinaryFetching();
    verifyNoMoreInteractions(mockedQuery);
  }

  @Test
  public void testNoParametersQueryDisableObjectDeserialization() {
    given()
      .queryParam("deserializeValues", false)
      .expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).list();
    verify(mockedQuery).disableBinaryFetching();
    verify(mockedQuery).disableCustomObjectDeserialization();
    verifyNoMoreInteractions(mockedQuery);
  }

  @Test
  public void testInvalidSortingOptions() {
    executeAndVerifySorting("anInvalidSortByOption", "asc", Status.BAD_REQUEST);
    executeAndVerifySorting("processInstanceId", "anInvalidSortOrderOption", Status.BAD_REQUEST);
  }

  protected void executeAndVerifySorting(String sortBy, String sortOrder, Status expectedStatus) {
    given()
      .queryParam("sortBy", sortBy)
      .queryParam("sortOrder", sortOrder)
      .then()
        .expect()
          .statusCode(expectedStatus.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);
  }

  @Test
  public void testSortOrderParameterOnly() {
    given()
      .queryParam("sortOrder", "asc")
      .then()
        .expect()
          .statusCode(Status.BAD_REQUEST.getStatusCode())
          .contentType(ContentType.JSON)
          .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
          .body("message", containsString("Only a single sorting parameter specified. sortBy and sortOrder required"))
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);
  }

  @Test
  public void testSortingParameters() {
    InOrder inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("processInstanceId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessInstanceId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("processInstanceId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessInstanceId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("variableName", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByVariableName();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("variableName", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByVariableName();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("formPropertyId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByFormPropertyId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("formPropertyId", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByFormPropertyId();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("variableType", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByVariableType();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("variableType", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByVariableType();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("variableRevision", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByVariableRevision();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("variableRevision", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByVariableRevision();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("time", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByTime();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("time", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByTime();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("occurrence", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderPartiallyByOccurrence();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("occurrence", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderPartiallyByOccurrence();
    inOrder.verify(mockedQuery).desc();
  }

  @Test
  public void testSuccessfulPagination() {
    int firstResult = 0;
    int maxResults = 10;

    given()
      .queryParam("firstResult", firstResult)
      .queryParam("maxResults", maxResults)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).listPage(firstResult, maxResults);
  }

  @Test
  public void testMissingFirstResultParameter() {
    int maxResults = 10;

    given()
      .queryParam("maxResults", maxResults)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    // a missing firstResult defaults to 0
    verify(mockedQuery).listPage(0, maxResults);
  }

  @Test
  public void testMissingMaxResultsParameter() {
    int firstResult = 10;

    given()
      .queryParam("firstResult", firstResult)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    // a missing maxResults defaults to "no limit"
    verify(mockedQuery).listPage(firstResult, Integer.MAX_VALUE);
  }

  @Test
  public void testQueryCount() {
    expect()
      .statusCode(Status.OK.getStatusCode())
      .body("count", equalTo(2))
      .when()
        .get(HISTORIC_DETAIL_COUNT_RESOURCE_URL);

    verify(mockedQuery).count();
  }

  @Test
  public void testSimpleHistoricActivityQuery() {
    Response response = given()
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
          .and()
          .body("[0].id", equalTo(historicUpdateBuilder.getId()))
          .body("[0].variableName", equalTo(historicUpdateBuilder.getName()))
          .body("[0].variableInstanceId", equalTo(historicUpdateBuilder.getVariableInstanceId()))
          .body("[0].variableType", equalTo(VariableTypeHelper.toExpectedValueTypeName(
              historicUpdateBuilder.getTypedValue().getType())))
          .body("[0].value", equalTo(historicUpdateBuilder.getTypedValue().getValue()))
          .body("[0].processDefinitionKey", equalTo(historicUpdateBuilder.getProcessDefinitionKey()))
          .body("[0].processDefinitionId", equalTo(historicUpdateBuilder.getProcessDefinitionId()))
          .body("[0].processInstanceId", equalTo(historicUpdateBuilder.getProcessInstanceId()))
          .body("[0].errorMessage", equalTo(historicUpdateBuilder.getErrorMessage()))
          .body("[0].activityInstanceId", equalTo(historicUpdateBuilder.getActivityInstanceId()))
          .body("[0].revision", equalTo(historicUpdateBuilder.getRevision()))
          .body("[0].time", equalTo(historicUpdateBuilder.getTime()))
          .body("[0].taskId", equalTo(historicUpdateBuilder.getTaskId()))
          .body("[0].executionId", equalTo(historicUpdateBuilder.getExecutionId()))
          .body("[0].type", equalTo("variableUpdate"))
          .body("[0].caseDefinitionKey", equalTo(historicUpdateBuilder.getCaseDefinitionKey()))
          .body("[0].caseDefinitionId", equalTo(historicUpdateBuilder.getCaseDefinitionId()))
          .body("[0].caseInstanceId", equalTo(historicUpdateBuilder.getCaseInstanceId()))
          .body("[0].caseExecutionId", equalTo(historicUpdateBuilder.getCaseExecutionId()))
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).list();

    String content = response.asString();
    List<String> details = from(content).getList("");
    Assert.assertEquals("There should be two activity instance returned.", 2, details.size());
    Assert.assertNotNull("The returned details should not be null.", details.get(0));
    Assert.assertNotNull("The returned details should not be null.", details.get(1));

    // note: element [0] is asserted as part of the fluent rest-assured invocation
    String returnedId2 = from(content).getString("[1].id");
    String returnedProcessDefinitionKey2 = from(content).getString("[1].processDefinitionKey");
    String returnedProcessDefinitionId2 = from(content).getString("[1].processDefinitionId");
    String returnedProcessInstanceId2 = from(content).getString("[1].processInstanceId");
    String returnedActivityInstanceId2 = from(content).getString("[1].activityInstanceId");
    String returnedExecutionId2 = from(content).getString("[1].executionId");
    String returnedTaskId2 = from(content).getString("[1].taskId");
    Date returnedTime2 = DateTimeUtil.parseDate(from(content).getString("[1].time"));
    String returnedFieldId = from(content).getString("[1].fieldId");
    String returnedFieldValue = from(content).getString("[1].fieldValue");
    String returnedType = from(content).getString("[1].type");
    String returnedCaseDefinitionKey2 = from(content).getString("[1].caseDefinitionKey");
    String returnedCaseDefinitionId2 = from(content).getString("[1].caseDefinitionId");
    String returnedCaseInstanceId2 = from(content).getString("[1].caseInstanceId");
    String returnedCaseExecutionId2 = from(content).getString("[1].caseExecutionId");

    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_ID, returnedId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_PROC_DEF_KEY, returnedProcessDefinitionKey2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_PROC_DEF_ID, returnedProcessDefinitionId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_PROC_INST_ID, returnedProcessInstanceId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_ACT_INST_ID, returnedActivityInstanceId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_EXEC_ID, returnedExecutionId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_TASK_ID, returnedTaskId2);
    Assert.assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_TIME), returnedTime2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_FIELD_ID, returnedFieldId);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_VALUE, returnedFieldValue);
    Assert.assertEquals("formField", returnedType);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_CASE_DEF_ID, returnedCaseDefinitionId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_CASE_DEF_KEY, returnedCaseDefinitionKey2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_CASE_INST_ID, returnedCaseInstanceId2);
    Assert.assertEquals(MockProvider.EXAMPLE_HISTORIC_FORM_FIELD_CASE_EXEC_ID, returnedCaseExecutionId2);
  }

  @Test
  public void testSerializableVariableInstanceRetrieval() {
    ObjectValue serializedValue = Variables.serializedObjectValue("a serialized value").create();

    MockHistoricVariableUpdateBuilder builder = MockProvider.mockHistoricVariableUpdate()
        .typedValue(serializedValue);

    List<HistoricDetail> details = new ArrayList<HistoricDetail>();
    details.add(builder.build());

    mockedQuery = setUpMockedDetailsQuery(details);

    given()
      .then().expect().statusCode(Status.OK.getStatusCode())
      .and()
        .body("[0].value", equalTo("a serialized value"))
        .body("[0].variableType", equalTo(VariableTypeHelper.toExpectedValueTypeName(serializedValue.getType())))
        .body("[0].errorMessage", nullValue())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    // should not resolve custom objects but existing API requires it
    // verify(mockedQuery).disableCustomObjectDeserialization();
    verify(mockedQuery, never()).disableCustomObjectDeserialization();
  }

  @Test
  public void testSpinVariableInstanceRetrieval() {
    MockHistoricVariableUpdateBuilder builder = MockProvider.mockHistoricVariableUpdate()
        .typedValue(Variables
            .serializedObjectValue("aSerializedValue")
            .serializationDataFormat("aDataFormat")
            .objectTypeName("aRootType")
            .create());

    List<HistoricDetail> details = new ArrayList<HistoricDetail>();
    details.add(builder.build());

    mockedQuery = setUpMockedDetailsQuery(details);

    given()
      .then().expect().statusCode(Status.OK.getStatusCode())
      .and()
        .body("[0].variableType", equalTo(VariableTypeHelper.toExpectedValueTypeName(ValueType.OBJECT)))
        .body("[0].errorMessage", nullValue())
        .body("[0].value", equalTo("aSerializedValue"))
        .body("[0].valueInfo." + ObjectTypeImpl.VALUE_INFO_OBJECT_TYPE_NAME, equalTo("aRootType"))
        .body("[0].valueInfo." + ObjectTypeImpl.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("aDataFormat"))
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);
  }

  @Test
  public void testQueryByProcessInstanceId() {
    String processInstanceId = MockProvider.EXAMPLE_HISTORIC_VAR_UPDATE_PROC_INST_ID;

    given()
      .queryParam("processInstanceId", processInstanceId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).processInstanceId(processInstanceId);
  }

  @Test
  public void testQueryByExecutionId() {
    String executionId = MockProvider.EXAMPLE_HISTORIC_VAR_UPDATE_EXEC_ID;

    given()
      .queryParam("executionId", executionId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).executionId(executionId);
  }

  @Test
  public void testQueryByActivityInstanceId() {
    String activityInstanceId = MockProvider.EXAMPLE_HISTORIC_VAR_UPDATE_ACT_INST_ID;

    given()
      .queryParam("activityInstanceId", activityInstanceId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).activityInstanceId(activityInstanceId);
  }

  @Test
  public void testQueryByTaskId() {
    String taskId = MockProvider.EXAMPLE_HISTORIC_VAR_UPDATE_TASK_ID;

    given()
      .queryParam("taskId", taskId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).taskId(taskId);
  }

  @Test
  public void testQueryByVariableInstanceId() {
    String variableInstanceId = MockProvider.EXAMPLE_HISTORIC_VAR_UPDATE_ID;

    given()
      .queryParam("variableInstanceId", variableInstanceId)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).variableInstanceId(variableInstanceId);
  }

  @Test
  public void testQueryByFormFields() {
    given()
      .queryParam("formFields", "true")
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).formFields();
  }

  @Test
  public void testQueryByVariableUpdates() {
    given()
      .queryParam("variableUpdates", "true")
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).variableUpdates();
  }

  @Test
  public void testQueryByExcludeTaskDetails() {
    given()
      .queryParam("excludeTaskDetails", "true")
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).excludeTaskDetails();
  }

  @Test
  public void testQueryByCaseInstanceId() {
    given()
      .queryParam("caseInstanceId", MockProvider.EXAMPLE_CASE_INSTANCE_ID)
      .then().expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).caseInstanceId(MockProvider.EXAMPLE_CASE_INSTANCE_ID);
  }

  @Test
  public void testQueryByCaseExecutionId() {
    given()
      .queryParam("caseExecutionId", MockProvider.EXAMPLE_CASE_EXECUTION_ID)
      .then().expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .get(HISTORIC_DETAIL_RESOURCE_URL);

    verify(mockedQuery).caseExecutionId(MockProvider.EXAMPLE_CASE_EXECUTION_ID);
  }

}
/**
 * Copyright (C) 2009 eXo Platform SAS.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.exoplatform.portal.webui.application;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.exoplatform.webui.application.WebuiRequestContext;
import org.exoplatform.webui.config.annotation.ComponentConfig;
import org.exoplatform.webui.config.annotation.ComponentConfigs;
import org.exoplatform.webui.config.annotation.EventConfig;
import org.exoplatform.webui.core.UIDropDownControl;
import org.exoplatform.webui.core.model.SelectItemOption;
import org.exoplatform.webui.event.Event;
import org.exoplatform.webui.event.Event.Phase;
import org.exoplatform.webui.event.EventListener;
import org.exoplatform.webui.form.UIForm;
import org.exoplatform.webui.form.UIFormInput;
import org.exoplatform.webui.form.UIFormInputBase;

/**
 * Form input that lets the user pick a portlet theme grouped by category; the
 * category is chosen through a nested {@link UIDropDownControl}.
 *
 * Created by The eXo Platform SARL Author : Tung Pham [email protected] Modify : dang.tung [email protected] Nov 5,
 * 2007
 */
@ComponentConfigs({
        @ComponentConfig(template = "system:/groovy/webui/form/UIItemThemeSelector.gtmpl", events = {
                @EventConfig(listeners = UIItemThemeSelector.SelectThemeActionListener.class, phase = Phase.DECODE),
                @EventConfig(listeners = UIItemThemeSelector.SetDefaultActionListener.class, phase = Phase.DECODE) }),
        @ComponentConfig(type = UIDropDownControl.class, id = "ThemeDropDown", template = "system:/groovy/webui/core/UIDropDownControl.gtmpl", events = { @EventConfig(listeners = UIItemThemeSelector.ChangeOptionActionListener.class) }) })
public class UIItemThemeSelector extends UIFormInputBase<String> {

    /** Currently selected theme name; null means "use the default". */
    private String selectedTheme;

    /** All theme categories currently offered by the selector. */
    private List<ThemeCategory> categories = new ArrayList<ThemeCategory>();

    /** Currently selected category, or null when none is selected. */
    private ThemeCategory selectedCategory;

    public static final String DEFAULT_THEME = "DefaultTheme";

    public UIItemThemeSelector(String name, String bindingField) throws Exception {
        super(name, bindingField, String.class);
        setComponentConfig(UIItemThemeSelector.class, null);
        addChild(UIDropDownControl.class, "ThemeDropDown", null);
    }

    @SuppressWarnings("unchecked")
    public UIFormInput setValue(String value) {
        setSelectedTheme(value);
        return this;
    }

    public String getValue() {
        return getSelectedTheme();
    }

    /**
     * Decodes the submitted form value; the literal string "null" or a blank
     * value clears the selection.
     */
    public void decode(Object input, WebuiRequestContext context) {
        String value = String.valueOf(input);
        if (value.equals("null") || value.trim().length() < 1)
            selectedTheme = null;
        else
            selectedTheme = value;
    }

    public String event(String name, String beanId) throws Exception {
        UIForm uiForm = getAncestorOfType(UIForm.class);
        return uiForm.event(name, beanId);
    }

    public String event(String name) throws Exception {
        UIForm uiForm = getAncestorOfType(UIForm.class);
        return uiForm.event(name);
    }

    /** Resets the selector to the default theme and the first drop-down entry. */
    public void reset() {
        super.reset();
        selectedCategory = null;
        selectedTheme = DEFAULT_THEME;
        getChild(UIDropDownControl.class).setValue(0);
    }

    /**
     * Rebuilds the category list from a map of category name to theme names and
     * selects the first category.
     *
     * @param themeSet category name to themes; null or empty clears the selector
     */
    public void setValues(Map<String, Set<String>> themeSet) {
        categories.clear();
        // FIX: also guard against an empty map -- previously categories.get(0)
        // below threw IndexOutOfBoundsException for a non-null empty map.
        if (themeSet == null || themeSet.isEmpty()) {
            selectedCategory = null;
            selectedTheme = null;
            return;
        }
        Iterator<Entry<String, Set<String>>> itr = themeSet.entrySet().iterator();
        while (itr.hasNext()) {
            Entry<String, Set<String>> cateEntry = itr.next();
            ThemeCategory category = new ThemeCategory(cateEntry.getKey());
            List<String> themes = new ArrayList<String>(cateEntry.getValue());
            for (String theme : themes) {
                category.addTheme(theme);
            }
            categories.add(category);
        }
        setSelectedCategory(categories.get(0));
        getChild(UIDropDownControl.class).setOptions(getDropDownOptions());
    }

    public ThemeCategory getSelectedCategory() {
        // fall back to the first category when nothing was selected explicitly
        if (selectedCategory == null && categories.size() > 0)
            return categories.get(0);
        return selectedCategory;
    }

    public void setSelectedCategory(ThemeCategory selectedCate) {
        if (selectedCate == null) {
            setSelectedCategory((String) null);
            return;
        }
        String cateName = selectedCate.getName();
        setSelectedCategory(cateName);
    }

    /**
     * Selects the category with the given name (if present) and synchronizes
     * the drop-down; a null or unknown name clears the selection.
     */
    public void setSelectedCategory(String cateName) {
        selectedCategory = null;
        if (cateName == null)
            return;
        UIDropDownControl uiDropDown = getChild(UIDropDownControl.class);
        for (ThemeCategory cate : categories) {
            if (cate.getName().equals(cateName)) {
                selectedCategory = cate;
                uiDropDown.setValue(cateName);
                return;
            }
        }
    }

    public String getSelectedTheme() {
        if (selectedTheme == null || selectedTheme.trim().length() < 1)
            selectedTheme = DEFAULT_THEME;
        return selectedTheme;
    }

    /**
     * Selects the given theme if it exists in some category (also selecting
     * that category); otherwise clears the selection.
     */
    public void setSelectedTheme(String value) {
        selectedTheme = null;
        for (ThemeCategory cate : categories) {
            List<String> themes = cate.getThemes();
            if (themes == null)
                continue;
            for (String theme : themes) {
                if (theme.equals(value)) {
                    selectedTheme = value;
                    setSelectedCategory(cate);
                    return;
                }
            }
        }
    }

    public List<ThemeCategory> getCategories() {
        if (categories == null)
            return new ArrayList<ThemeCategory>();
        return categories;
    }

    public void setCategories(List<ThemeCategory> list) {
        categories = list;
        getChild(UIDropDownControl.class).setOptions(getDropDownOptions());
    }

    /** Builds the drop-down options, one per category (label == value == name). */
    private List<SelectItemOption<String>> getDropDownOptions() {
        List<SelectItemOption<String>> options = new ArrayList<SelectItemOption<String>>();
        if (categories != null) {
            for (ThemeCategory ele : categories) {
                String cateName = ele.getName();
                options.add(new SelectItemOption<String>(cateName, cateName));
            }
        }
        return options;
    }

    public static class SelectThemeActionListener extends EventListener<UIItemThemeSelector> {
        public void execute(Event<UIItemThemeSelector> event) throws Exception {
            UIItemThemeSelector uiFormInput = event.getSource();
            String theme = event.getRequestContext().getRequestParameter(OBJECTID);
            uiFormInput.setSelectedTheme(theme);
            UIForm uiForm = uiFormInput.getAncestorOfType(UIForm.class);
            event.getRequestContext().addUIComponentToUpdateByAjax(uiForm.getParent());
        }
    }

    public static class ChangeOptionActionListener extends EventListener<UIDropDownControl> {
        public void execute(Event<UIDropDownControl> event) throws Exception {
            UIDropDownControl uiDropDown = event.getSource();
            String category = event.getRequestContext().getRequestParameter(OBJECTID);
            UIItemThemeSelector uiFormInput = uiDropDown.getParent();
            uiFormInput.setSelectedCategory(category);
            UIPortletForm uiportletForm = uiFormInput.getAncestorOfType(UIPortletForm.class);
            uiportletForm.setSelectedTab(uiportletForm.getChild(UIFormInputThemeSelector.class).getId());
            event.getRequestContext().addUIComponentToUpdateByAjax(uiFormInput);
        }
    }

    public static class SetDefaultActionListener extends EventListener<UIItemThemeSelector> {
        public void execute(Event<UIItemThemeSelector> event) throws Exception {
            UIItemThemeSelector uiFormInput = event.getSource();
            uiFormInput.reset();
            UIForm uiForm = uiFormInput.getAncestorOfType(UIForm.class);
            event.getRequestContext().addUIComponentToUpdateByAjax(uiForm.getParent());
        }
    }

    /** Simple value object: a named category holding a list of theme names. */
    public static class ThemeCategory {

        private String name_;

        private String description_;

        private List<String> themes_;

        public ThemeCategory(String name) {
            name_ = name;
            description_ = name;
        }

        public ThemeCategory(String name, String description) {
            name_ = name;
            description_ = description;
        }

        public String getName() {
            return name_;
        }

        public void setName(String name) {
            name_ = name;
        }

        public String getDescription() {
            return description_;
        }

        public void setDescription(String description) {
            description_ = description;
        }

        public List<String> getThemes() {
            return themes_;
        }

        public void setThemes(List<String> themes) {
            themes_ = themes;
        }

        public void addTheme(String theme) {
            // lazily create the backing list on first add
            if (themes_ == null)
                themes_ = new ArrayList<String>();
            themes_.add(theme);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.openapi; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.support.DefaultExchange; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class RestOpenApiProcessorTest { @Test public void testRestOpenApiProcessor() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(null, false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); processor.process(exchange); String json = exchange.getMessage().getBody(String.class); assertNotNull(json); 
assertTrue(json.contains("\"/foo\"")); assertTrue(json.contains("\"/bar\"")); assertTrue(json.contains("\"summary\" : \"Foo endpoint\"")); assertTrue(json.contains("\"summary\" : \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorOpenApiJsonPath() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(null, false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/openapi.json"); processor.process(exchange); String json = exchange.getMessage().getBody(String.class); assertNotNull(json); assertEquals("application/json", exchange.getMessage().getHeader(Exchange.CONTENT_TYPE)); assertTrue(json.contains("\"/foo\"")); assertTrue(json.contains("\"/bar\"")); assertTrue(json.contains("\"summary\" : \"Foo endpoint\"")); assertTrue(json.contains("\"summary\" : \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorOpenApiYamlPath() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(null, false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/openapi.yaml"); processor.process(exchange); String yaml = exchange.getMessage().getBody(String.class); assertNotNull(yaml); assertEquals("text/yaml", 
exchange.getMessage().getHeader(Exchange.CONTENT_TYPE)); assertTrue(yaml.contains("/foo:")); assertTrue(yaml.contains("/bar:")); assertTrue(yaml.contains("summary: \"Foo endpoint\"")); assertTrue(yaml.contains("summary: \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorCustomPath() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(null, false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/some/custom/path/api.json"); processor.process(exchange); String json = exchange.getMessage().getBody(String.class); assertNotNull(json); assertEquals("application/json", exchange.getMessage().getHeader(Exchange.CONTENT_TYPE)); assertTrue(json.contains("\"/foo\"")); assertTrue(json.contains("\"/bar\"")); assertTrue(json.contains("\"summary\" : \"Foo endpoint\"")); assertTrue(json.contains("\"summary\" : \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorAcceptHeaderJson() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(null, false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/some/custom/path/api"); exchange.getMessage().setHeader("Accept", "application/json"); processor.process(exchange); String 
json = exchange.getMessage().getBody(String.class); assertNotNull(json); assertEquals("application/json", exchange.getMessage().getHeader(Exchange.CONTENT_TYPE)); assertTrue(json.contains("\"/foo\"")); assertTrue(json.contains("\"/bar\"")); assertTrue(json.contains("\"summary\" : \"Foo endpoint\"")); assertTrue(json.contains("\"summary\" : \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorAcceptHeaderYaml() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(null, false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/some/custom/path/api"); exchange.getMessage().setHeader("Accept", "application/yaml"); processor.process(exchange); String yaml = exchange.getMessage().getBody(String.class); assertNotNull(yaml); assertEquals("text/yaml", exchange.getMessage().getHeader(Exchange.CONTENT_TYPE)); assertTrue(yaml.contains("/foo:")); assertTrue(yaml.contains("/bar:")); assertTrue(yaml.contains("summary: \"Foo endpoint\"")); assertTrue(yaml.contains("summary: \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorContextIdListingEnabledForDefaultPath() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); context.getRegistry().bind("dummy", new DummyRestConsumerFactory()); RestOpenApiProcessor processor = new RestOpenApiProcessor(".*camel.*", true, null, 
context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/openapi.json"); context.start(); try { processor.process(exchange); String json = exchange.getMessage().getBody(String.class); assertNotNull(json); assertEquals("[{\"name\":\"" + context.getName() + "\"}]", json.replaceAll("\\s+", "")); } finally { context.stop(); } } @Test public void testRestOpenApiProcessorContextIdListingForNamePlaceholder() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest().get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor("#name#", false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/openapi.json"); processor.process(exchange); String json = exchange.getMessage().getBody(String.class); assertNotNull(json); assertTrue(json.contains("\"/foo\"")); assertTrue(json.contains("\"/bar\"")); assertTrue(json.contains("\"summary\" : \"Foo endpoint\"")); assertTrue(json.contains("\"summary\" : \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorContextIdListingEnabledForCustomPath() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest("/rest").get("/foo").description("Foo endpoint").route().id("foo-route").log("Hello /foo").endRest() .post("/bar").description("Bar endpoint").route().id("bar-route").log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor(".*camel.*", true, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); 
exchange.getMessage().setHeader(Exchange.HTTP_PATH, "/" + context.getName() + "/rest"); processor.process(exchange); String json = exchange.getMessage().getBody(String.class); assertNotNull(json); assertTrue(json.contains("\"/rest/foo\"")); assertTrue(json.contains("\"/rest/bar\"")); assertTrue(json.contains("\"summary\" : \"Foo endpoint\"")); assertTrue(json.contains("\"summary\" : \"Bar endpoint\"")); } @Test public void testRestOpenApiProcessorContextIdPatternNoMatches() throws Exception { CamelContext context = new DefaultCamelContext(); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { rest("/").get("/foo").description("Foo endpoint").route().log("Hello /foo").endRest().post("/bar") .description("Bar endpoint").route().log("Hello /foo").endRest(); } }); RestOpenApiProcessor processor = new RestOpenApiProcessor("an-invalid-pattern", false, null, context.getRestConfiguration()); Exchange exchange = new DefaultExchange(context); exchange.getMessage().setHeader("/some/rest/api/document.json", Exchange.HTTP_PATH); processor.process(exchange); assertEquals(204, exchange.getMessage().getHeader(Exchange.HTTP_RESPONSE_CODE)); assertNull(exchange.getMessage().getBody()); } }
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.packages;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.Attribute.StarlarkComputedDefaultTemplate.CannotPrecomputeDefaultsException;
import com.google.devtools.build.lib.packages.Package.NameConflictException;
import com.google.devtools.build.lib.packages.PackageFactory.PackageContext;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import net.starlark.java.eval.StarlarkSemantics;
import net.starlark.java.eval.StarlarkThread;
import net.starlark.java.eval.StarlarkThread.CallStackEntry;
import net.starlark.java.syntax.Location;

/**
 * Given a {@link RuleClass} and a set of attribute values, returns a {@link Rule} instance. Also
 * performs a number of checks and associates the {@link Rule} and the owning {@link Package}
 * with each other.
 *
 * <p>This class is immutable, once created the set of managed {@link RuleClass}es will not change.
 *
 * <p>Note: the code that actually populates the RuleClass map has been moved to {@link
 * RuleClassProvider}.
 */
public class RuleFactory {

  /**
   * Maps rule class name to the metaclass instance for that rule.
   */
  private final ImmutableMap<String, RuleClass> ruleClassMap;

  /** Constructs a RuleFactory instance. */
  // The map is snapshotted with ImmutableMap.copyOf, so later changes to the provider's map
  // are not reflected here (this is what makes the class immutable).
  public RuleFactory(RuleClassProvider provider) {
    this.ruleClassMap = ImmutableMap.copyOf(provider.getRuleClassMap());
  }

  /**
   * Returns the (immutable, unordered) set of names of all the known rule classes.
   */
  public Set<String> getRuleClassNames() {
    return ruleClassMap.keySet();
  }

  /**
   * Returns the RuleClass for the specified rule class name.
   */
  // Returns null for unknown names (plain Map.get semantics).
  public RuleClass getRuleClass(String ruleClassName) {
    return ruleClassMap.get(ruleClassName);
  }

  /**
   * Creates and returns a rule instance.
   *
   * <p>It is the caller's responsibility to add the rule to the package (the caller may choose not
   * to do so if, for example, the rule has errors).
   */
  static Rule createRule(
      Package.Builder pkgBuilder,
      RuleClass ruleClass,
      BuildLangTypedAttributeValuesMap attributeValues,
      EventHandler eventHandler,
      StarlarkSemantics semantics,
      ImmutableList<StarlarkThread.CallStackEntry> callstack)
      throws InvalidRuleException, InterruptedException {
    Preconditions.checkNotNull(ruleClass);
    String ruleClassName = ruleClass.getName();
    // The 'name' attribute is mandatory and must be a string; everything else about the rule
    // hangs off the label formed from it.
    Object nameObject = attributeValues.getAttributeValue("name");
    if (nameObject == null) {
      throw new InvalidRuleException(ruleClassName + " rule has no 'name' attribute");
    } else if (!(nameObject instanceof String)) {
      throw new InvalidRuleException(ruleClassName + " 'name' attribute must be a string");
    }
    String name = (String) nameObject;
    Label label;
    try {
      // Test that this would form a valid label name -- in particular, this
      // catches cases where Makefile variables $(foo) appear in "name".
      label = pkgBuilder.createLabel(name);
    } catch (LabelSyntaxException e) {
      throw new InvalidRuleException("illegal rule name: " + name + ": " + e.getMessage());
    }
    // Workspace-only rule classes may appear only in the WORKSPACE file, and all other rule
    // classes only outside it; both mismatches are hard errors.
    boolean inWorkspaceFile = pkgBuilder.isWorkspace();
    if (ruleClass.getWorkspaceOnly() && !inWorkspaceFile) {
      throw new RuleFactory.InvalidRuleException(
          ruleClass + " must be in the WORKSPACE file " + "(used by " + label + ")");
    } else if (!ruleClass.getWorkspaceOnly() && inWorkspaceFile) {
      throw new RuleFactory.InvalidRuleException(
          ruleClass + " cannot be in the WORKSPACE file " + "(used by " + label + ")");
    }

    boolean recordRuleInstantiationCallstack =
        semantics.getBool(BuildLanguageOptions.RECORD_RULE_INSTANTIATION_CALLSTACK);
    // May rewrite the attributes to add generator_{name,function,location} when the rule was
    // instantiated from a Starlark macro; also picks the location to attribute the rule to.
    AttributesAndLocation generator =
        generatorAttributesForMacros(
            pkgBuilder, attributeValues, callstack, label, recordRuleInstantiationCallstack);

    // The raw stack is of the form [<toplevel>@BUILD:1, [email protected]:1, cc_library@<builtin>].
    // Pop the innermost frame for the rule, since it's obvious.
    callstack =
        recordRuleInstantiationCallstack
            ? callstack.subList(0, callstack.size() - 1) // pop
            : ImmutableList.of(); // save space

    try {
      // Examines --incompatible_disable_third_party_license_checking to see if we should check
      // third party targets for license existence.
      //
      // This flag is overridable by RuleClass.ThirdPartyLicenseEnforcementPolicy (which is checked
      // in RuleClass). This lets Bazel and Blaze migrate away from license logic on independent
      // timelines. See --incompatible_disable_third_party_license_checking comments for details.
      boolean checkThirdPartyLicenses =
          !semantics.getBool(BuildLanguageOptions.INCOMPATIBLE_DISABLE_THIRD_PARTY_LICENSE_CHECKING);
      return ruleClass.createRule(
          pkgBuilder,
          label,
          generator.attributes,
          eventHandler,
          generator.location, // see b/23974287 for rationale
          callstack,
          checkThirdPartyLicenses);
    } catch (LabelSyntaxException | CannotPrecomputeDefaultsException e) {
      // Both are attribute/label construction failures; surface them as rule errors.
      throw new RuleFactory.InvalidRuleException(ruleClass + " " + e.getMessage());
    }
  }

  /**
   * Creates a {@link Rule} instance, adds it to the {@link Package.Builder} and returns it.
   *
   * @param pkgBuilder the under-construction {@link Package.Builder} to which the rule belongs
   * @param ruleClass the {@link RuleClass} of the rule
   * @param attributeValues a {@link BuildLangTypedAttributeValuesMap} mapping attribute names to
   *     attribute values of build-language type. Each attribute must be defined for this class of
   *     rule, and have a build-language-typed value which can be converted to the appropriate
   *     native type of the attribute (i.e. via {@link BuildType#selectableConvert}). There must be
   *     a map entry for each non-optional attribute of this class of rule.
   * @param eventHandler a eventHandler on which errors and warnings are reported during rule
   *     creation
   * @param semantics the Starlark semantics
   * @param callstack the stack of active calls in the Starlark thread
   * @throws InvalidRuleException if the rule could not be constructed for any reason (e.g. no
   *     {@code name} attribute is defined)
   * @throws NameConflictException if the rule's name or output files conflict with others in this
   *     package
   * @throws InterruptedException if interrupted
   */
  static Rule createAndAddRuleImpl(
      Package.Builder pkgBuilder,
      RuleClass ruleClass,
      BuildLangTypedAttributeValuesMap attributeValues,
      EventHandler eventHandler,
      StarlarkSemantics semantics,
      ImmutableList<StarlarkThread.CallStackEntry> callstack)
      throws InvalidRuleException, NameConflictException, InterruptedException {
    Rule rule =
        createRule(pkgBuilder, ruleClass, attributeValues, eventHandler, semantics, callstack);
    pkgBuilder.addRule(rule);
    return rule;
  }

  /**
   * Creates a {@link Rule} instance, adds it to the {@link Package.Builder} and returns it.
   *
   * @param context the package-building context in which this rule was declared
   * @param ruleClass the {@link RuleClass} of the rule
   * @param attributeValues a {@link BuildLangTypedAttributeValuesMap} mapping attribute names to
   *     attribute values of build-language type. Each attribute must be defined for this class of
   *     rule, and have a build-language-typed value which can be converted to the appropriate
   *     native type of the attribute (i.e. via {@link BuildType#selectableConvert}). There must be
   *     a map entry for each non-optional attribute of this class of rule.
   * @throws InvalidRuleException if the rule could not be constructed for any reason (e.g. no
   *     {@code name} attribute is defined)
   * @throws NameConflictException if the rule's name or output files conflict with others in this
   *     package
   * @throws InterruptedException if interrupted
   */
  // Thin public entry point: unpacks the PackageContext and delegates to createAndAddRuleImpl.
  public static Rule createAndAddRule(
      PackageContext context,
      RuleClass ruleClass,
      BuildLangTypedAttributeValuesMap attributeValues,
      StarlarkSemantics semantics,
      ImmutableList<StarlarkThread.CallStackEntry> callstack)
      throws InvalidRuleException, NameConflictException, InterruptedException {
    return createAndAddRuleImpl(
        context.pkgBuilder, ruleClass, attributeValues, context.eventHandler, semantics, callstack);
  }

  /**
   * InvalidRuleException is thrown by {@link Rule} creation methods if the {@link Rule} could
   * not be constructed. It contains an error message.
   */
  public static class InvalidRuleException extends Exception {
    // Only constructible within this class; callers catch it rather than create it.
    private InvalidRuleException(String message) {
      super(message);
    }
  }

  /** A pair of attributes and location. */
  private static final class AttributesAndLocation {
    final BuildLangTypedAttributeValuesMap attributes;
    final Location location;

    AttributesAndLocation(BuildLangTypedAttributeValuesMap attributes, Location location) {
      this.attributes = attributes;
      this.location = location;
    }
  }

  /**
   * A wrapper around an map of named attribute values that specifies whether the map's values
   * are of "build-language" or of "native" types.
   */
  public interface AttributeValues<T> {
    /**
     * Returns {@code true} if all the map's values are "build-language typed", i.e., resulting
     * from the evaluation of an expression in the build language. Returns {@code false} if all
     * the map's values are "natively typed", i.e. of a type returned by {@link
     * BuildType#selectableConvert}.
     */
    boolean valuesAreBuildLanguageTyped();

    /** Returns accessors for the attribute entries, to be used with the methods below. */
    Iterable<T> getAttributeAccessors();

    /** Returns the attribute name behind the given accessor. */
    String getName(T attributeAccessor);

    /** Returns the attribute value behind the given accessor. */
    Object getValue(T attributeAccessor);

    /** Returns whether the attribute behind the given accessor was explicitly specified. */
    boolean isExplicitlySpecified(T attributeAccessor);
  }

  /** A {@link AttributeValues} of explicit "build-language" values. */
  public static final class BuildLangTypedAttributeValuesMap
      implements AttributeValues<Map.Entry<String, Object>> {
    private final Map<String, Object> attributeValues;

    // NOTE(review): the map is stored as-is, not copied — callers should not mutate it afterwards.
    public BuildLangTypedAttributeValuesMap(Map<String, Object> attributeValues) {
      this.attributeValues = attributeValues;
    }

    /** Returns whether an attribute with the given name is present in the map. */
    private boolean containsAttributeNamed(String attributeName) {
      return attributeValues.containsKey(attributeName);
    }

    /** Returns the value for the given attribute name, or null if absent. */
    private Object getAttributeValue(String attributeName) {
      return attributeValues.get(attributeName);
    }

    @Override
    public boolean valuesAreBuildLanguageTyped() {
      return true;
    }

    @Override
    public Iterable<Map.Entry<String, Object>> getAttributeAccessors() {
      return attributeValues.entrySet();
    }

    @Override
    public String getName(Map.Entry<String, Object> attributeAccessor) {
      return attributeAccessor.getKey();
    }

    @Override
    public Object getValue(Map.Entry<String, Object> attributeAccessor) {
      return attributeAccessor.getValue();
    }

    // Every entry in a build-language map is treated as explicitly specified.
    @Override
    public boolean isExplicitlySpecified(Map.Entry<String, Object> attributeAccessor) {
      return true;
    }
  }

  /**
   * If the rule was created by a macro, this method sets the appropriate values for the attributes
   * generator_{name, function, location} and returns all attributes.
   *
   * <p>Otherwise, it returns the given attributes without any changes.
   */
  private static AttributesAndLocation generatorAttributesForMacros(
      Package.Builder pkgBuilder,
      BuildLangTypedAttributeValuesMap args,
      ImmutableList<CallStackEntry> stack,
      Label label,
      boolean recordRuleInstantiationCallstack) {
    // For a callstack [BUILD <toplevel>, .bzl <function>, <rule>],
    // location is that of the caller of 'rule' (the .bzl function).
    Location location = stack.size() < 2 ? Location.BUILTIN : stack.get(stack.size() - 2).location;

    // If the caller already supplied either generator attribute, leave the attributes alone.
    boolean hasName = args.containsAttributeNamed("generator_name");
    boolean hasFunc = args.containsAttributeNamed("generator_function");
    // TODO(bazel-team): resolve cases in our code where hasName && !hasFunc, or hasFunc && !hasName
    if (hasName || hasFunc) {
      return new AttributesAndLocation(args, location);
    }

    // The "generator" of a rule is the function (sometimes called "macro")
    // outermost in the call stack.
    // The stack must contain at least two entries:
    // 0: the outermost function (e.g. a BUILD file),
    // 1: the function called by it (e.g. a "macro" in a .bzl file).
    // optionally followed by other Starlark or built-in functions,
    // and finally the rule instantiation function.
    if (stack.size() < 2 || !stack.get(1).location.file().endsWith(".bzl")) {
      return new AttributesAndLocation(args, location); // macro is not a Starlark function
    }
    Location generatorLocation = stack.get(0).location; // location of call to generator
    String generatorFunction = stack.get(1).name;
    // Rebuild the attribute map with the synthesized generator_* attributes appended.
    ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
    for (Map.Entry<String, Object> attributeAccessor : args.getAttributeAccessors()) {
      String attributeName = args.getName(attributeAccessor);
      builder.put(attributeName, args.getValue(attributeAccessor));
    }
    // Prefer the name recorded by the package builder for this call site; fall back to the
    // rule's own 'name' attribute.
    String generatorName = pkgBuilder.getGeneratorNameByLocation().get(generatorLocation);
    if (generatorName == null) {
      generatorName = (String) args.getAttributeValue("name");
    }
    builder.put("generator_name", generatorName);
    if (!recordRuleInstantiationCallstack) {
      // When we are recording the callstack, we can materialize the value from callstack
      // as needed. So save memory by not recording it.
      builder.put("generator_function", generatorFunction);
      String relativePath = maybeGetRelativeLocation(generatorLocation, label);
      if (relativePath != null) {
        builder.put("generator_location", relativePath);
      }
    }

    try {
      args = new BuildLangTypedAttributeValuesMap(builder.build());
    } catch (IllegalArgumentException unused) {
      // We just fall back to the default case and swallow any messages.
      // (ImmutableMap.Builder#build throws on duplicate keys.)
    }
    // TODO(adonovan): is it appropriate to use generatorLocation as the rule's main location?
    // Or would 'location' (the immediate call) be more informative? When there are errors, the
    // location of the toplevel call of the generator may be quite unrelated to the error message.
    return new AttributesAndLocation(args, generatorLocation);
  }

  /**
   * Uses the given label to retrieve the workspace-relative path of the given location (including
   * the line number).
   *
   * <p>For example, the location /usr/local/workspace/my/cool/package/BUILD:3:1 and the label
   * //my/cool/package:BUILD would lead to "my/cool/package:BUILD:3".
   *
   * @return The workspace-relative path of the given location, or null if it could not be computed.
   */
  // TODO(b/151151653): make Starlark file Locations relative from the outset.
  @Nullable
  private static String maybeGetRelativeLocation(@Nullable Location location, Label label) {
    if (location == null) {
      return null;
    }
    // Determining the workspace root only works reliably if both location and label point to files
    // in the same package.
    // It would be preferable to construct the path from the label itself, but this doesn't work for
    // rules created from function calls in a subincluded file, even if both files share a path
    // prefix (for example, when //a/package:BUILD subincludes //a/package/with/a/subpackage:BUILD).
    // We can revert to that approach once subincludes aren't supported anymore.
    //
    // TODO(b/151165647): this logic has always been wrong:
    // it spuriously matches occurrences of the package name earlier in the path.
    String absolutePath = location.toString();
    int pos = absolutePath.indexOf(label.getPackageName());
    return (pos < 0) ? null : absolutePath.substring(pos);
  }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.jps.model.serialization; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.JDOMUtil; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.io.FileUtilRt; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.concurrency.AppExecutorUtil; import com.intellij.util.containers.ContainerUtil; import gnu.trove.THashSet; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jps.TimingLog; import org.jetbrains.jps.model.JpsDummyElement; import org.jetbrains.jps.model.JpsElement; import org.jetbrains.jps.model.JpsElementFactory; import org.jetbrains.jps.model.JpsProject; import org.jetbrains.jps.model.java.JpsJavaModuleType; import org.jetbrains.jps.model.library.sdk.JpsSdkType; import org.jetbrains.jps.model.module.JpsModule; import org.jetbrains.jps.model.serialization.artifact.JpsArtifactSerializer; import org.jetbrains.jps.model.serialization.facet.JpsFacetSerializer; import org.jetbrains.jps.model.serialization.impl.JpsModuleSerializationDataExtensionImpl; import org.jetbrains.jps.model.serialization.impl.JpsProjectSerializationDataExtensionImpl; import org.jetbrains.jps.model.serialization.impl.JpsSerializationFormatException; import org.jetbrains.jps.model.serialization.library.JpsLibraryTableSerializer; import org.jetbrains.jps.model.serialization.library.JpsSdkTableSerializer; import org.jetbrains.jps.model.serialization.module.JpsModuleClasspathSerializer; import org.jetbrains.jps.model.serialization.module.JpsModulePropertiesSerializer; import org.jetbrains.jps.model.serialization.module.JpsModuleRootModelSerializer; import org.jetbrains.jps.model.serialization.runConfigurations.JpsRunConfigurationSerializer; import 
org.jetbrains.jps.service.SharedThreadPool;
import org.jetbrains.jps.util.JpsPathUtil;

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

/**
 * Loads a {@link JpsProject} model from IntelliJ IDEA project files: either a single
 * {@code *.ipr} file or a directory-based {@code .idea} store. Module files are parsed
 * concurrently on a bounded thread pool, and optional "external" project configuration
 * (pointed to by the {@code external.project.config} system property) is merged in.
 */
public class JpsProjectLoader extends JpsLoaderBase {
  public static final String CLASSPATH_ATTRIBUTE = "classpath";
  public static final String CLASSPATH_DIR_ATTRIBUTE = "classpath-dir";
  private static final Logger LOG = Logger.getInstance(JpsProjectLoader.class);
  // Bounded pool used to read and deserialize module files in parallel (see loadModules below).
  private static final ExecutorService ourThreadPool = AppExecutorUtil.createBoundedApplicationPoolExecutor(
    "JpsProjectLoader Pool", SharedThreadPool.getInstance(), Runtime.getRuntime().availableProcessors());
  private final JpsProject myProject;
  private final Map<String, String> myPathVariables;
  // When false, modules listed in workspace.xml's UnloadedModulesList component are skipped.
  private final boolean myLoadUnloadedModules;

  private JpsProjectLoader(JpsProject project, Map<String, String> pathVariables, Path baseDir, boolean loadUnloadedModules) {
    super(createProjectMacroExpander(pathVariables, baseDir));
    myProject = project;
    myPathVariables = pathVariables;
    // Remember the project base directory on the model so serializers can resolve relative paths.
    myProject.getContainer().setChild(JpsProjectSerializationDataExtensionImpl.ROLE, new JpsProjectSerializationDataExtensionImpl(baseDir));
    myLoadUnloadedModules = loadUnloadedModules;
  }

  /** Creates a macro expander that resolves $PROJECT_DIR$ against the given base directory. */
  static JpsMacroExpander createProjectMacroExpander(Map<String, String> pathVariables, @NotNull Path baseDir) {
    JpsMacroExpander expander = new JpsMacroExpander(pathVariables);
    expander.addFileHierarchyReplacements(PathMacroUtil.PROJECT_DIR_MACRO_NAME, baseDir.toFile());
    return expander;
  }

  /** Convenience overload: loads the project without unloaded modules. */
  public static void loadProject(JpsProject project, Map<String, String> pathVariables, String projectPath) throws IOException {
    loadProject(project, pathVariables, projectPath, false);
  }

  /**
   * Loads the project model from {@code projectPath}, which may be a {@code *.ipr} file,
   * a project directory containing {@code .idea}, or the {@code .idea} directory itself.
   *
   * @throws IOException if no recognizable project files are found at the path
   */
  public static void loadProject(JpsProject project, Map<String, String> pathVariables, String projectPath, boolean loadUnloadedModules)
    throws IOException {
    Path file = Paths.get(FileUtil.toCanonicalPath(projectPath));
    if (Files.isRegularFile(file) && projectPath.endsWith(".ipr")) {
      new JpsProjectLoader(project, pathVariables, file.getParent(), loadUnloadedModules).loadFromIpr(file);
    }
    else {
      Path dotIdea = file.resolve(PathMacroUtil.DIRECTORY_STORE_NAME);
      Path directory;
      if (Files.isDirectory(dotIdea)) {
        directory = dotIdea;
      }
      else if (Files.isDirectory(file) && file.endsWith(PathMacroUtil.DIRECTORY_STORE_NAME)) {
        // The path itself already points at the .idea directory.
        directory = file;
      }
      else {
        throw new IOException("Cannot find IntelliJ IDEA project files at " + projectPath);
      }
      new JpsProjectLoader(project, pathVariables, directory.getParent(), loadUnloadedModules).loadFromDirectory(directory);
    }
  }

  /** Returns the project name stored in the .idea directory, or a default derived from the path. */
  @NotNull
  public static String getDirectoryBaseProjectName(@NotNull Path dir) {
    String name = JpsPathUtil.readProjectName(dir);
    return name != null ? name : JpsPathUtil.getDefaultProjectName(dir);
  }

  /**
   * Loads component data for a serializer, additionally merging in the "External..."-prefixed
   * variant of the CompilerConfiguration component from the external project config dir, if any.
   */
  @Nullable
  @Override
  protected <E extends JpsElement> Element loadComponentData(@NotNull JpsElementExtensionSerializerBase<E> serializer, @NotNull Path configFile) {
    Path externalConfigDir = resolveExternalProjectConfig("project");
    Element data = super.loadComponentData(serializer, configFile);
    String componentName = serializer.getComponentName();
    // Only CompilerConfiguration is merged with external storage; everything else is returned as-is.
    if (externalConfigDir == null || !(componentName.equals("CompilerConfiguration"))) {
      return data;
    }
    String prefixedComponentName = "External" + componentName;
    Element externalData = null;
    for (Element child : JDOMUtil.getChildren(loadRootElement(externalConfigDir.resolve(configFile.getFileName())))) {
      // be ready to handle both original name and prefixed
      if (child.getName().equals(prefixedComponentName) || JDomSerializationUtil.isComponent(prefixedComponentName, child) ||
          child.getName().equals(componentName) || JDomSerializationUtil.isComponent(componentName, child)) {
        externalData = child;
        break;
      }
    }
    if (data == null) {
      return externalData;
    }
    else if (externalData != null) {
      return JDOMUtil.deepMerge(data, externalData);
    }
    return data;
  }

  /**
   * Loads a directory-based (.idea) project: name, extensions, modules (merged with external
   * module storage when present), libraries, artifacts, and run configurations.
   */
  private void loadFromDirectory(@NotNull Path dir) {
    myProject.setName(getDirectoryBaseProjectName(dir));
    Path defaultConfigFile = dir.resolve("misc.xml");
    JpsSdkType<?> projectSdkType = loadProjectRoot(loadRootElement(defaultConfigFile));
    for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
      for (JpsProjectExtensionSerializer serializer : extension.getProjectExtensionSerializers()) {
        loadComponents(dir, defaultConfigFile, serializer, myProject);
      }
    }
    Path externalConfigDir = resolveExternalProjectConfig("project");
    if (externalConfigDir != null) {
      LOG.info("External project config dir is used: " + externalConfigDir);
    }
    Element moduleData = JDomSerializationUtil.findComponent(loadRootElement(dir.resolve("modules.xml")), "ProjectModuleManager");
    Element externalModuleData;
    if (externalConfigDir == null) {
      externalModuleData = null;
    }
    else {
      Element rootElement = loadRootElement(externalConfigDir.resolve("modules.xml"));
      if (rootElement == null) {
        externalModuleData = null;
      }
      else {
        // Try the known component names for externally-stored module lists, newest first.
        externalModuleData = JDomSerializationUtil.findComponent(rootElement, "ExternalProjectModuleManager");
        if (externalModuleData == null) {
          externalModuleData = JDomSerializationUtil.findComponent(rootElement, "ExternalModuleListStorage");
        }
        // old format (root tag is "component")
        if (externalModuleData == null && rootElement.getName().equals(JDomSerializationUtil.COMPONENT_ELEMENT)) {
          externalModuleData = rootElement;
        }
      }
    }
    if (externalModuleData != null) {
      // Strip the "External" prefix so the element merges with the regular ProjectModuleManager data.
      String componentName = externalModuleData.getAttributeValue("name");
      LOG.assertTrue(componentName != null && componentName.startsWith("External"));
      externalModuleData.setAttribute("name", componentName.substring("External".length()));
      if (moduleData == null) {
        moduleData = externalModuleData;
      }
      else {
        JDOMUtil.deepMerge(moduleData, externalModuleData);
      }
    }
    Path workspaceFile = dir.resolve("workspace.xml");
    loadModules(moduleData, projectSdkType, workspaceFile);
    Runnable timingLog = TimingLog.startActivity("loading project libraries");
    for (Path libraryFile : listXmlFiles(dir.resolve("libraries"))) {
      loadProjectLibraries(loadRootElement(libraryFile));
    }
    if (externalConfigDir != null) {
      loadProjectLibraries(loadRootElement(externalConfigDir.resolve("libraries.xml")));
    }
    timingLog.run();
    Runnable artifactsTimingLog = TimingLog.startActivity("loading artifacts");
    for (Path artifactFile : listXmlFiles(dir.resolve("artifacts"))) {
      loadArtifacts(loadRootElement(artifactFile));
    }
    if (externalConfigDir != null) {
      loadArtifacts(loadRootElement(externalConfigDir.resolve("artifacts.xml")));
    }
    artifactsTimingLog.run();
    if (hasRunConfigurationSerializers()) {
      Runnable runConfTimingLog = TimingLog.startActivity("loading run configurations");
      for (Path configurationFile : listXmlFiles(dir.resolve("runConfigurations"))) {
        JpsRunConfigurationSerializer.loadRunConfigurations(myProject, loadRootElement(configurationFile));
      }
      JpsRunConfigurationSerializer.loadRunConfigurations(myProject, JDomSerializationUtil.findComponent(loadRootElement(workspaceFile), "RunManager"));
      runConfTimingLog.run();
    }
  }

  /** Returns true if any serializer extension can handle run configurations. */
  private static boolean hasRunConfigurationSerializers() {
    for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
      if (!extension.getRunConfigurationPropertiesSerializers().isEmpty()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Lists regular *.xml files in {@code dir}. Returns an empty list when the directory is
   * missing or unreadable (IOException is deliberately treated as "no files").
   */
  @NotNull
  private static List<Path> listXmlFiles(@NotNull Path dir) {
    try {
      try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, it -> it.getFileName().toString().endsWith(".xml") && Files.isRegularFile(it))) {
        return ContainerUtil.collect(stream.iterator());
      }
    }
    catch (IOException e) {
      return Collections.emptyList();
    }
  }

  /**
   * Loads a file-based (*.ipr) project. Workspace-level components are read from the
   * sibling *.iws file; everything else comes from the *.ipr root element.
   */
  private void loadFromIpr(@NotNull Path iprFile) {
    final Element iprRoot = loadRootElement(iprFile);
    String projectName = FileUtilRt.getNameWithoutExtension(iprFile.getFileName().toString());
    myProject.setName(projectName);
    Path iwsFile = iprFile.getParent().resolve(projectName + ".iws");
    Element iwsRoot = loadRootElement(iwsFile);
    JpsSdkType<?> projectSdkType = loadProjectRoot(iprRoot);
    for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
      for (JpsProjectExtensionSerializer serializer : extension.getProjectExtensionSerializers()) {
        // Serializers configured for the workspace file read from *.iws, others from *.ipr.
        Element rootTag = JpsProjectExtensionSerializer.WORKSPACE_FILE.equals(serializer.getConfigFileName()) ? iwsRoot : iprRoot;
        Element component = JDomSerializationUtil.findComponent(rootTag, serializer.getComponentName());
        if (component != null) {
          serializer.loadExtension(myProject, component);
        }
        else {
          serializer.loadExtensionWithDefaultSettings(myProject);
        }
      }
    }
    loadModules(JDomSerializationUtil.findComponent(iprRoot, "ProjectModuleManager"), projectSdkType, iwsFile);
    loadProjectLibraries(JDomSerializationUtil.findComponent(iprRoot, "libraryTable"));
    loadArtifacts(JDomSerializationUtil.findComponent(iprRoot, "ArtifactManager"));
    if (hasRunConfigurationSerializers()) {
      JpsRunConfigurationSerializer.loadRunConfigurations(myProject, JDomSerializationUtil.findComponent(iprRoot, "ProjectRunConfigurationManager"));
      JpsRunConfigurationSerializer.loadRunConfigurations(myProject, JDomSerializationUtil.findComponent(iwsRoot, "RunManager"));
    }
  }

  /** Delegates artifact deserialization to JpsArtifactSerializer; tolerates a null component. */
  private void loadArtifacts(@Nullable Element artifactManagerComponent) {
    JpsArtifactSerializer.loadArtifacts(myProject, artifactManagerComponent);
  }

  /**
   * Reads the project SDK reference from the ProjectRootManager component.
   *
   * @return the project SDK type, or null when no ProjectRootManager/SDK name is present
   */
  @Nullable
  private JpsSdkType<?> loadProjectRoot(@Nullable Element root) {
    JpsSdkType<?> sdkType = null;
    Element rootManagerElement = JDomSerializationUtil.findComponent(root, "ProjectRootManager");
    if (rootManagerElement != null) {
      String sdkName = rootManagerElement.getAttributeValue("project-jdk-name");
      String sdkTypeId = rootManagerElement.getAttributeValue("project-jdk-type");
      if (sdkName != null) {
        sdkType = JpsSdkTableSerializer.getSdkType(sdkTypeId);
        JpsSdkTableSerializer.setSdkReference(myProject.getSdkReferencesTable(), sdkName, sdkType);
      }
    }
    return sdkType;
  }

  /** Loads project-level libraries from a libraryTable component (null tolerated). */
  private void loadProjectLibraries(@Nullable Element libraryTableElement) {
    JpsLibraryTableSerializer.loadLibraries(libraryTableElement, myProject.getLibraryCollection());
  }

  /**
   * Collects module file paths from the ProjectModuleManager component, filters out unloaded
   * modules (per workspace.xml) and duplicates, then loads and registers the modules.
   */
  private void loadModules(@Nullable Element componentElement, final @Nullable JpsSdkType<?> projectSdkType, @NotNull Path workspaceFile) {
    Runnable timingLog = TimingLog.startActivity("loading modules");
    if (componentElement == null) {
      return;
    }
    Set<String> unloadedModules = new HashSet<>();
    if (!myLoadUnloadedModules && workspaceFile.toFile().exists()) {
      Element unloadedModulesList = JDomSerializationUtil.findComponent(loadRootElement(workspaceFile), "UnloadedModulesList");
      for (Element element : JDOMUtil.getChildren(unloadedModulesList, "module")) {
        unloadedModules.add(element.getAttributeValue("name"));
      }
    }
    final Set<Path> foundFiles = new THashSet<>();
    final List<Path> moduleFiles = new ArrayList<>();
    for (Element moduleElement : JDOMUtil.getChildren(componentElement.getChild("modules"), "module")) {
      final String path = moduleElement.getAttributeValue("filepath");
      if (path != null) {
        final Path file = Paths.get(path);
        if (foundFiles.add(file) && !unloadedModules.contains(getModuleName(file))) {
          moduleFiles.add(file);
        }
      }
    }
    List<JpsModule> modules = loadModules(moduleFiles, projectSdkType, myPathVariables);
    for (JpsModule module : modules) {
      myProject.addModule(module);
    }
    timingLog.run();
  }

  /**
   * Resolves a subdirectory of the external project configuration directory given by the
   * {@code external.project.config} system property; null when the property is unset/blank.
   */
  @Nullable
  private static Path resolveExternalProjectConfig(@NotNull String subDirName) {
    String externalProjectConfigDir = System.getProperty("external.project.config");
    return StringUtil.isEmptyOrSpaces(externalProjectConfigDir) ? null : Paths.get(externalProjectConfigDir, subDirName);
  }

  /**
   * Loads modules from the given *.iml files in two parallel phases on {@link #ourThreadPool}:
   * first parse all files (merging external module data when configured), then build the
   * JpsModule instances. Failures are rethrown wrapped in a RuntimeException.
   */
  @NotNull
  public static List<JpsModule> loadModules(@NotNull List<? extends Path> moduleFiles, @Nullable JpsSdkType<?> projectSdkType, @NotNull Map<String, String> pathVariables) {
    List<JpsModule> modules = new ArrayList<>();
    List<Future<Pair<Path, Element>>> futureModuleFilesContents = new ArrayList<>();
    Path externalModuleDir = resolveExternalProjectConfig("modules");
    if (externalModuleDir != null) {
      LOG.info("External project config dir is used for modules: " + externalModuleDir);
    }
    for (Path file : moduleFiles) {
      futureModuleFilesContents.add(ourThreadPool.submit(() -> {
        JpsMacroExpander expander = createModuleMacroExpander(pathVariables, file);
        Element data = loadRootElement(file, expander);
        if (externalModuleDir != null) {
          String externalName = FileUtilRt.getNameWithoutExtension(file.getFileName().toString()) + ".xml";
          Element externalData = loadRootElement(externalModuleDir.resolve(externalName), expander);
          if (externalData != null) {
            if (data == null) {
              data = externalData;
            }
            else {
              JDOMUtil.merge(data, externalData);
            }
          }
        }
        if (data == null) {
          LOG.info("Module '" + getModuleName(file) + "' is skipped: " + file.toAbsolutePath() + " doesn't exist");
        }
        return Pair.create(file, data);
      }));
    }
    try {
      // Classpath dirs must be gathered from ALL parsed files before any module is built,
      // because each module's classpath serializer receives the complete list.
      final List<String> classpathDirs = new ArrayList<>();
      for (Future<Pair<Path, Element>> moduleFile : futureModuleFilesContents) {
        Element rootElement = moduleFile.get().getSecond();
        if (rootElement != null) {
          final String classpathDir = rootElement.getAttributeValue(CLASSPATH_DIR_ATTRIBUTE);
          if (classpathDir != null) {
            classpathDirs.add(classpathDir);
          }
        }
      }
      List<Future<JpsModule>> futures = new ArrayList<>();
      for (final Future<Pair<Path, Element>> futureModuleFile : futureModuleFilesContents) {
        final Pair<Path, Element> moduleFile = futureModuleFile.get();
        if (moduleFile.getSecond() != null) {
          futures.add(ourThreadPool.submit(() -> loadModule(moduleFile.getFirst(), moduleFile.getSecond(), classpathDirs, projectSdkType, pathVariables)));
        }
      }
      for (Future<JpsModule> future : futures) {
        JpsModule module = future.get();
        if (module != null) {
          modules.add(module);
        }
      }
      return modules;
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Builds a single JpsModule from its parsed *.iml root element: type, options, root model
   * (or serialized classpath when the "classpath" attribute is present), and facets merged
   * from the regular and external facet manager components.
   */
  @NotNull
  private static JpsModule loadModule(@NotNull Path file, @NotNull Element moduleRoot, List<String> paths, @Nullable JpsSdkType<?> projectSdkType, Map<String, String> pathVariables) {
    String name = getModuleName(file);
    final String typeId = moduleRoot.getAttributeValue("type");
    final JpsModulePropertiesSerializer<?> serializer = getModulePropertiesSerializer(typeId);
    final JpsModule module = createModule(name, moduleRoot, serializer);
    module.getContainer().setChild(JpsModuleSerializationDataExtensionImpl.ROLE, new JpsModuleSerializationDataExtensionImpl(file.getParent()));
    for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
      extension.loadModuleOptions(module, moduleRoot);
    }
    String baseModulePath = FileUtil.toSystemIndependentName(file.getParent().toString());
    String classpath = moduleRoot.getAttributeValue(CLASSPATH_ATTRIBUTE);
    if (classpath == null) {
      try {
        JpsModuleRootModelSerializer.loadRootModel(module, JDomSerializationUtil.findComponent(moduleRoot, "NewModuleRootManager"), projectSdkType);
      }
      catch (JpsSerializationFormatException e) {
        // A malformed root model leaves the module with an empty root model rather than failing the load.
        LOG.warn("Failed to load module configuration from " + file.toString() + ": " + e.getMessage(), e);
      }
    }
    else {
      for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
        JpsModuleClasspathSerializer classpathSerializer = extension.getClasspathSerializer();
        if (classpathSerializer != null && classpathSerializer.getClasspathId().equals(classpath)) {
          String classpathDir = moduleRoot.getAttributeValue(CLASSPATH_DIR_ATTRIBUTE);
          final JpsMacroExpander expander = createModuleMacroExpander(pathVariables, file);
          classpathSerializer.loadClasspath(module, classpathDir, baseModulePath, expander, paths, projectSdkType);
        }
      }
    }
    Element facetsTag = JDomSerializationUtil.findComponent(moduleRoot, JpsFacetSerializer.FACET_MANAGER_COMPONENT_NAME);
    Element externalFacetsTag = JDomSerializationUtil.findComponent(moduleRoot, "ExternalFacetManager");
    Element mergedFacetsTag;
    if (facetsTag == null) {
      mergedFacetsTag = externalFacetsTag;
    }
    else if (externalFacetsTag != null) {
      mergedFacetsTag = JDOMUtil.deepMerge(facetsTag, externalFacetsTag);
    }
    else {
      mergedFacetsTag = facetsTag;
    }
    JpsFacetSerializer.loadFacets(module, mergedFacetsTag);
    return module;
  }

  /** Module name is the *.iml file name without its extension. */
  @NotNull
  private static String getModuleName(@NotNull Path file) {
    return FileUtilRt.getNameWithoutExtension(file.getFileName().toString());
  }

  /** Creates a macro expander that resolves $MODULE_DIR$ against the module file's directory. */
  static JpsMacroExpander createModuleMacroExpander(final Map<String, String> pathVariables, @NotNull Path moduleFile) {
    final JpsMacroExpander expander = new JpsMacroExpander(pathVariables);
    String moduleDirPath = PathMacroUtil.getModuleDir(moduleFile.toAbsolutePath().toString());
    if (moduleDirPath != null) {
      expander.addFileHierarchyReplacements(PathMacroUtil.MODULE_DIR_MACRO_NAME, new File(FileUtil.toSystemDependentName(moduleDirPath)));
    }
    return expander;
  }

  /** Instantiates a module of the serializer's type with properties read from its component, if any. */
  private static <P extends JpsElement> JpsModule createModule(String name, Element moduleRoot, JpsModulePropertiesSerializer<P> loader) {
    String componentName = loader.getComponentName();
    Element component = componentName != null ? JDomSerializationUtil.findComponent(moduleRoot, componentName) : null;
    return JpsElementFactory.getInstance().createModule(name, loader.getType(), loader.loadProperties(component));
  }

  /**
   * Finds a serializer for the module type id among the registered extensions; falls back to
   * a plain Java module serializer when the type is unknown.
   */
  private static JpsModulePropertiesSerializer<?> getModulePropertiesSerializer(@Nullable String typeId) {
    for (JpsModelSerializerExtension extension : JpsModelSerializerExtension.getExtensions()) {
      for (JpsModulePropertiesSerializer<?> loader : extension.getModulePropertiesSerializers()) {
        if (loader.getTypeId().equals(typeId)) {
          return loader;
        }
      }
    }
    return new JpsModulePropertiesSerializer<JpsDummyElement>(JpsJavaModuleType.INSTANCE, "JAVA_MODULE", null) {
      @Override
      public JpsDummyElement loadProperties(@Nullable Element componentElement) {
        return JpsElementFactory.getInstance().createDummyElement();
      }

      @Override
      public void saveProperties(@NotNull JpsDummyElement properties, @NotNull Element componentElement) {
      }
    };
  }
}
/* * Copyright (C) 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gapid.models; import static com.google.gapid.proto.service.memory.Memory.PoolNames.Application_VALUE; import static com.google.gapid.util.Paths.command; import static com.google.gapid.util.Paths.commandTree; import static com.google.gapid.util.Paths.lastCommand; import static com.google.gapid.util.Paths.observationsAfter; import static com.google.gapid.widgets.Widgets.submitIfNotDisposed; import static java.util.logging.Level.FINE; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import com.google.gapid.models.ApiContext.FilteringContext; import com.google.gapid.proto.device.Device.Instance; import com.google.gapid.proto.service.Service; import com.google.gapid.proto.service.api.API; import com.google.gapid.proto.service.path.Path; import com.google.gapid.rpc.Rpc; import com.google.gapid.rpc.RpcException; import com.google.gapid.rpc.UiCallback; import com.google.gapid.server.Client; import com.google.gapid.util.Events; import com.google.gapid.util.Loadable; import com.google.gapid.util.MoreFutures; import com.google.gapid.util.Paths; import org.eclipse.swt.widgets.Shell; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import 
java.util.function.Supplier;
import java.util.logging.Logger;

/**
 * Model containing the API commands of the capture.
 *
 * <p>Maintains the lazily-loaded command tree for the current capture/context/device and the
 * currently selected {@link CommandIndex}. Selections made before the tree is loaded (or that
 * lack a resolved tree node) are re-resolved via RPC once the data is available.
 */
public class CommandStream extends DeviceDependentModel.ForPath<CommandStream.Node, Void, CommandStream.Listener>
    implements ApiContext.Listener, Capture.Listener, Devices.Listener {
  protected static final Logger LOG = Logger.getLogger(CommandStream.class.getName());

  private final Capture capture;
  private final ApiContext context;
  private final ConstantSets constants;
  // Current selection; its node may be null until resolved against the loaded tree.
  private CommandIndex selection;

  public CommandStream(Shell shell, Analytics analytics, Client client, Capture capture, Devices devices, ApiContext context, ConstantSets constants) {
    super(LOG, shell, analytics, client, Listener.class, devices);
    this.capture = capture;
    this.context = context;
    this.constants = constants;
    capture.addListener(this);
    devices.addListener(this);
    context.addListener(this);
  }

  @Override
  public void onCaptureLoadingStart(boolean maintainState) {
    // Drop the selection unless the caller asked to keep UI state across the reload.
    if (!maintainState) {
      selection = null;
    }
    reset();
  }

  @Override
  public void onCaptureLoaded(Loadable.Message error) {
    if (error == null && selection != null) {
      // Re-point the remembered selection at the newly loaded capture, then re-resolve its node.
      selection = selection.withCapture(capture.getData().path);
      if (isLoaded()) {
        resolve(selection.getCommand(), node -> selectCommands(selection.withNode(node), true));
      }
    }
  }

  @Override
  public void onReplayDeviceChanged(Instance dev) {
    if (selection != null && selection.getNode() != null) {
      // Clear the node, so the selection will be re-resolved once the context has updated.
      selection = selection.withNode(null);
    }
  }

  @Override
  public void onContextsLoaded() {
    onContextSelected(context.getSelectedContext());
  }

  @Override
  public void onContextSelected(FilteringContext ctx) {
    if (selection != null && selection.getNode() != null) {
      // Clear the node, so the selection will be re-resolved once the context has updated.
      selection = selection.withNode(null);
    }
    load(commandTree(capture.getData().path, ctx), false);
  }

  // Fetches the command tree for the path, then its root node, and wraps them in a RootNode.
  @Override
  protected ListenableFuture<Node> doLoad(Path.Any path, Path.Device device) {
    return MoreFutures.transformAsync(client.get(path, device), tree ->
        MoreFutures.transform(client.get(commandTree(tree.getCommandTree().getRoot()), device), val ->
            new RootNode(device, tree.getCommandTree().getRoot().getTree(), val.getCommandTreeNode())));
  }

  /**
   * Asynchronously loads the given tree node's data (and, for leaf command groups, the last
   * command's details). Returns null if the node is already loaded.
   */
  public ListenableFuture<Node> load(Node node) {
    return node.load(shell, () -> MoreFutures.transformAsync(
        client.get(commandTree(node.getPath(Path.CommandTreeNode.newBuilder())), node.device), v1 -> {
          Service.CommandTreeNode data = v1.getCommandTreeNode();
          if (data.getGroup().isEmpty() && data.hasCommands()) {
            return MoreFutures.transform(loadCommand(lastCommand(data.getCommands()), node.device),
                cmd -> new NodeData(data, cmd));
          }
          return Futures.immediateFuture(new NodeData(data, null));
        }));
  }

  /** Loads a command and ensures its constant sets are cached before returning it. */
  public ListenableFuture<API.Command> loadCommand(Path.Command path, Path.Device device) {
    return MoreFutures.transformAsync(client.get(command(path), device), value ->
        MoreFutures.transform(constants.loadConstants(value.getCommand()), ignore -> value.getCommand()));
  }

  /** Loads the node and invokes {@code callback} on the UI thread when done (no-op if already loaded). */
  public void load(Node node, Runnable callback) {
    ListenableFuture<Node> future = load(node);
    if (future != null) {
      Rpc.listen(future, new UiCallback<Node, Node>(shell, LOG) {
        @Override
        protected Node onRpcThread(Rpc.Result<Node> result) throws RpcException, ExecutionException {
          return result.get();
        }

        @Override
        protected void onUiThread(Node result) {
          callback.run();
        }
      });
    }
  }

  /** Streams a search below {@code parent}, completing the future with the first match. */
  public ListenableFuture<Service.FindResponse> search(CommandStream.Node parent, String text, boolean regex) {
    SettableFuture<Service.FindResponse> result = SettableFuture.create();
    client.streamSearch(searchRequest(parent, text, regex), result::set);
    return result;
  }

  // Builds a wrapping, single-result find request scoped to the parent node's subtree.
  private static Service.FindRequest searchRequest(CommandStream.Node parent, String text, boolean regex) {
    return Service.FindRequest.newBuilder()
        .setCommandTreeNode(parent.getPath(Path.CommandTreeNode.newBuilder()))
        .setText(text)
        .setIsRegex(regex)
        .setMaxItems(1)
        .setWrap(true)
        .setConfig(Path.ResolveConfig.newBuilder()
            .setReplayDevice(parent.device))
        .build();
  }

  @Override
  protected void fireLoadStartEvent() {
    listeners.fire().onCommandsLoadingStart();
  }

  @Override
  protected void fireLoadedEvent() {
    listeners.fire().onCommandsLoaded();
    // Re-apply any selection that was remembered while the tree was (re)loading.
    if (selection != null) {
      selectCommands(selection, true);
    }
  }

  /** Returns the current selection, or null if none exists or its node is not yet resolved. */
  public CommandIndex getSelectedCommands() {
    return (selection != null && selection.getNode() != null) ? selection : null;
  }

  /**
   * Selects the given index, resolving its tree node first when necessary. With
   * {@code force}, re-fires the selection event even if the index is unchanged.
   */
  public void selectCommands(CommandIndex index, boolean force) {
    if (!force && Objects.equal(selection, index)) {
      return;
    } else if (!isLoaded()) {
      // Remember the selection; fireLoadedEvent will apply it once loading completes.
      this.selection = index;
      return;
    }
    RootNode root = (RootNode)getData();
    if (index.getNode() == null) {
      resolve(index.getCommand(), node -> selectCommands(index.withNode(node), force));
    } else if (!index.getNode().getTree().equals(root.tree)) {
      // TODO
      throw new UnsupportedOperationException("This is not yet supported, needs API clarification");
    } else {
      selection = index;
      listeners.fire().onCommandsSelected(selection);
    }
  }

  // Resolves a command path to its tree node via RPC and hands the result to cb on the UI thread.
  private void resolve(Path.Command command, Consumer<Path.CommandTreeNode> cb) {
    RootNode root = (RootNode)getData();
    Rpc.listen(client.get(commandTree(root.tree, command), root.device),
        new UiCallback<Service.Value, Path.CommandTreeNode>(shell, LOG) {
      @Override
      protected Path.CommandTreeNode onRpcThread(Rpc.Result<Service.Value> result) throws RpcException, ExecutionException {
        Service.Value value = result.get();
        LOG.log(FINE, "Resolved selection to {0}", value);
        return value.getPath().getCommandTreeNode();
      }

      @Override
      protected void onUiThread(Path.CommandTreeNode result) {
        cb.accept(result);
      }
    });
  }

  /** Fetches the application-pool memory observations after the indexed command. */
  public ListenableFuture<Service.Memory> getMemory(Path.Device device, CommandIndex index) {
    return MoreFutures.transform(client.get(observationsAfter(index, Application_VALUE), device), v -> {
      return v.getMemory();
    });
  }

  /**
   * An index into the command stream, representing a specific "point in time" in the trace.
   */
  public static class CommandIndex implements Comparable<CommandIndex> {
    private final Path.Command command;
    // May be null until the command has been resolved to a tree node.
    private final Path.CommandTreeNode node;
    private final boolean group;

    private CommandIndex(Path.Command command, Path.CommandTreeNode node, boolean group) {
      this.command = command;
      this.node = node;
      this.group = group;
    }

    /**
     * Create an index pointing to the given command and node.
     */
    public static CommandIndex forNode(Path.Command command, Path.CommandTreeNode node) {
      return new CommandIndex(command, node, false);
    }

    /**
     * Create an index pointing to the given command, without knowing the tree node.
     * The tree node is then resolved when it is needed.
     */
    public static CommandIndex forCommand(Path.Command command) {
      return new CommandIndex(command, null, false);
    }

    /**
     * Same as {@link #forCommand}, except that group selection is to be preferred when
     * resolving to a tree node.
     */
    public static CommandIndex forGroup(Path.Command command) {
      return new CommandIndex(command, null, true);
    }

    /** Returns a copy of this index with the resolved tree node attached. */
    public CommandIndex withNode(Path.CommandTreeNode newNode) {
      return new CommandIndex(command, newNode, group);
    }

    /** Returns a copy re-targeted at the given capture; the node is reset and must be re-resolved. */
    public CommandIndex withCapture(Path.Capture capture) {
      return new CommandIndex(command.toBuilder().setCapture(capture).build(), null, group);
    }

    public Path.Command getCommand() {
      return command;
    }

    public Path.CommandTreeNode getNode() {
      return node;
    }

    public boolean isGroup() {
      return group;
    }

    @Override
    public String toString() {
      return command.getIndicesList().toString();
    }

    @Override
    public int hashCode() {
      return command.getIndicesList().hashCode();
    }

    // Equality is based solely on the command indices; node and group are ignored.
    @Override
    public boolean equals(Object obj) {
      if (obj == this) {
        return true;
      } else if (!(obj instanceof CommandIndex)) {
        return false;
      }
      return command.getIndicesList().equals(((CommandIndex)obj).command.getIndicesList());
    }

    @Override
    public int compareTo(CommandIndex o) {
      return Paths.compare(command, o.command);
    }
  }

  /**
   * A lazily-loaded node in the command tree. Children are created on demand; node data and the
   * associated command are populated asynchronously via {@link #load}.
   */
  public static class Node extends DeviceDependentModel.Data {
    private final Node parent;
    private final int index;
    private Node[] children;
    private Service.CommandTreeNode data;
    private API.Command command;
    // In-flight load, if any; cleared once the data arrives.
    private ListenableFuture<Node> loadFuture;

    public Node(Path.Device device, Service.CommandTreeNode data) {
      super(device);
      this.parent = null;
      this.index = 0;
      this.data = data;
    }

    public Node(Node parent, int index) {
      super(parent.device);
      this.parent = parent;
      this.index = index;
    }

    public Node getParent() {
      return parent;
    }

    public int getChildCount() {
      return (data == null) ? 0 : (int)data.getNumChildren();
    }

    public Node getChild(int child) {
      return getOrCreateChildren()[child];
    }

    public Node[] getChildren() {
      return getOrCreateChildren().clone();
    }

    private Node[] getOrCreateChildren() {
      if (children == null) {
        Preconditions.checkState(data != null, "Querying children before loaded");
        children = new Node[(int)data.getNumChildren()];
        for (int i = 0; i < children.length; i++) {
          children[i] = new Node(this, i);
        }
      }
      return children;
    }

    public boolean isLastChild() {
      return parent == null || (parent.getChildCount() - 1 == index);
    }

    public Service.CommandTreeNode getData() {
      return data;
    }

    public API.Command getCommand() {
      return command;
    }

    // Builds the node's path by appending this node's index to the parent's path.
    public Path.CommandTreeNode.Builder getPath(Path.CommandTreeNode.Builder path) {
      return parent.getPath(path).addIndices(index);
    }

    public CommandIndex getIndex() {
      return (data == null) ? null :
          CommandIndex.forNode(data.getRepresentation(), getPath(Path.CommandTreeNode.newBuilder()).build());
    }

    /**
     * Loads this node's data via {@code loader}. Returns null if already loaded, the pending
     * future if a load is in flight, or a new future otherwise.
     */
    public ListenableFuture<Node> load(Shell shell, Supplier<ListenableFuture<NodeData>> loader) {
      if (data != null) {
        // Already loaded.
        return null;
      } else if (loadFuture != null && !loadFuture.isCancelled()) {
        return loadFuture;
      }
      return loadFuture = MoreFutures.transformAsync(loader.get(), newData ->
          submitIfNotDisposed(shell, () -> {
            data = newData.data;
            command = newData.command;
            loadFuture = null; // Don't hang on to listeners.
            return Node.this;
          }));
    }

    @Override
    public boolean equals(Object obj) {
      if (obj == this) {
        return true;
      } else if (!(obj instanceof Node)) {
        return false;
      }
      Node n = (Node)obj;
      return index == n.index && parent.equals(n.parent);
    }

    @Override
    public int hashCode() {
      return parent.hashCode() * 31 + index;
    }

    @Override
    public String toString() {
      return parent + "/" + index + (data == null ? "" : " " + data.getGroup() + data.getCommands().getToList());
    }
  }

  // The tree root; overrides path building and identity to be based on the tree id and device.
  private static class RootNode extends Node {
    public final Path.ID tree;

    public RootNode(Path.Device device, Path.ID tree, Service.CommandTreeNode data) {
      super(device, data);
      this.tree = tree;
    }

    @Override
    public Path.CommandTreeNode.Builder getPath(Path.CommandTreeNode.Builder path) {
      return path.setTree(tree);
    }

    @Override
    public String toString() {
      return "Root";
    }

    @Override
    public boolean equals(Object obj) {
      if (obj == this) {
        return true;
      } else if (!(obj instanceof RootNode)) {
        return false;
      }
      RootNode n = (RootNode)obj;
      return device.equals(n.device) && tree.equals(n.tree);
    }

    @Override
    public int hashCode() {
      return device.hashCode() * 31 + tree.hashCode();
    }
  }

  // Immutable pair of a tree node's data and its (possibly null) last command.
  private static class NodeData {
    public final Service.CommandTreeNode data;
    public final API.Command command;

    public NodeData(Service.CommandTreeNode data, API.Command command) {
      this.data = data;
      this.command = command;
    }
  }

  public interface Listener extends Events.Listener {
    /**
     * Event indicating that the tree root has changed and is being loaded.
     */
    public default void onCommandsLoadingStart() { /* empty */ }

    /**
     * Event indicating that the tree root has finished loading.
     */
    public default void onCommandsLoaded() { /* empty */ }

    /**
     * Event indicating that the currently selected command range has changed.
     */
    @SuppressWarnings("unused")
    public default void onCommandsSelected(CommandIndex selection) { /* empty */ }
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.examples.bpmn.usertask.taskcandidate;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.idm.api.Group;
import org.flowable.idm.api.User;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * Tests candidate user/group resolution for user tasks: claiming, candidate
 * queries, involved-user queries, and candidate expressions.
 *
 * @author Joram Barrez, Saeid Mirzaei
 */
public class TaskCandidateTest extends PluggableFlowableTestCase {

    private static final String KERMIT = "kermit";

    private static final String GONZO = "gonzo";

    /**
     * Creates the identity fixture used by all tests: three groups
     * (accountancy, management, sales), kermit in accountancy, and gonzo in
     * all three groups.
     */
    @BeforeEach
    public void setUp() throws Exception {
        Group accountants = identityService.newGroup("accountancy");
        identityService.saveGroup(accountants);
        Group managers = identityService.newGroup("management");
        identityService.saveGroup(managers);
        Group sales = identityService.newGroup("sales");
        identityService.saveGroup(sales);

        User kermit = identityService.newUser(KERMIT);
        identityService.saveUser(kermit);
        identityService.createMembership(KERMIT, "accountancy");

        User gonzo = identityService.newUser(GONZO);
        identityService.saveUser(gonzo);
        identityService.createMembership(GONZO, "management");
        identityService.createMembership(GONZO, "accountancy");
        identityService.createMembership(GONZO, "sales");
    }

    /**
     * Removes the users and groups created in {@link #setUp()}.
     */
    @AfterEach
    public void tearDown() throws Exception {
        identityService.deleteUser(KERMIT);
        identityService.deleteUser(GONZO);
        identityService.deleteGroup("sales");
        identityService.deleteGroup("accountancy");
        identityService.deleteGroup("management");
    }

    /**
     * A task with a single candidate group is visible to group members,
     * disappears from the candidate list once claimed, and the process ends
     * when the task is completed.
     */
    @Test
    @Deployment
    public void testSingleCandidateGroup() {
        // Deploy and start process
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("singleCandidateGroup");

        // org.flowable.task.service.Task should not yet be assigned to kermit
        List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().taskAssignee(KERMIT).list();
        assertTrue(tasks.isEmpty());

        // The task should be visible in the candidate task list
        tasks = taskService.createTaskQuery().taskCandidateUser(KERMIT).list();
        assertEquals(1, tasks.size());
        org.flowable.task.api.Task task = tasks.get(0);
        assertEquals("Pay out expenses", task.getName());

        // Claim the task
        taskService.claim(task.getId(), KERMIT);

        // The task must now be gone from the candidate task list
        tasks = taskService.createTaskQuery().taskCandidateUser(KERMIT).list();
        assertTrue(tasks.isEmpty());

        // The task will be visible on the personal task list
        tasks = taskService.createTaskQuery().taskAssignee(KERMIT).list();
        assertEquals(1, tasks.size());
        task = tasks.get(0);
        assertEquals("Pay out expenses", task.getName());

        // Completing the task ends the process
        taskService.complete(task.getId());

        assertProcessEnded(processInstance.getId());
    }

    /**
     * A task with multiple candidate groups is visible to members of all of
     * those groups (and not to others), and claiming removes it from every
     * candidate list at once.
     */
    @Test
    @Deployment
    public void testMultipleCandidateGroups() {
        // Deploy and start process
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("multipleCandidatesGroup");

        // org.flowable.task.service.Task should not yet be assigned to anyone
        List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().taskAssignee(KERMIT).list();
        assertTrue(tasks.isEmpty());
        tasks = taskService.createTaskQuery().taskAssignee(GONZO).list();
        assertTrue(tasks.isEmpty());

        // The task should be visible in the candidate task list of Gonzo and
        // Kermit
        // and anyone in the management/accountancy group
        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(KERMIT).list().size());
        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(GONZO).list().size());
        assertEquals(1, taskService.createTaskQuery().taskCandidateGroup("management").count());
        assertEquals(1, taskService.createTaskQuery().taskCandidateGroup("accountancy").count());
        assertEquals(0, taskService.createTaskQuery().taskCandidateGroup("sales").count());

        // Gonzo claims the task
        tasks = taskService.createTaskQuery().taskCandidateUser(GONZO).list();
        org.flowable.task.api.Task task = tasks.get(0);
        assertEquals("Approve expenses", task.getName());
        taskService.claim(task.getId(), GONZO);

        // The task must now be gone from the candidate task lists
        assertTrue(taskService.createTaskQuery().taskCandidateUser(KERMIT).list().isEmpty());
        assertTrue(taskService.createTaskQuery().taskCandidateUser(GONZO).list().isEmpty());
        assertEquals(0, taskService.createTaskQuery().taskCandidateGroup("management").count());

        // The task will be visible on the personal task list of Gonzo
        assertEquals(1, taskService.createTaskQuery().taskAssignee(GONZO).count());

        // But not on the personal task list of (for example) Kermit
        assertEquals(0, taskService.createTaskQuery().taskAssignee(KERMIT).count());

        // Completing the task ends the process
        taskService.complete(task.getId());

        assertProcessEnded(processInstance.getId());
    }

    /**
     * Multiple candidate users are all candidates; involved-user queries with
     * variable inclusion return the process and task-local variables, also
     * after adding an extra identity link.
     */
    @Test
    @Deployment
    public void testMultipleCandidateUsers() {
        runtimeService.startProcessInstanceByKey("multipleCandidateUsersExample",
                Collections.singletonMap("Variable", (Object) "var"));

        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(GONZO).list().size());
        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(KERMIT).list().size());

        List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().taskInvolvedUser(KERMIT).list();
        assertEquals(1, tasks.size());
        org.flowable.task.api.Task task = tasks.get(0);
        taskService.setVariableLocal(task.getId(), "taskVar", 123);

        tasks = taskService.createTaskQuery().taskInvolvedUser(KERMIT).includeProcessVariables().includeTaskLocalVariables().list();
        task = tasks.get(0);
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());

        taskService.addUserIdentityLink(task.getId(), GONZO, "test");

        tasks = taskService.createTaskQuery().taskInvolvedUser(GONZO).includeProcessVariables().includeTaskLocalVariables().list();
        assertEquals(1, tasks.size());
        // FIX: assert against the task returned by the GONZO query; previously
        // these assertions ran on the stale reference from the KERMIT query,
        // so the GONZO result's variables were never actually checked.
        task = tasks.get(0);
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
    }

    /**
     * A task may mix candidate users and candidate groups; both kinds of
     * candidates see the task.
     */
    @Test
    @Deployment
    public void testMixedCandidateUserAndGroup() {
        runtimeService.startProcessInstanceByKey("mixedCandidateUserAndGroupExample");

        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(GONZO).list().size());
        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(KERMIT).list().size());
    }

    // test if candidate group works with expression, when there is a function
    // with one parameter
    @Test
    @Deployment
    public void testCandidateExpressionOneParam() {
        Map<String, Object> params = new HashMap<>();
        params.put("testBean", new TestBean());

        runtimeService.startProcessInstanceByKey("candidateWithExpression", params);
        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(KERMIT).list().size());
    }

    // test if candidate group works with expression, when there is a function
    // with two parameters
    @Test
    @Deployment
    public void testCandidateExpressionTwoParams() {
        Map<String, Object> params = new HashMap<>();
        params.put("testBean", new TestBean());

        runtimeService.startProcessInstanceByKey("candidateWithExpression", params);
        assertEquals(1, taskService.createTaskQuery().taskCandidateUser(KERMIT).count());
        assertEquals(1, taskService.createTaskQuery().taskCandidateGroup("sales").count());
    }
}
/* * Copyright (c) 2010, Sun Microsystems, Inc. Copyright (c) 2010, The Storage Networking Industry * Association. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions * and the following disclaimer in the documentation and/or other materials provided with the * distribution. * * Neither the name of The Storage Networking Industry Association (SNIA) nor the names of its * contributors may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.snia.cdmiserver.model; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.snia.cdmiserver.util.MediaTypes; /** * <p> * Representation of a CDMI <em>DataObject</em>. 
* </p> */ public class DataObject extends CdmiObject { @SuppressWarnings("unused") private static final Logger LOG = LoggerFactory.getLogger(DataObject.class); private String objectType; private String objectName; private String parentUri; private String parentId; private String domainUri; private String capabilitiesUri; private String completionStatus; private String percentComplete; private JSONObject metadata; private String mimetype; private String deserializevalue; private String reference; private String move; private String copy; private String deserialize; private String serialize; private String valuetransferencoding; private String value; private DataObject() {} /** * Creates a new data object with the mandatory fields. * * @param objectName the container's name * @param parentUri the container's parent URI * @param parentId the container's parent objectId */ public DataObject(String objectName, String parentUri, String parentId) { super(); this.objectName = objectName; this.parentUri = parentUri; this.parentId = parentId; // default values this.objectType = MediaTypes.DATA_OBJECT; this.domainUri = "/cdmi_domains"; this.capabilitiesUri = "/cdmi_capabilities/dataobject"; this.completionStatus = "Processing"; this.mimetype = "application/octet-stream"; this.metadata = new JSONObject(); } public String getObjectType() { return objectType; } public void setObjectType(String objectType) { this.objectType = objectType; } public String getObjectName() { return objectName; } public void setObjectName(String objectName) { this.objectName = objectName; } public String getParentUri() { return parentUri; } public void setParentUri(String parentUri) { this.parentUri = parentUri; } public String getParentId() { return parentId; } public void setParentId(String parentId) { this.parentId = parentId; } public String getDomainUri() { return domainUri; } public void setDomainUri(String domainUri) { this.domainUri = domainUri; } public String getCapabilitiesUri() { return 
capabilitiesUri; } public void setCapabilitiesUri(String capabilitiesUri) { this.capabilitiesUri = capabilitiesUri; } public String getCompletionStatus() { return completionStatus; } public void setCompletionStatus(String completionStatus) { this.completionStatus = completionStatus; } public String getPercentComplete() { return percentComplete; } public void setPercentComplete(String percentComplete) { this.percentComplete = percentComplete; } public JSONObject getMetadata() { return metadata; } public void setMetadata(JSONObject metadata) { this.metadata = metadata; } public String getDeserializedvalue() { return deserializevalue; } public void setDeserializedvalue(String deserializevalue) { this.deserializevalue = deserializevalue; } public String getReference() { return reference; } public void setReference(String reference) { this.reference = reference; } public String getMove() { return move; } public void setMove(String move) { this.move = move; } public String getCopy() { return copy; } public void setCopy(String copy) { this.copy = copy; } public String getDeserialize() { return deserialize; } public void setDeserialize(String deserialize) { this.deserialize = deserialize; } public String getMimetype() { return mimetype; } public void setMimetype(String mimetype) { this.mimetype = mimetype; } public String getSerialize() { return serialize; } public void setSerialize(String serialize) { this.serialize = serialize; } public String getValuetransferencoding() { return valuetransferencoding; } public void setValuetransferencoding(String valuetransferencoding) { this.valuetransferencoding = valuetransferencoding; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } /** * Deserializes a data object from the given JSON object. 
* * @param json a {@link JSONObject} */ public static DataObject fromJson(JSONObject json) { DataObject dataObject = new DataObject(); if (json.has("objectID")) { dataObject.setObjectId(json.optString("objectID")); } if (json.has("objectName")) { dataObject.objectName = json.optString("objectName"); } if (json.has("parentURI")) { dataObject.parentUri = json.optString("parentURI"); } if (json.has("parentID")) { dataObject.parentId = json.optString("parentID"); } // default values dataObject.objectType = MediaTypes.DATA_OBJECT; dataObject.domainUri = json.optString("domainURI", "/cdmi_domains"); dataObject.capabilitiesUri = json.optString("capabilitiesURI", "/cdmi_capabilities/container"); dataObject.completionStatus = json.optString("completionStatus", "Processing"); dataObject.mimetype = json.optString("mimetype", "application/octet-stream"); dataObject.metadata = json.optJSONObject("metadata"); if (dataObject.metadata == null) { dataObject.metadata = new JSONObject(); } // optional values if (json.has("percentComplete")) { dataObject.percentComplete = json.optString("percentComplete"); } if (json.has("serialize")) { dataObject.serialize = json.optString("serialize"); } if (json.has("valuetransferencoding")) { dataObject.valuetransferencoding = json.optString("valuetransferencoding"); } if (json.has("value")) { dataObject.value = json.optString("value"); } if (json.has("deserializevalue")) { dataObject.deserializevalue = json.optString("deserializevalue"); } if (json.has("reference")) { dataObject.reference = json.optString("reference"); } if (json.has("move")) { dataObject.move = json.optString("move"); } if (json.has("copy")) { dataObject.copy = json.optString("copy"); } if (json.has("deserialize")) { dataObject.deserialize = json.optString("deserialize"); } return dataObject; } @Override public JSONObject toJson() { JSONObject json = super.toJson(); json.putOpt("objectType", objectType); json.putOpt("objectName", objectName); json.putOpt("parentURI", parentUri); 
json.putOpt("parentID", parentId); json.putOpt("domainURI", domainUri); json.putOpt("capabilitiesURI", capabilitiesUri); json.putOpt("completionStatus", completionStatus); json.putOpt("percentComplete", percentComplete); json.putOpt("metadata", metadata); json.putOpt("mimetype", mimetype); json.putOpt("serialize", serialize); json.putOpt("valuetransferencoding", valuetransferencoding); json.putOpt("value", value); json.putOpt("deserializevalue", deserializevalue); json.putOpt("reference", reference); json.putOpt("move", move); json.putOpt("copy", copy); json.putOpt("deserialize", deserialize); return json; } @Override public String toString() { return "DataObject [objectId=" + getObjectId() + ", " + (objectType != null ? "objectType=" + objectType + ", " : "") + (objectName != null ? "objectName=" + objectName + ", " : "") + (parentUri != null ? "parentUri=" + parentUri + ", " : "") + (parentId != null ? "parentId=" + parentId + ", " : "") + (domainUri != null ? "domainUri=" + domainUri + ", " : "") + (capabilitiesUri != null ? "capabilitiesUri=" + capabilitiesUri + ", " : "") + (completionStatus != null ? "completionStatus=" + completionStatus + ", " : "") + (percentComplete != null ? "percentComplete=" + percentComplete + ", " : "") + (metadata != null ? "metadata=" + metadata + ", " : "") + (mimetype != null ? "mimetype=" + mimetype + ", " : "") + (deserializevalue != null ? "deserializevalue=" + deserializevalue + ", " : "") + (reference != null ? "reference=" + reference + ", " : "") + (move != null ? "move=" + move + ", " : "") + (copy != null ? "copy=" + copy + ", " : "") + (deserialize != null ? "deserialize=" + deserialize + ", " : "") + (serialize != null ? "serialize=" + serialize + ", " : "") + (valuetransferencoding != null ? "valuetransferencoding=" + valuetransferencoding + ", " : "") + (value != null ? "value=" + value : "") + "]"; } }
/* * $Id$ * IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved. * * http://izpack.org/ * http://izpack.codehaus.org/ * * Copyright 2002 Jan Blok * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.izforge.izpack.installer.gui; import static com.izforge.izpack.api.GuiId.BUTTON_HELP; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.Cursor; import java.awt.Dimension; import java.awt.Font; import java.awt.GraphicsEnvironment; import java.awt.GridLayout; import java.awt.Point; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusAdapter; import java.awt.event.KeyAdapter; import java.awt.event.MouseAdapter; import java.awt.event.MouseMotionAdapter; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.BorderFactory; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JProgressBar; import javax.swing.JSeparator; import javax.swing.SwingUtilities; import javax.swing.WindowConstants; import javax.swing.border.TitledBorder; import javax.swing.text.JTextComponent; import com.izforge.izpack.api.data.Info; import 
com.izforge.izpack.api.data.LocaleDatabase; import com.izforge.izpack.api.data.Panel; import com.izforge.izpack.api.data.Variables; import com.izforge.izpack.api.event.ProgressListener; import com.izforge.izpack.api.exception.ResourceException; import com.izforge.izpack.api.exception.ResourceNotFoundException; import com.izforge.izpack.api.resource.Locales; import com.izforge.izpack.api.resource.Messages; import com.izforge.izpack.api.rules.RulesEngine; import com.izforge.izpack.core.resource.ResourceManager; import com.izforge.izpack.gui.ButtonFactory; import com.izforge.izpack.gui.EtchedLineBorder; import com.izforge.izpack.gui.IconsDatabase; import com.izforge.izpack.gui.log.Log; import com.izforge.izpack.installer.base.InstallerBase; import com.izforge.izpack.installer.data.GUIInstallData; import com.izforge.izpack.installer.data.UninstallData; import com.izforge.izpack.installer.data.UninstallDataWriter; import com.izforge.izpack.installer.debugger.Debugger; import com.izforge.izpack.installer.unpacker.IUnpacker; import com.izforge.izpack.util.Debug; import com.izforge.izpack.util.Housekeeper; /** * The IzPack installer frame. * * @author Julien Ponge created October 27, 2002 * @author Fabrice Mirabile added fix for alert window on cross button, July 06 2005 * @author Dennis Reil, added RulesEngine November 10 2006, several changes in January 2007 * @author Bill Root added per-panel quit confirmation control, Feb 2015 */ public class InstallerFrame extends JFrame implements InstallerBase, InstallerView { private static final long serialVersionUID = 3257852069162727473L; private static final transient Logger logger = Logger.getLogger(InstallerFrame.class.getName()); private static final String ICON_RESOURCE = "Installer.image"; /** * Name of the variable where to find an extension to the resource name of the icon resource */ private static final String ICON_RESOURCE_EXT_VARIABLE_NAME = "installerimage.ext"; /** * Heading icon resource name. 
*/ private static final String HEADING_ICON_RESOURCE = "Heading.image"; /** * The installation data. */ private GUIInstallData installdata; /** * The icons database. */ private IconsDatabase icons; /** * The panels container. */ protected JPanel panelsContainer; /** * The frame content pane. */ protected JPanel contentPane; /** * The help button. */ protected JButton helpButton = null; /** * The panel navigator. */ private final DefaultNavigator navigator; /** * Registered GUICreationListener. */ protected ArrayList<GUIListener> guiListener; /** * Heading major text. */ protected JLabel[] headingLabels; /** * Panel which contains the heading text and/or icon */ protected JPanel headingPanel; /** * The heading counter component. */ protected JComponent headingCounterComponent; /** * Image */ private JLabel iconLabel; /** * Count for discarded interrupt trials. */ private int interruptCount = 1; /** * Maximum of discarded interrupt trials. */ private static final int MAX_INTERRUPT = 3; /** * conditions */ protected RulesEngine rules; private Debugger debugger; // If a heading image is defined should it be displayed on the left private boolean imageLeft = false; /** * The panels. */ private final IzPanels panels; /** * The resources. */ private ResourceManager resourceManager; /** * Manager for writing uninstall data */ private UninstallDataWriter uninstallDataWriter; /** * The variables. */ private Variables variables; private UninstallData uninstallData; /** * The unpacker. */ private IUnpacker unpacker; /** * The house keeper. */ private final Housekeeper housekeeper; /** * The log. */ private final Log log; /** * The supported locales that contains the localised messages. */ private Locales locales; /** * Constructs an <tt>InstallerFrame</tt>. 
* * @param title the window title * @param installData the installation data * @param rules the rules engine * @param icons the icons database * @param panels the panels * @param uninstallDataWriter the uninstallation data writer * @param resourceManager the resources * @param uninstallData the uninstallation data * @param housekeeper the house-keeper * @param navigator the panel navigator * @param log the log */ public InstallerFrame(String title, GUIInstallData installData, RulesEngine rules, IconsDatabase icons, IzPanels panels, UninstallDataWriter uninstallDataWriter, ResourceManager resourceManager, UninstallData uninstallData, Housekeeper housekeeper, DefaultNavigator navigator, Log log, Locales locales) { super(title); guiListener = new ArrayList<GUIListener>(); this.installdata = installData; this.rules = rules; this.resourceManager = resourceManager; this.uninstallDataWriter = uninstallDataWriter; this.uninstallData = uninstallData; this.panels = panels; this.variables = installData.getVariables(); this.housekeeper = housekeeper; this.log = log; this.locales = locales; this.setIcons(icons); this.navigator = navigator; navigator.setInstallerFrame(this); // Sets the window events handler addWindowListener(new WindowHandler(navigator)); } /** * Sets the unpacker. * * @param unpacker the unpacker */ public void setUnpacker(IUnpacker unpacker) { this.unpacker = unpacker; } @Override public void sizeFrame() { pack(); setSize(installdata.guiPrefs.width, installdata.guiPrefs.height); setPreferredSize(new Dimension(installdata.guiPrefs.width, installdata.guiPrefs.height)); setResizable(installdata.guiPrefs.resizable); centerFrame(this); } public Debugger getDebugger() { return this.debugger; } /** * Builds the GUI. 
*/ public void buildGUI() { this.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); ImageIcon jframeIcon = getIcons().get("JFrameIcon"); setIconImage(jframeIcon.getImage()); // Prepares the glass pane to block the gui interaction when needed JPanel glassPane = (JPanel) getGlassPane(); glassPane.addMouseListener(new MouseAdapter() { }); glassPane.addMouseMotionListener(new MouseMotionAdapter() { }); glassPane.addKeyListener(new KeyAdapter() { }); glassPane.addFocusListener(new FocusAdapter() { }); // We set the layout & prepare the constraint object contentPane = (JPanel) getContentPane(); contentPane.setLayout(new BorderLayout()); // layout); // We add the panels container panelsContainer = new JPanel(); panelsContainer.setBorder(BorderFactory.createEmptyBorder(10, 10, 0, 10)); panelsContainer.setLayout(new GridLayout(1, 1)); contentPane.add(panelsContainer, BorderLayout.CENTER); logger.fine("Building GUI. The panel list to display is " + installdata.getPanels()); Messages messages = locales.getMessages(); navigator.updateButtonText(messages); JPanel navPanel = new JPanel(); navPanel.setLayout(new BoxLayout(navPanel, BoxLayout.X_AXIS)); TitledBorder border = BorderFactory.createTitledBorder( new EtchedLineBorder(), messages.get("installer.madewith") + " ", TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, new Font("Dialog", Font.PLAIN, 10)); navPanel.setBorder(BorderFactory.createCompoundBorder(BorderFactory.createEmptyBorder(8, 8, 8, 8), border)); // Add help Button to the navigation panel this.helpButton = ButtonFactory.createButton(messages.get("installer.help"), getIcons() .get("help"), installdata.buttonsHColor); navPanel.add(this.helpButton); this.helpButton.setName(BUTTON_HELP.id); this.helpButton.addActionListener(new HelpHandler()); // update navigation panel and help button mnemonic shortcuts for selected language. 
ButtonFactory.clearAllMnemonics(); ButtonFactory.reserveButtonMnemonics(new JButton[] {helpButton}); navigator.reserveNavigatorButtonMnemonics(); navPanel.add(Box.createHorizontalGlue()); navPanel.add(navigator.getPrevious()); navPanel.add(Box.createRigidArea(new Dimension(5, 0))); navPanel.add(navigator.getNext()); navPanel.add(Box.createRigidArea(new Dimension(5, 0))); navPanel.add(navigator.getQuit()); contentPane.add(navPanel, BorderLayout.SOUTH); // always initialize debugger debugger = new Debugger(installdata, getIcons(), rules); // this needed to fully initialize the debugger. JPanel debugpanel = debugger.getDebugPanel(); // create a debug panel if TRACE is enabled if (Debug.isTRACE()) { if (installdata.guiPrefs.modifier.containsKey("showDebugWindow") && Boolean.valueOf(installdata.guiPrefs.modifier.get("showDebugWindow"))) { JFrame debugframe = new JFrame("Debug information"); debugframe.setContentPane(debugpanel); debugframe.setSize(new Dimension(400, 400)); debugframe.setVisible(true); } else { debugpanel.setPreferredSize(new Dimension(200, 400)); contentPane.add(debugpanel, BorderLayout.EAST); } } ImageIcon icon = loadIcon(ICON_RESOURCE, 0 + ""); if (icon != null) { JPanel imgPanel = new JPanel(); imgPanel.setLayout(new BorderLayout()); imgPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 0, 0)); iconLabel = new JLabel(icon); iconLabel.setBorder(BorderFactory.createLoweredBevelBorder()); imgPanel.add(iconLabel, BorderLayout.NORTH); contentPane.add(imgPanel, BorderLayout.WEST); loadAndShowImageForPanelNum(iconLabel, 0); } getRootPane().setDefaultButton(navigator.setDefaultButton()); callGUIListener(GUIListener.GUI_BUILDED, navPanel); createHeading(navPanel); // need to initialise the panels after construction, as many of the panels require InstallerFrame panels.initialise(); panels.setListener(new IzPanelsListener() { @Override public void switchPanel(IzPanelView newPanel, IzPanelView oldPanel) { InstallerFrame.this.switchPanel(newPanel, oldPanel); 
} }); }

    /**
     * Returns the panel navigator.
     *
     * @return the panel navigator
     */
    public Navigator getNavigator()
    {
        return navigator;
    }

    /**
     * Notifies all registered GUI listeners of an action, with no panel parameter.
     *
     * @param what the action identifier (one of the {@code GUIListener} constants)
     */
    private void callGUIListener(int what)
    {
        callGUIListener(what, null);
    }

    /**
     * Notifies all registered GUI listeners of an action.
     *
     * @param what  the action identifier (one of the {@code GUIListener} constants)
     * @param param the panel involved in the action. May be {@code null}
     */
    private void callGUIListener(int what, JPanel param)
    {
        for (GUIListener aGuiListener : guiListener)
        {
            aGuiListener.guiActionPerformed(what, param);
        }
    }

    /**
     * Loads icon for given panel id.
     * <p>
     * The resource name is {@code resPrefix + "." + panelid} plus the optional
     * extension returned by {@link #getIconResourceNameExtension()}.
     *
     * @param resPrefix resource prefix
     * @param panelid   panel id
     * @return image icon, or {@code null} if no icon exists
     * @throws ResourceException if the resource exists but cannot be retrieved
     */
    private ImageIcon loadIcon(String resPrefix, String panelid)
    {
        ImageIcon icon = null;
        String ext = getIconResourceNameExtension();
        try
        {
            icon = resourceManager.getImageIcon(resPrefix, resPrefix + "." + panelid + ext);
        }
        catch (ResourceNotFoundException exception)
        {
            // A missing icon is not an error; panels without icons are allowed.
            logger.fine("No icon for panel=" + panelid + ": " + exception.getMessage());
        }
        return icon;
    }

    /**
     * Returns the current set extension to icon resource names. Can be used to change the static
     * installer image based on user input.
     * <p>
     * A leading '.' is prepended when the variable value does not already start with one.
     *
     * @return a resource extension, or an empty string if the variable was not set
     */
    private String getIconResourceNameExtension()
    {
        try
        {
            String iconext = installdata.getVariable(ICON_RESOURCE_EXT_VARIABLE_NAME);
            if (iconext == null)
            {
                iconext = "";
            }
            else
            {
                if ((iconext.length() > 0) && (iconext.charAt(0) != '.'))
                {
                    iconext = "." + iconext;
                }
            }
            iconext = iconext.trim();
            return iconext;
        }
        catch (Exception e)
        {
            // in case of error, return an empty string
            return "";
        }
    }

    /**
     * Loads and shows the icon identified by the panel number.
     *
     * @param jLabel  the label to show the icon in
     * @param panelNo the panel number used as icon resource key
     */
    private void loadAndShowImageForPanelNum(JLabel jLabel, int panelNo)
    {
        loadAndShowImage(jLabel, ICON_RESOURCE, panelNo);
    }

    /**
     * Loads and shows the icon identified by the panel id, falling back to the panel number.
     *
     * @param jLabel  the label to show the icon in
     * @param panelNo the panel number used as fallback icon resource key
     * @param panelId the panel id used as preferred icon resource key
     */
    private void loadAndShowImageForPanelOrId(JLabel jLabel, int panelNo, String panelId)
    {
        loadAndShowImage(jLabel, ICON_RESOURCE, panelNo, panelId);
    }

    /**
     * Loads the icon by panel id, falling back to the panel number, and installs it on the label.
     * The label is hidden while the icon is swapped and shown again afterwards.
     *
     * @param jLabel    the label to show the icon in
     * @param resPrefix the icon resource prefix
     * @param panelNo   the panel number (fallback key)
     * @param panelId   the panel id (preferred key)
     */
    private void loadAndShowImage(JLabel jLabel, String resPrefix, int panelNo, String panelId)
    {
        ImageIcon icon = loadIcon(resPrefix, panelId);
        if (icon == null)
        {
            icon = loadIcon(resPrefix, panelNo + "");
        }
        jLabel.setVisible(false);
        jLabel.setIcon(icon);
        jLabel.setVisible(true);
    }

    /**
     * Loads the icon by panel number and installs it on the label if found.
     *
     * @param jLabel    the label to show the icon in
     * @param resPrefix the icon resource prefix
     * @param panelNo   the panel number used as icon resource key
     */
    private void loadAndShowImage(JLabel jLabel, String resPrefix, int panelNo)
    {
        // FIX: the original retried loadIcon() with the exact same arguments when the first
        // lookup failed -- a no-op fallback that only logged the miss twice. Load once.
        ImageIcon icon = loadIcon(resPrefix, panelNo + "");
        if (icon != null)
        {
            jLabel.setVisible(false);
            jLabel.setIcon(icon);
            jLabel.setVisible(true);
        }
    }

    /**
     * Switches the current panel: deactivates and removes the old panel view, adds and
     * activates the new one, and updates heading, counter and icon accordingly.
     *
     * @param newPanel the new panel
     * @param oldPanel the old panel. May be {@code null}
     */
    protected void switchPanel(IzPanelView newPanel, IzPanelView oldPanel)
    {
        int oldIndex = (oldPanel != null) ? oldPanel.getIndex() : -1;
        logger.fine("Switching panel, old index is " + oldIndex);
        try
        {
            panelsContainer.setVisible(false);
            IzPanel newView = newPanel.getView();
            showHelpButton(newView.canShowHelp());
            if (Debug.isTRACE())
            {
                Panel panel = (oldPanel != null) ? oldPanel.getPanel() : null;
                debugger.switchPanel(newPanel.getPanel(), panel);
            }
            String oldPanelClass = (oldPanel != null) ? oldPanel.getClass().getName() : null;
            log.addDebugMessage(
                    "InstallerFrame.switchPanel: try switching newPanel from {0} to {1} ({2} to {3})",
                    new String[]{oldPanelClass, newPanel.getClass().getName(),
                            Integer.toString(oldIndex), Integer.toString(newPanel.getIndex())},
                    Log.PANEL_TRACE, null);

            // instead of writing data here which leads to duplicated entries in
            // auto-installation script (bug # 4551), let's make data only immediately before
            // writing out that script.
            // oldPanel.makeXMLData(installdata.xmlData.getChildAtIndex(oldIndex));

            // No previous button in the first visible newPanel
            // Change panels container to the current one.
            if (oldPanel != null)
            {
                IzPanel oldView = oldPanel.getView();
                panelsContainer.remove(oldView);
                oldView.panelDeactivate();
            }
            panelsContainer.add(newView);
            installdata.setCurPanelNumber(newPanel.getIndex());
            if (newView.getInitialFocus() != null)
            {
                // Initial focus hint should be performed after current newPanel
                // was added to the panels container, else the focus hint will
                // be ignored.
                // Give a hint for the initial focus to the system.
                final Component inFoc = newView.getInitialFocus();
                // On java VM version >= 1.5 it works only if
                // invoke later will be used.
                SwingUtilities.invokeLater(new Runnable()
                {
                    @Override
                    public void run()
                    {
                        inFoc.requestFocusInWindow();
                    }
                });
                /*
                 * On editable text components position the caret to the end of the existent
                 * text.
                 */
                if (inFoc instanceof JTextComponent)
                {
                    JTextComponent inText = (JTextComponent) inFoc;
                    if (inText.isEditable() && inText.getDocument() != null)
                    {
                        inText.setCaretPosition(inText.getDocument().getLength());
                    }
                }
            }
            performHeading(newPanel);
            performHeadingCounter(newPanel);
            newPanel.executePreActivationActions();
            Panel panel = newPanel.getPanel();
            // NOTE(review): these conditions are fetched but not consumed in this method --
            // presumably evaluated for their side effects elsewhere; confirm before removing.
            String readonlyCondition = panel.getReadonlyCondition();
            String displayHiddenCondition = panel.getDisplayHiddenCondition();
            newView.panelActivate();
            panelsContainer.setVisible(true);
            if (iconLabel != null)
            {
                if (!"UNKNOWN".equals(newPanel.getPanelId()))
                {
                    loadAndShowImageForPanelOrId(iconLabel, panels.getVisibleIndex(newPanel),
                            newPanel.getPanelId());
                }
                else
                {
                    loadAndShowImageForPanelNum(iconLabel, panels.getVisibleIndex(newPanel));
                }
            }
            callGUIListener(GUIListener.PANEL_SWITCHED);
            log.addDebugMessage("InstallerFrame.switchPanel: switched", null, Log.PANEL_TRACE, null);
        }
        catch (Exception e)
        {
            // FIX: dropped the redundant e.printStackTrace(); logging with the throwable
            // already records the full stack trace.
            logger.log(Level.SEVERE, "Error when switching panel", e);
        }
    }

    /**
     * Centers a window on screen.
     *
     * @param frame The window to center.
     */
    public void centerFrame(Window frame)
    {
        Point center = GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint();
        Dimension frameSize = frame.getSize();
        frame.setLocation(center.x - frameSize.width / 2, center.y - frameSize.height / 2 - 10);
    }

    /**
     * Returns the panels container size.
     *
     * @return The panels container size.
     */
    public Dimension getPanelsContainerSize()
    {
        return panelsContainer.getSize();
    }

    /**
     * Exits the installer, if quit is enabled.
     * <p/>
     * If installation is complete, this writes any uninstallation data, and shuts down.
     * If installation is incomplete, a confirmation dialog will be displayed.
     */
    public void exit()
    {
        navigator.quit();
    }

    /**
     * Quits the installer.
     * <p/>
     * If installation is complete, this writes any uninstallation data, and shuts down.
     * If installation is incomplete, a confirmation dialog will be displayed.
     */
    void quit()
    {
        // FIXME !!! Reboot handling
        boolean confirmQuit;
        Panel panel = panels.getPanel();
        if (panel.getConfirmQuitType() == Panel.ConfirmQuitType.DYNAMIC)
        {
            // Dynamic mode: only skip confirmation when the installer may close, or when
            // neither navigation direction is enabled (nothing left to lose).
            confirmQuit = !(installdata.isCanClose()
                    || (!navigator.isNextEnabled() && !navigator.isPreviousEnabled()));
        }
        else
        {
            confirmQuit = (panel.getConfirmQuitType() == Panel.ConfirmQuitType.CONFIRM);
        }
        if (!confirmQuit)
        {
            if (!writeUninstallData())
            {
                // TODO - for now just shut down. Alternative approaches include:
                // . retry
                // . revert installation - which is what wipeAborted attempts to do, but fails
                //   to handle shortcuts and registry changes
            }
            shutdown();
        }
        else
        {
            // The installation is not over
            confirmExit();
        }
    }

    /**
     * Wipes the written files when you abort the installation.
     */
    protected void wipeAborted()
    {
        // We set interrupt to all running Unpacker and wait 40 sec for maximum.
        // If interrupt is discarded (return value false), return immediately:
        if (!unpacker.interrupt(40000))
        {
            return;
        }
        // Wipe the files that had been installed
        for (String installedFile : uninstallData.getInstalledFilesList())
        {
            File file = new File(installedFile);
            file.delete();
        }
    }

    /**
     * Launches the installation on a background thread.
     *
     * @param listener The installation listener.
     */
    public void install(ProgressListener listener)
    {
        unpacker.setProgressListener(listener);
        Thread unpackerthread = new Thread(unpacker, "IzPack - Unpacker thread");
        unpackerthread.start();
    }

    /**
     * Writes the installation record to a file.
     *
     * @param file          The file to write to.
     * @param uninstallData the uninstall data to record
     * @throws Exception if the record cannot be written
     */
    @Override
    public void writeInstallationRecord(File file, UninstallData uninstallData) throws Exception
    {
        panels.writeInstallationRecord(file, uninstallData);
    }

    /**
     * Changes the quit button text. If <tt>text</tt> is null, the default quit text is used.
     *
     * @param text text to be used for changes
     */
    public void setQuitButtonText(String text)
    {
        if (text == null)
        {
            Messages messages = locales.getMessages();
            text = messages.get("installer.quit");
        }
        navigator.setQuitText(text);
    }

    /**
     * Sets a new icon into the quit button if icons should be used, else nothing will be done.
     *
     * @param iconName name of the icon to be used
     */
    public void setQuitButtonIcon(String iconName)
    {
        String useButtonIcons = installdata.guiPrefs.modifier.get("useButtonIcons");
        if (useButtonIcons == null || "yes".equalsIgnoreCase(useButtonIcons))
        {
            navigator.getQuit().setIcon(getIcons().get(iconName));
        }
    }

    /**
     * FocusTraversalPolicy objects to handle keyboard blocking; the declaration as Object
     * allows use of a pre version 1.4 VM.
     */
    private Object usualFTP = null;

    private Object blockFTP = null;

    /**
     * Blocks GUI interaction: shows the glass pane, installs a focus traversal policy that
     * only accepts the glass pane, and switches to the wait cursor.
     */
    public void blockGUI()
    {
        setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        getGlassPane().setVisible(true);
        getGlassPane().setEnabled(true);
        if (usualFTP == null)
        {
            // Remember the normal policy so releaseGUI() can restore it.
            usualFTP = getFocusTraversalPolicy();
        }
        if (blockFTP == null)
        {
            blockFTP = new BlockFocusTraversalPolicy();
        }
        setFocusTraversalPolicy((java.awt.FocusTraversalPolicy) blockFTP);
        getGlassPane().requestFocus();
        callGUIListener(GUIListener.GUI_BLOCKED);
    }

    /**
     * Releases GUI interaction: hides the glass pane and restores cursor and focus policy.
     */
    public void releaseGUI()
    {
        getGlassPane().setEnabled(false);
        getGlassPane().setVisible(false);
        setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        setFocusTraversalPolicy((java.awt.FocusTraversalPolicy) usualFTP);
        callGUIListener(GUIListener.GUI_RELEASED);
    }

    /**
     * Locks the 'previous' button.
     */
    @Override
    public void lockPrevButton()
    {
        navigator.setPreviousEnabled(false);
    }

    /**
     * Locks the 'next' button.
     */
    @Override
    public void lockNextButton()
    {
        navigator.setNextEnabled(false);
    }

    /**
     * Locks the 'Quit' button.
     */
    @Override
    public void lockQuitButton()
    {
        navigator.setQuitEnabled(false);
    }

    /**
     * Unlocks the 'previous' button.
     */
    @Override
    public void unlockPrevButton()
    {
        navigator.setPreviousEnabled(true);
    }

    /**
     * Unlocks the 'next' button and gives it the focus.
     */
    @Override
    public void unlockNextButton()
    {
        unlockNextButton(true);
    }

    /**
     * Unlocks the 'Quit' button.
     */
    @Override
    public void unlockQuitButton()
    {
        navigator.setQuitEnabled(true);
    }

    /**
     * Unlocks the 'next' button.
     *
     * @param requestFocus if <code>true</code> focus goes to <code>nextButton</code>
     */
    @Override
    public void unlockNextButton(boolean requestFocus)
    {
        navigator.setNextEnabled(true);
        if (requestFocus)
        {
            getRootPane().setDefaultButton(navigator.setDefaultButton());
            navigator.getNext().requestFocusInWindow();
            if (this.getFocusOwner() != null)
            {
                logger.fine("Current focus owner: " + this.getFocusOwner().getName());
            }
        }
    }

    /**
     * Allows a panel to ask to be skipped. Continues in whichever direction the user
     * was already navigating.
     */
    public void skipPanel()
    {
        if (panels.isBack())
        {
            navigatePrevious();
        }
        else
        {
            navigator.next(false);
        }
    }

    /**
     * This function moves to the next panel
     */
    @Override
    public void navigateNext()
    {
        navigator.next();
    }

    /**
     * Check to see if there is another panel that can be navigated to next. This checks the
     * successive panels to see if at least one can be shown based on the conditions associated
     * with the panels.
     *
     * @param startPanel  The panel to check from
     * @param visibleOnly Only check the visible panels
     * @return The panel that we can navigate to next or -1 if there is no panel that we can
     *         navigate next to
     */
    public int hasNavigateNext(int startPanel, boolean visibleOnly)
    {
        return panels.getNext(startPanel, visibleOnly);
    }

    /**
     * Check to see if there is another panel that can be navigated to previous. This checks the
     * previous panels to see if at least one can be shown based on the conditions associated
     * with the panels.
     *
     * @param endingPanel The panel to check from
     * @param visibleOnly Only check the visible panels
     * @return The panel that we can navigate to previous or -1 if there is no panel that we can
     *         navigate previous to
     */
    public int hasNavigatePrevious(int endingPanel, boolean visibleOnly)
    {
        return panels.getPrevious(endingPanel, visibleOnly);
    }

    /**
     * This function moves to the previous panel
     */
    @Override
    public void navigatePrevious()
    {
        navigator.previous();
    }

    /**
     * Show help Window
     */
    @Override
    public void showHelp()
    {
        IzPanel izPanel = panels.getView();
        izPanel.showHelp();
    }

    /**
     * Returns the locale-specific messages.
     *
     * @return the messages
     */
    public Messages getMessages()
    {
        Messages messages = locales.getMessages();
        return messages;
    }

    /**
     * Returns the locale-specific messages.
     *
     * @return the messages
     * @deprecated use {@link #getMessages()}
     */
    @Deprecated
    public LocaleDatabase getLangpack()
    {
        Messages messages = locales.getMessages();
        return (LocaleDatabase) messages;
    }

    /**
     * Sets the locale specific messages. Intentionally a no-op.
     *
     * @param langpack the language pack
     * @deprecated no replacement
     */
    @Deprecated
    public void setLangpack(LocaleDatabase langpack)
    {
    }

    /**
     * Returns the icons database.
     *
     * @return the icons database
     */
    public IconsDatabase getIcons()
    {
        return icons;
    }

    /**
     * Sets the icons database.
     *
     * @param icons the icons database
     */
    public void setIcons(IconsDatabase icons)
    {
        this.icons = icons;
    }

    /**
     * Action listener for the help button: delegates to {@link #showHelp()}.
     */
    class HelpHandler implements ActionListener
    {
        /**
         * Actions handler.
         *
         * @param e The event.
         */
        @Override
        public void actionPerformed(ActionEvent e)
        {
            showHelp();
        }
    }

    /**
     * A FocusTraversalPolicy that only allows the block panel to have the focus
     */
    private class BlockFocusTraversalPolicy extends java.awt.DefaultFocusTraversalPolicy
    {
        private static final long serialVersionUID = 3258413928261169209L;

        /**
         * Only accepts the block panel
         *
         * @param aComp the component to check
         * @return true if aComp is the block panel
         */
        @Override
        protected boolean accept(Component aComp)
        {
            return aComp == getGlassPane();
        }
    }

    /**
     * Returns the gui creation listener list.
     *
     * @return the gui creation listener list
     */
    public List<GUIListener> getGuiListener()
    {
        return guiListener;
    }

    /**
     * Add a listener to the listener list.
     *
     * @param listener to be added as gui creation listener
     */
    public void addGuiListener(GUIListener listener)
    {
        guiListener.add(listener);
    }

    /**
     * Creates heading labels.
     *
     * @param headingLines the number of lines of heading labels
     * @param back         background color (currently not used)
     */
    private void createHeadingLabels(int headingLines, Color back)
    {
        // headingLabels are an array which contains the labels for header (0),
        // description lines and the icon (last).
        headingLabels = new JLabel[headingLines + 1];
        headingLabels[0] = new JLabel("");
        // First line is the "main heading" which should be bold.
        headingLabels[0].setFont(headingLabels[0].getFont().deriveFont(Font.BOLD));

        // Updated by Daniel Azarov, Exadel Inc.
        // start
        Color foreground;
        if (installdata.guiPrefs.modifier.containsKey("headingForegroundColor"))
        {
            foreground = Color.decode(installdata.guiPrefs.modifier.get("headingForegroundColor"));
            headingLabels[0].setForeground(foreground);
        }
        // end

        if (installdata.guiPrefs.modifier.containsKey("headingFontSize"))
        {
            float fontSize = Float.parseFloat(installdata.guiPrefs.modifier.get("headingFontSize"));
            // The modifier is a scale factor, not an absolute size; only sane factors apply.
            if (fontSize > 0.0 && fontSize <= 5.0)
            {
                float currentSize = headingLabels[0].getFont().getSize2D();
                headingLabels[0].setFont(headingLabels[0].getFont().deriveFont(
                        currentSize * fontSize));
            }
        }
        if (imageLeft)
        {
            headingLabels[0].setAlignmentX(Component.RIGHT_ALIGNMENT);
        }
        for (int i = 1; i < headingLines; ++i)
        {
            headingLabels[i] = new JLabel();
            // Minor headings should be a little bit more to the right.
            if (imageLeft)
            {
                headingLabels[i].setAlignmentX(Component.RIGHT_ALIGNMENT);
            }
            else
            {
                headingLabels[i].setBorder(BorderFactory.createEmptyBorder(0, 30, 0, 8));
            }
        }
    }

    /**
     * Creates heading panel counter.
     *
     * @param navPanel         navi JPanel
     * @param leftHeadingPanel left heading JPanel
     */
    private void createHeadingCounter(JPanel navPanel, JPanel leftHeadingPanel)
    {
        int i;
        String counterPos = "inHeading";
        if (installdata.guiPrefs.modifier.containsKey("headingPanelCounterPos"))
        {
            counterPos = installdata.guiPrefs.modifier.get("headingPanelCounterPos");
        }
        // Do not create counter if it should be in the heading, but no heading should be used.
        if (leftHeadingPanel == null && "inHeading".equalsIgnoreCase(counterPos))
        {
            return;
        }
        if (installdata.guiPrefs.modifier.containsKey("headingPanelCounter"))
        {
            headingCounterComponent = null;
            if ("progressbar".equalsIgnoreCase(installdata.guiPrefs.modifier
                    .get("headingPanelCounter")))
            {
                JProgressBar headingProgressBar = new JProgressBar();
                headingProgressBar.setStringPainted(true);
                headingProgressBar.setString("");
                headingProgressBar.setValue(0);
                headingCounterComponent = headingProgressBar;
                if (imageLeft)
                {
                    headingCounterComponent.setAlignmentX(Component.RIGHT_ALIGNMENT);
                }
            }
            else
            {
                if ("text".equalsIgnoreCase(installdata.guiPrefs.modifier
                        .get("headingPanelCounter")))
                {
                    JLabel headingCountPanels = new JLabel(" ");
                    headingCounterComponent = headingCountPanels;
                    if (imageLeft)
                    {
                        headingCounterComponent.setAlignmentX(Component.RIGHT_ALIGNMENT);
                    }
                    else
                    {
                        headingCounterComponent.setBorder(BorderFactory.createEmptyBorder(0, 30, 0, 0));
                    }

                    // Updated by Daniel Azarov, Exadel Inc.
                    // start
                    Color foreground;
                    if (installdata.guiPrefs.modifier.containsKey("headingForegroundColor"))
                    {
                        foreground = Color.decode(installdata.guiPrefs.modifier
                                .get("headingForegroundColor"));
                        headingCountPanels.setForeground(foreground);
                    }
                    // end
                }
            }
            if ("inHeading".equals(counterPos))
            {
                leftHeadingPanel.add(headingCounterComponent);
            }
            else if ("inNavigationPanel".equals(counterPos))
            {
                // Insert the counter just before the 'previous' button; if the button is not
                // found, i == comps.length and the counter is appended at the end.
                Component[] comps = navPanel.getComponents();
                for (i = 0; i < comps.length; ++i)
                {
                    if (comps[i].equals(navigator.getPrevious()))
                    {
                        break;
                    }
                }
                if (i <= comps.length)
                {
                    navPanel.add(Box.createHorizontalGlue(), i);
                    navPanel.add(headingCounterComponent, i);
                }
            }
        }
    }

    /**
     * Creates heading icon.
     *
     * @param back the color of background around image.
     * @return a panel with heading image.
     */
    private JPanel createHeadingIcon(Color back)
    {
        JPanel imgPanel = new JPanel();
        imgPanel.setLayout(new BoxLayout(imgPanel, BoxLayout.Y_AXIS));
        // Updated by Daniel Azarov, Exadel Inc.
        // start
        int borderSize = 8;
        if (installdata.guiPrefs.modifier.containsKey("headingImageBorderSize"))
        {
            borderSize = Integer.parseInt(installdata.guiPrefs.modifier
                    .get("headingImageBorderSize"));
        }
        imgPanel.setBorder(BorderFactory.createEmptyBorder(borderSize, borderSize, borderSize,
                borderSize));
        // end
        if (back != null)
        {
            imgPanel.setBackground(back);
        }
        ImageIcon icon = loadIcon(HEADING_ICON_RESOURCE, 0 + "");
        if (icon != null)
        {
            JLabel iconLab = new JLabel(icon);
            if (imageLeft)
            {
                imgPanel.add(iconLab, BorderLayout.WEST);
            }
            else
            {
                imgPanel.add(iconLab, BorderLayout.EAST);
            }
            // The last headingLabels slot is reserved for the icon.
            headingLabels[headingLabels.length - 1] = iconLab;
        }
        return (imgPanel);
    }

    /**
     * Creates a Heading in given Panel.
     *
     * @param navPanel a panel
     */
    private void createHeading(JPanel navPanel)
    {
        headingPanel = null;
        int headingLines = 1;
        // The number of lines can be determined in the config xml file.
        // The first is the header, additionals are descriptions for the header.
        if (installdata.guiPrefs.modifier.containsKey("headingLineCount"))
        {
            headingLines = Integer.parseInt(installdata.guiPrefs.modifier.get("headingLineCount"));
        }
        Color back = null;
        // It is possible to determine the used background color of the heading panel.
        if (installdata.guiPrefs.modifier.containsKey("headingBackgroundColor"))
        {
            back = Color.decode(installdata.guiPrefs.modifier.get("headingBackgroundColor"));
        }
        // Try to create counter if no heading should be used.
        if (!isHeading(null))
        {
            createHeadingCounter(navPanel, null);
            return;
        }
        // See if we should switch the header image to the left side
        if (installdata.guiPrefs.modifier.containsKey("headingImageOnLeft")
                && (installdata.guiPrefs.modifier.get("headingImageOnLeft").equalsIgnoreCase(
                "yes") || installdata.guiPrefs.modifier
                .get("headingImageOnLeft").equalsIgnoreCase("true")))
        {
            imageLeft = true;
        }
        // We create the text labels and the needed panels. From inner to outer.
        // Labels
        createHeadingLabels(headingLines, back);
        // Panel which contains the labels
        JPanel leftHeadingPanel = new JPanel();
        if (back != null)
        {
            leftHeadingPanel.setBackground(back);
        }
        leftHeadingPanel.setLayout(new BoxLayout(leftHeadingPanel, BoxLayout.Y_AXIS));
        if (imageLeft)
        {
            leftHeadingPanel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 8));
        }
        for (int i = 0; i < headingLines; ++i)
        {
            leftHeadingPanel.add(headingLabels[i]);
        }
        // HeadingPanel counter: this is a label or a progress bar which can be placed
        // in the leftHeadingPanel or in the navigation bar. It is facultative. If
        // exist, it shows the current panel number and the amount of panels.
        createHeadingCounter(navPanel, leftHeadingPanel);
        // It is possible to place an icon on the right side of the heading panel.
        JPanel imgPanel = createHeadingIcon(back);
        // The panel for text and icon.
        JPanel northPanel = new JPanel();
        if (back != null)
        {
            northPanel.setBackground(back);
        }
        northPanel.setLayout(new BoxLayout(northPanel, BoxLayout.X_AXIS));
        if (imageLeft)
        {
            northPanel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
            northPanel.add(imgPanel);
            northPanel.add(Box.createHorizontalGlue());
            northPanel.add(leftHeadingPanel);
        }
        else
        {
            northPanel.setBorder(BorderFactory.createEmptyBorder(0, 12, 0, 0));
            northPanel.add(leftHeadingPanel);
            northPanel.add(Box.createHorizontalGlue());
            northPanel.add(imgPanel);
        }
        headingPanel = new JPanel(new BorderLayout());
        headingPanel.add(northPanel);
        headingPanel.add(new JSeparator(), BorderLayout.SOUTH);
        // contentPane.add(northPanel, BorderLayout.NORTH);
        contentPane.add(headingPanel, BorderLayout.NORTH);
    }

    /**
     * Returns whether this installer frame uses with the given panel a separated heading panel or
     * not. Be aware, this is an other heading as given by the IzPanel which will be placed in the
     * IzPanel. This heading will be placed if the gui preferences contains an modifier with the
     * key "useHeadingPanel" and the value "yes" and there is a message with the key "&lt;class
     * name&gt;.headline".
     *
     * @param caller the IzPanel for which heading should be resolved
     * @return whether an heading panel will be used or not
     */
    public boolean isHeading(IzPanel caller)
    {
        if (!installdata.guiPrefs.modifier.containsKey("useHeadingPanel")
                || !(installdata.guiPrefs.modifier.get("useHeadingPanel")).equalsIgnoreCase("yes"))
        {
            return (false);
        }
        if (caller == null)
        {
            return (true);
        }
        return (caller.getI18nStringForClass("headline") != null);
    }

    /**
     * Updates the heading labels (headline, info lines and icon) for the given panel, or hides
     * the heading panel when the panel declares no headline.
     *
     * @param panel the panel being activated
     */
    private void performHeading(IzPanelView panel)
    {
        int i;
        int headingLines = 1;
        if (installdata.guiPrefs.modifier.containsKey("headingLineCount"))
        {
            headingLines = Integer.parseInt(installdata.guiPrefs.modifier.get("headingLineCount"));
        }
        if (headingLabels == null)
        {
            return;
        }
        IzPanel view = panel.getView();
        String headline = view.getI18nStringForClass("headline");
        if (headline == null)
        {
            headingPanel.setVisible(false);
            return;
        }
        // Hide all labels first; only those with content are shown again below.
        for (i = 0; i <= headingLines; ++i)
        {
            if (headingLabels[i] != null)
            {
                headingLabels[i].setVisible(false);
            }
        }
        String info;
        for (i = 0; i < headingLines - 1; ++i)
        {
            info = view.getI18nStringForClass("headinfo" + Integer.toString(i));
            if (info == null)
            {
                info = " ";
            }
            // Normalise a trailing colon into a full stop.
            if (info.endsWith(":"))
            {
                info = info.substring(0, info.length() - 1) + ".";
            }
            headingLabels[i + 1].setText(info);
            headingLabels[i + 1].setVisible(true);
        }
        // Do not forget the first headline.
        headingLabels[0].setText(headline);
        headingLabels[0].setVisible(true);
        int curPanelNo = panels.getVisibleIndex(panel);
        if (headingLabels[headingLines] != null)
        {
            loadAndShowImage(headingLabels[headingLines], HEADING_ICON_RESOURCE, curPanelNo);
            headingLabels[headingLines].setVisible(true);
        }
        headingPanel.setVisible(true);
    }

    /**
     * Updates the heading counter ("step X of Y") for the given panel, if one is configured.
     *
     * @param panel the panel being activated
     */
    private void performHeadingCounter(IzPanelView panel)
    {
        if (headingCounterComponent != null)
        {
            int curPanelNo = panels.getVisibleIndex(panel);
            int visPanelsCount = panels.getVisible();
            Messages messages = locales.getMessages();
            String message = String.format(
                    "%s %d %s %d",
                    messages.get("installer.step"), curPanelNo + 1,
                    messages.get("installer.of"), visPanelsCount
            );
            if (headingCounterComponent instanceof JProgressBar)
            {
                updateProgressBar(visPanelsCount, curPanelNo + 1, message);
            }
            else
            {
                updateProgressCounter(message);
            }
        }
    }

    /**
     * Sets the text of the heading counter label.
     *
     * @param message the counter text
     */
    public void updateProgressCounter(String message)
    {
        ((JLabel) headingCounterComponent).setText(message);
    }

    /**
     * Updates the heading counter progress bar.
     *
     * @param maximum the total number of visible panels
     * @param value   the current panel number (1-based)
     * @param message the counter text painted on the bar
     */
    public void updateProgressBar(int maximum, int value, String message)
    {
        JProgressBar counterComponent = (JProgressBar) headingCounterComponent;
        counterComponent.setMaximum(maximum);
        counterComponent.setValue(value);
        counterComponent.setString(message);
    }

    /**
     * Shows or hides Help button depending on <code>show</code> parameter
     *
     * @param show - flag to show or hide Help button
     */
    private void showHelpButton(boolean show)
    {
        if (this.helpButton == null)
        {
            return;
        }
        this.helpButton.setVisible(show);
    }

    /**
     * Refreshes the dynamic variables; on failure, asks the user whether to continue or abort
     * the installation.
     */
    public void refreshDynamicVariables()
    {
        try
        {
            installdata.refreshVariables();
        }
        catch (Exception e)
        {
            logger.log(Level.SEVERE, "Error when refreshing variable", e);
            logger.fine("Refreshing dynamic variables failed, asking user whether to proceed.");
            StringBuilder msg = new StringBuilder();
            msg.append("<html>");
            msg.append("The following error occured during refreshing panel contents:<br>");
            msg.append("<i>").append(e.getMessage()).append("</i><br>");
            msg.append("Are you sure you want to continue with this installation?");
            msg.append("</html>");
            JLabel label = new JLabel(msg.toString());
            label.setFont(new Font("Sans Serif", Font.PLAIN, 12));
            Object[] optionValues = {"Continue", "Exit"};
            int selectedOption = JOptionPane.showOptionDialog(null, label, "Warning",
                    JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE, null, optionValues,
                    optionValues[1]);
            logger.fine("Selected option: " + selectedOption);
            if (selectedOption == 0)
            {
                logger.fine("Continuing installation");
            }
            else
            {
                // TODO - shouldn't do this. Should try and clean up the installation
                logger.fine("Exiting");
                System.exit(1);
            }
        }
    }

    /**
     * Writes uninstall data if it is required.
     * <p/>
     * An error message will be displayed if the write fails.
     *
     * @return <tt>true</tt> if uninstall data was written successfully or is not required,
     *         otherwise <tt>false</tt>
     */
    private boolean writeUninstallData()
    {
        boolean result = true;
        if (uninstallDataWriter.isUninstallRequired())
        {
            result = uninstallDataWriter.write();
            if (!result)
            {
                Messages messages = locales.getMessages();
                String title = messages.get("installer.error");
                String message = messages.get("installer.uninstall.writefailed");
                JOptionPane.showMessageDialog(this, message, title, JOptionPane.ERROR_MESSAGE);
            }
        }
        return result;
    }

    /**
     * Shuts down the installer after successful installation.
     * <p/>
     * This may trigger a reboot.
     */
    private void shutdown()
    {
        boolean reboot = false;
        if (installdata.isRebootNecessary())
        {
            Messages messages = locales.getMessages();
            String message;
            String title;
            System.out.println("[ There are file operations pending after reboot ]");
            switch (installdata.getInfo().getRebootAction())
            {
                case Info.REBOOT_ACTION_ALWAYS:
                    reboot = true;
                    break;
                case Info.REBOOT_ACTION_ASK:
                    message = variables.replace(messages.get("installer.reboot.ask.message"));
                    title = variables.replace(messages.get("installer.reboot.ask.title"));
                    int res = JOptionPane
                            .showConfirmDialog(this, message, title, JOptionPane.YES_NO_OPTION);
                    if (res == JOptionPane.YES_OPTION)
                    {
                        reboot = true;
                    }
                    break;
                case Info.REBOOT_ACTION_NOTICE:
                    message = variables.replace(messages.get("installer.reboot.notice.message"));
                    title = variables.replace(messages.get("installer.reboot.notice.title"));
                    JOptionPane.showConfirmDialog(this, message, title, JOptionPane.OK_OPTION);
                    break;
            }
            if (reboot)
            {
                System.out.println("[ Rebooting now automatically ]");
            }
        }
        housekeeper.shutDown(0, reboot);
    }

    /**
     * Confirms exit when installation is not complete.
     */
    private void confirmExit()
    {
        if (unpacker.isInterruptDisabled() && interruptCount < MAX_INTERRUPT)
        {
            // But we should not interrupt.
            interruptCount++;
            return;
        }
        Messages messages = locales.getMessages();
        // Use a alternate message and title if defined.
        final String mkey = "installer.quit.reversemessage";
        final String tkey = "installer.quit.reversetitle";
        String message = messages.get(mkey);
        String title = messages.get(tkey);
        // message equal to key -> no alternate message defined.
        if (message.contains(mkey))
        {
            message = messages.get("installer.quit.message");
        }
        // title equal to key -> no alternate title defined.
        if (title.contains(tkey))
        {
            title = messages.get("installer.quit.title");
        }
        // Now replace variables in message or title.
        message = variables.replace(message);
        title = variables.replace(title);
        int res = JOptionPane.showConfirmDialog(this, message, title, JOptionPane.YES_NO_OPTION);
        if (res == JOptionPane.YES_OPTION)
        {
            wipeAborted();
            housekeeper.shutDown(0);
        }
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.activiti.engine.impl;

import java.io.Serializable;
import java.util.List;
import java.util.Set;

import org.activiti.engine.ActivitiIllegalArgumentException;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.interceptor.CommandExecutor;
import org.activiti.engine.impl.persistence.entity.SuspensionState;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.runtime.ProcessInstanceQuery;

/**
 * Query implementation for {@link ProcessInstance}s. Criteria are collected by the fluent
 * setter methods and read back by the persistence layer through the getters (the query XML
 * is shared with execution queries -- see {@link #getParentId()}).
 *
 * @author Tom Baeyens
 * @author Joram Barrez
 * @author Frederik Heremans
 * @author Falko Menge
 * @author Daniel Meyer
 */
public class ProcessInstanceQueryImpl extends AbstractVariableQueryImpl<ProcessInstanceQuery, ProcessInstance> implements ProcessInstanceQuery, Serializable {

  private static final long serialVersionUID = 1L;
  // NOTE: the process-instance-id criterion is stored in this field; see
  // processInstanceId(String) and getProcessInstanceId() below.
  protected String executionId;
  protected String businessKey;
  protected boolean includeChildExecutionsWithBusinessKeyQuery;
  protected String processDefinitionId;
  protected Set<String> processInstanceIds;
  protected String processDefinitionKey;
  protected String superProcessInstanceId;
  protected String subProcessInstanceId;
  protected boolean excludeSubprocesses;
  protected String involvedUser;
  protected SuspensionState suspensionState;
  protected boolean includeProcessVariables;

  // Unused, see dynamic query
  protected String activityId;

  protected List<EventSubscriptionQueryValue> eventSubscriptions;

  public ProcessInstanceQueryImpl() {
  }

  public ProcessInstanceQueryImpl(CommandContext commandContext) {
    super(commandContext);
  }

  public ProcessInstanceQueryImpl(CommandExecutor commandExecutor) {
    super(commandExecutor);
  }

  /**
   * Restricts the query to one process instance id. The value is stored in
   * {@code executionId} and read back by {@link #getProcessInstanceId()} -- the two names
   * refer to the same criterion in the shared execution query mapping.
   *
   * @throws ActivitiIllegalArgumentException if the id is null
   */
  public ProcessInstanceQueryImpl processInstanceId(String processInstanceId) {
    if (processInstanceId == null) {
      throw new ActivitiIllegalArgumentException("Process instance id is null");
    }
    this.executionId = processInstanceId;
    return this;
  }

  /**
   * Restricts the query to a set of process instance ids.
   *
   * @throws ActivitiIllegalArgumentException if the set is null or empty
   */
  public ProcessInstanceQuery processInstanceIds(Set<String> processInstanceIds) {
    if (processInstanceIds == null) {
      throw new ActivitiIllegalArgumentException("Set of process instance ids is null");
    }
    if (processInstanceIds.isEmpty()) {
      throw new ActivitiIllegalArgumentException("Set of process instance ids is empty");
    }
    this.processInstanceIds = processInstanceIds;
    return this;
  }

  /**
   * Restricts the query to instances with the given business key.
   *
   * @throws ActivitiIllegalArgumentException if the business key is null
   */
  public ProcessInstanceQuery processInstanceBusinessKey(String businessKey) {
    if (businessKey == null) {
      throw new ActivitiIllegalArgumentException("Business key is null");
    }
    this.businessKey = businessKey;
    return this;
  }

  /**
   * Restricts the query to instances with the given business key AND process definition key.
   * Note: only the business key is null-checked here.
   *
   * @throws ActivitiIllegalArgumentException if the business key is null
   */
  public ProcessInstanceQuery processInstanceBusinessKey(String businessKey, String processDefinitionKey) {
    if (businessKey == null) {
      throw new ActivitiIllegalArgumentException("Business key is null");
    }
    this.businessKey = businessKey;
    this.processDefinitionKey = processDefinitionKey;
    return this;
  }

  /**
   * Restricts the query to instances of the given process definition id.
   *
   * @throws ActivitiIllegalArgumentException if the id is null
   */
  public ProcessInstanceQueryImpl processDefinitionId(String processDefinitionId) {
    if (processDefinitionId == null) {
      throw new ActivitiIllegalArgumentException("Process definition id is null");
    }
    this.processDefinitionId = processDefinitionId;
    return this;
  }

  /**
   * Restricts the query to instances of the given process definition key.
   *
   * @throws ActivitiIllegalArgumentException if the key is null
   */
  public ProcessInstanceQueryImpl processDefinitionKey(String processDefinitionKey) {
    if (processDefinitionKey == null) {
      throw new ActivitiIllegalArgumentException("Process definition key is null");
    }
    this.processDefinitionKey = processDefinitionKey;
    return this;
  }

  // Restricts to sub-instances of the given super process instance (no null check).
  public ProcessInstanceQuery superProcessInstanceId(String superProcessInstanceId) {
    this.superProcessInstanceId = superProcessInstanceId;
    return this;
  }

  // Restricts to the instance that has the given sub process instance (no null check).
  public ProcessInstanceQuery subProcessInstanceId(String subProcessInstanceId) {
    this.subProcessInstanceId = subProcessInstanceId;
    return this;
  }

  // When true, sub process instances are excluded from the results.
  public ProcessInstanceQuery excludeSubprocesses(boolean excludeSubprocesses) {
    this.excludeSubprocesses = excludeSubprocesses;
    return this;
  }

  /**
   * Restricts the query to instances the given user is involved in.
   *
   * @throws ActivitiIllegalArgumentException if the user is null
   */
  public ProcessInstanceQuery involvedUser(String involvedUser) {
    if (involvedUser == null) {
      throw new ActivitiIllegalArgumentException("Involved user is null");
    }
    this.involvedUser = involvedUser;
    return this;
  }

  public ProcessInstanceQuery orderByProcessInstanceId() {
    this.orderProperty = ProcessInstanceQueryProperty.PROCESS_INSTANCE_ID;
    return this;
  }

  public ProcessInstanceQuery orderByProcessDefinitionId() {
    this.orderProperty = ProcessInstanceQueryProperty.PROCESS_DEFINITION_ID;
    return this;
  }

  public ProcessInstanceQuery orderByProcessDefinitionKey() {
    this.orderProperty = ProcessInstanceQueryProperty.PROCESS_DEFINITION_KEY;
    return this;
  }

  // Only active (non-suspended) instances.
  public ProcessInstanceQuery active() {
    this.suspensionState = SuspensionState.ACTIVE;
    return this;
  }

  // Only suspended instances.
  public ProcessInstanceQuery suspended() {
    this.suspensionState = SuspensionState.SUSPENDED;
    return this;
  }

  // Also fetch process variables with the results (switches executeList to the
  // variables-aware finder).
  public ProcessInstanceQuery includeProcessVariables() {
    this.includeProcessVariables = true;
    return this;
  }

  /**
   * Returns the order-by clause rewritten for MSSQL/DB2: column prefixes are renamed from
   * "RES." to "TEMPRES_" to match the aliasing used by those dialects' paging queries.
   */
  public String getMssqlOrDB2OrderBy() {
    String specialOrderBy = super.getOrderBy();
    if (specialOrderBy != null && specialOrderBy.length() > 0) {
      specialOrderBy = specialOrderBy.replace("RES.", "TEMPRES_");
    }
    return specialOrderBy;
  }

  //results /////////////////////////////////////////////////////////////////

  // Executes the count variant of this query.
  public long executeCount(CommandContext commandContext) {
    checkQueryOk();
    ensureVariablesInitialized();
    return commandContext
      .getExecutionEntityManager()
      .findProcessInstanceCountByQueryCriteria(this);
  }

  // Executes the list variant; uses the variables-aware finder when
  // includeProcessVariables() was requested.
  public List<ProcessInstance> executeList(CommandContext commandContext, Page page) {
    checkQueryOk();
    ensureVariablesInitialized();
    if (includeProcessVariables) {
      return commandContext
        .getExecutionEntityManager()
        .findProcessInstanceAndVariablesByQueryCriteria(this, page);
    } else {
      return commandContext
        .getExecutionEntityManager()
        .findProcessInstanceByQueryCriteria(this, page);
    }
  }

  //getters /////////////////////////////////////////////////////////////////

  public boolean getOnlyProcessInstances() {
    return true; // See dynamic query in runtime.mapping.xml
  }

  // Reads back the criterion set by processInstanceId(String); see the field note above.
  public String getProcessInstanceId() {
    return executionId;
  }

  public Set<String> getProcessInstanceIds() {
    return processInstanceIds;
  }

  public String getBusinessKey() {
    return businessKey;
  }

  public boolean isIncludeChildExecutionsWithBusinessKeyQuery() {
    return includeChildExecutionsWithBusinessKeyQuery;
  }

  public String getProcessDefinitionId() {
    return processDefinitionId;
  }

  public String getProcessDefinitionKey() {
    return processDefinitionKey;
  }

  public String getActivityId() {
    return null; // Unused, see dynamic query
  }

  public String getSuperProcessInstanceId() {
    return superProcessInstanceId;
  }

  public String getSubProcessInstanceId() {
    return subProcessInstanceId;
  }

  public boolean isExcludeSubprocesses() {
    return excludeSubprocesses;
  }

  public String getInvolvedUser() {
    return involvedUser;
  }

  public SuspensionState getSuspensionState() {
    return suspensionState;
  }

  public void setSuspensionState(SuspensionState suspensionState) {
    this.suspensionState = suspensionState;
  }

  public List<EventSubscriptionQueryValue> getEventSubscriptions() {
    return eventSubscriptions;
  }

  public void setEventSubscriptions(List<EventSubscriptionQueryValue> eventSubscriptions) {
    this.eventSubscriptions = eventSubscriptions;
  }

  /**
   * Method needed for ibatis because of re-use of query-xml for executions. ExecutionQuery contains
   * a parentId property.
   */
  public String getParentId() {
    return null;
  }
}
package controller; import java.io.IOException; import java.util.ArrayList; import java.util.List; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import model.dao.InspectionDAO; import model.dao.InspectionweekDAO; import model.dao.InspectorDAO; import model.dao.StudentDAO; import model.entity.Inspection; import model.entity.Inspectionweek; import model.entity.Inspector; import model.entity.Module; import model.entity.Student; import model.entity.User; /** * Servlet implementation class StudentsServlet */ @WebServlet("/StudentsServlet") public class StudentsServlet extends BootstrapServlet { private static final long serialVersionUID = 1L; /** * @see HttpServlet#HttpServlet() */ public StudentsServlet() { super(); this.addJavascriptFile("students.js"); this.layoutType = LayoutType.Grid; } public Student getStudentBySlug(String studentSlug) { StudentDAO studentDAO = new StudentDAO(); Student student = studentDAO.findByUsername(studentSlug); return student; } /** * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response) */ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { this.doView(request, response); } private void doView(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String studentSlug = this.getObjectSlug(request); Student student = this.getStudentBySlug(studentSlug); if (student != null) { this.proceedSingleStudent(student, request, response); } else if (studentSlug != null) { this.setAlertView(AlertType.AlertTypeDanger, "Student not found", request); this.proceedSingleStudentError(request, response); } else { this.proceedStudentList(request, response); } } protected void proceedStudentList(HttpServletRequest request, 
HttpServletResponse response) throws ServletException, IOException { Module module = this.getSelectedModule(request); this.setBreadcrumbTitles("Modules%"+ module.getModule_name() +"%Students", request); this.setBreadcrumbLinks("/PIMS/modules/%/PIMS/modules/"+ module.getModule_id() +"/", request); this.relatedMenuClass = "students"; this.proceedGet("/Students.jsp", request, response); } protected void proceedSingleStudent(Student student, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { this.relatedMenuClass = "students student-profile"; request.setAttribute("student", student); InspectorDAO inspectorDAO = new InspectorDAO(); List<Inspector> inspectors = inspectorDAO.findAll(); request.setAttribute("inspectors", inspectors); Module module = this.getSelectedModule(request); this.setBreadcrumbTitles("Modules%"+ module.getModule_name() +"%Students%"+ student.getUsername(), request); this.setBreadcrumbLinks("/PIMS/modules/%/PIMS/modules/"+ module.getModule_id() +"/%/PIMS/students/"+ module.getModule_id() +"/", request); // Inspections InspectionweekDAO inspectionWeekDAO = new InspectionweekDAO(); List<Inspectionweek> inspectionWeeks = inspectionWeekDAO.findByModuleID(student.getModule_id()); request.setAttribute("inspectionWeeks", inspectionWeeks); request.setAttribute("servlet", this); this.proceedGet("/Student.jsp", request, response); } protected void proceedSingleStudentError(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Module module = this.getSelectedModule(request); this.setBreadcrumbTitles("Modules%"+ module.getModule_name() +"%Students%Error", request); this.setBreadcrumbLinks("/PIMS/modules/%/PIMS/modules/"+ module.getModule_id() +"/%/PIMS/students/"+ module.getModule_id() +"/", request); this.proceedGet("/Student.jsp", request, response); } /** * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response) */ protected void 
doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String firstName = request.getParameter("inputFirstName"); String lastName = request.getParameter("inputLastName"); String email = request.getParameter("inputEmail"); String projectTitle = request.getParameter("inputTitle"); String projectDescription = request.getParameter("inputDescription"); String supervisorSlug = request.getParameter("inputSupervisor"); String password = request.getParameter("inputPassword"); String error = null; if (firstName == null || firstName.equals("")) { error = "Invalid first name"; } else if (lastName == null || lastName.equals("")) { error = "Invalid last name"; } else if (email == null || email.equals("")) { error = "Invalid email"; } else if (password == null || password.length() < 5) { error = "Invalid password"; } else if (projectTitle == null || projectTitle.equals("")) { error = "Invalid project title"; } else if (projectDescription == null || projectDescription.equals("")) { error = "Invalid project description"; } else if (supervisorSlug == null || supervisorSlug.equals("")) { error = "Invalid supervisor"; } if (error == null) { StudentDAO studentDAO = new StudentDAO(); String studentSlug = this.getObjectSlug(request); Student student = this.getStudentBySlug(studentSlug); HttpSession session = request.getSession(); User user = (User) session.getAttribute("user"); boolean success = false; if (user.isCoordinator()) { // PC can change everything student.setFirst_name(firstName); student.setLast_name(lastName); student.setEmail(email); student.setProject_title(projectTitle); student.setProject_description(projectDescription); student.setPassword(password); student.setSupervisor(supervisorSlug); success = studentDAO.update(student); if (success) { this.setAlertView(AlertType.AlertTypeSuccess, "Student saved successfuly", request); } else { this.setAlertView(AlertType.AlertTypeDanger, "Student not saved, unexpected error 
occurred", request); } } else if (user.isStudent() && user.getUsername().equals(student.getUsername())) { // Student can only change his own project and email student.setEmail(email); student.setProject_title(projectTitle); student.setProject_description(projectDescription); student.setPassword(password); success = studentDAO.update(student); if (success) { this.setAlertView(AlertType.AlertTypeSuccess, "Student saved successfuly", request); } else { this.setAlertView(AlertType.AlertTypeDanger, "Student not saved, unexpected error occurred", request); } } else { success = false; this.setAlertView(AlertType.AlertTypeDanger, "Access denied", request); } } else { this.setAlertView(AlertType.AlertTypeDanger, error, request); } this.doView(request, response); } public Inspection inspectionForInspectionWeek(Student student, Inspectionweek inspectionWeek) { InspectionDAO inspectionDAO = new InspectionDAO(); Inspection inspection = inspectionDAO.findByStudentAndInspectionWeek(student.getStudent_id(), inspectionWeek.getInspectionweek_id()); return inspection; } @Override public Boolean shouldDenyAcces(HttpServletRequest request) { if (super.shouldDenyAcces(request)) { return true; } HttpSession session = request.getSession(); User user = (User) session.getAttribute("user"); if (user.isCoordinator()) { return false; // coordinator can edit students } else { String userSlug = this.getObjectSlug(request); return !user.getUsername().equals(userSlug); // students can edit their own } } }
package com.github.cukedoctor.util; import java.nio.file.Files; import java.nio.file.Paths; import static com.github.cukedoctor.util.Constants.Attributes.Name.*; import static java.lang.System.getProperty; /** * Created by pestano on 04/06/15. */ public abstract class Constants { public static final String SKIP_DOCS = "@skipDocs"; public static final String BASE_DIR = Files.exists(Paths.get("target")) ? Paths.get("target").toString() : Files.exists(Paths.get("bin")) ? Paths.get("bin").toString() : Paths.get("").toString(); public static final String STOP_WATCH = "cukedoctor.stopwatch"; public static final String DISCRETE = "[discrete]"; public static final String DOCUMENT_TITLE = getProperty("DOCUMENT_TITLE") == null ? "Documentation" : getProperty("DOCUMENT_TITLE"); public static String newLine() { return System.getProperty("line.separator"); } public static String home() { String homeDir = Thread.currentThread().getContextClassLoader().getResource("").getPath(); if (!homeDir.endsWith("/")) { homeDir += "/"; } if (isWindows() && homeDir.startsWith("/")) { homeDir = homeDir.substring(1); } return homeDir; } private static boolean isWindows() { return System.getProperty("os.name").toLowerCase().startsWith("windows"); } public abstract static class Markup { public static String bold(String value) { return "*" + value + "*"; } public static String style(String style, String value) { return "[" + style + "]#" + value + "#"; } public static String H1(String value) { return "= " + value; } public static String H2(String value) { return "== " + value; } public static String H3(String value) { return "=== " + value; } public static String H4(String value) { return "==== " + value; } public static String table() { return "|==="; } public static String tableCol() { return "|"; } public static String listing() { return "----"; } public static String exampleBlock() { return "====="; } } public abstract static class Attributes { public enum Name { TOC("toc"), 
TOCLEVELS("toclevels"), ICONS("icons"), NUMBERED("numbered"), HARDBREAKS("hardbreaks"), LINKCSS("linkcss"), SECTANCHORS("sectanchors"), SECTLINK("sectlink"), BACKEND("backend"), REVNUMBER("revnumber"), DOCTITLE("doctitle"), DOCTYPE("doctype"), DOCINFO("docinfo"), SOURCEHIGHLIGHTER("source-highlighter"), PDFSTYLE("pdf-style"), VERSIONLABEL("version-label"), CHAPTERLABEL("chapter-label"), STEM("stem"), ALLOWURIREAD("allow-uri-read"), DATAURI("data-uri"); final String name; Name(String name) { this.name = name; } public String getName() { return name; } } private static String toAdocAttr(String attrName, Object attrValue) { if (attrValue == null || "".equals(attrValue.toString().trim())) { return ""; } StringBuilder adocAttr = new StringBuilder(); adocAttr.append(":"); if (Boolean.class.isAssignableFrom(attrValue.getClass())) { boolean bool = Boolean.parseBoolean(attrValue.toString()); if (!bool) { adocAttr.append("!"); } adocAttr.append(attrName).append(":"); } else { adocAttr.append(attrName).append(":").append(" ").append(attrValue.toString()); } return adocAttr.toString(); } public static String toc(String value) { return toAdocAttr(TOC.name, value); } public static String tocLevels(String value) { return toAdocAttr(TOCLEVELS.name, value); } public static String icons(String value) { return toAdocAttr(ICONS.name, value); } public static String numbered(boolean numbered) { return toAdocAttr(NUMBERED.name, numbered); } public static String hardBreaks(boolean hardBreaks) { return toAdocAttr(HARDBREAKS.name, hardBreaks); } public static String linkcss(boolean linkcss) { return toAdocAttr(LINKCSS.name, linkcss); } public static String sectAnchors(boolean sectAnchors) { return toAdocAttr(SECTANCHORS.name, sectAnchors); } public static String sectLink(boolean sectLink) { return toAdocAttr(SECTLINK.name, sectLink); } public static String backend(String backend) { return toAdocAttr(BACKEND.name, backend); } public static String revNumber(String revNumber) { return 
toAdocAttr(REVNUMBER.name, revNumber); } public static String docTitle(String docTitle) { return toAdocAttr(DOCTITLE.name, docTitle); } public static String docType(String docType) { return toAdocAttr(DOCTYPE.name, docType); } public static String docInfo(boolean docInfo) { return toAdocAttr(DOCINFO.name, docInfo); } public static String sourceHighlighter(String sourceHighlighter) { return toAdocAttr(SOURCEHIGHLIGHTER.name, sourceHighlighter); } public static String pdfStyle(String pdfStyle) { return toAdocAttr(PDFSTYLE.name, pdfStyle); } public static String versionLabel(String versionLabel) { return toAdocAttr(VERSIONLABEL.name, versionLabel); } public static String chapterLabel(String chapterLabel) { return toAdocAttr(CHAPTERLABEL.name, chapterLabel); } public static String stem(String stem) { return toAdocAttr(STEM.name, stem); } public static String allowUriRead(boolean allowUriRead) { return toAdocAttr(ALLOWURIREAD.name, allowUriRead); } public static String dataUri(Boolean value) { return toAdocAttr(DATAURI.name, value); } } public static <T> T getProp(String property, Class<T> type) { if (property == null) { return null; } return type.cast(getProperty(property)); } public static Boolean getBooleanProperty(String property) { String value = getProperty(property); if (value == null) { return null; } return Boolean.valueOf(getProperty(property)); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.jexl3.internal; import java.lang.reflect.Array; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; /** * A range of integers. */ public abstract class IntegerRange implements Collection<Integer> { /** The lower boundary. */ protected final int min; /** The upper boundary. */ protected final int max; /** * Creates a range, ascending or descending depending on boundaries order. * @param from the lower inclusive boundary * @param to the higher inclusive boundary * @return a range */ public static IntegerRange create(final int from, final int to) { if (from <= to) { return new IntegerRange.Ascending(from, to); } return new IntegerRange.Descending(to, from); } /** * Creates a new range. * @param from the lower inclusive boundary * @param to the higher inclusive boundary */ public IntegerRange(final int from, final int to) { min = from; max = to; } /** * Gets the interval minimum value. * @return the low boundary */ public int getMin() { return min; } /** * Gets the interval maximum value. 
* @return the high boundary */ public int getMax() { return max; } @Override public int hashCode() { int hash = getClass().hashCode(); //CSOFF: MagicNumber hash = 13 * hash + this.min; hash = 13 * hash + this.max; //CSON: MagicNumber return hash; } @Override public boolean equals(final Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final IntegerRange other = (IntegerRange) obj; if (this.min != other.min) { return false; } if (this.max != other.max) { return false; } return true; } @Override public abstract Iterator<Integer> iterator(); @Override public int size() { return max - min + 1; } @Override public boolean isEmpty() { return false; } @Override public boolean contains(final Object o) { if (o instanceof Number) { final long v = ((Number) o).intValue(); return min <= v && v <= max; } return false; } @Override public Object[] toArray() { final int size = size(); final Object[] array = new Object[size]; for(int a = 0; a < size; ++a) { array[a] = min + a; } return array; } @Override @SuppressWarnings("unchecked") public <T> T[] toArray(final T[] array) { final Class<?> ct = array.getClass().getComponentType(); final int length = size(); T[] copy = array; if (ct.isAssignableFrom(Integer.class)) { if (array.length < length) { copy = (T[]) Array.newInstance(ct, length); } for (int a = 0; a < length; ++a) { Array.set(copy, a, min + a); } if (length < copy.length) { copy[length] = null; } return copy; } throw new UnsupportedOperationException(); } @Override public boolean containsAll(final Collection<?> c) { for(final Object cc : c) { if (!contains(cc)) { return false; } } return true; } @Override public boolean add(final Integer e) { throw new UnsupportedOperationException(); } @Override public boolean remove(final Object o) { throw new UnsupportedOperationException(); } @Override public boolean addAll(final Collection<? 
extends Integer> c) { throw new UnsupportedOperationException(); } @Override public boolean removeAll(final Collection<?> c) { throw new UnsupportedOperationException(); } @Override public boolean retainAll(final Collection<?> c) { throw new UnsupportedOperationException(); } @Override public void clear() { throw new UnsupportedOperationException(); } /** * Ascending integer range. */ public static class Ascending extends IntegerRange { /** * Constructor. * @param from lower boundary * @param to upper boundary */ protected Ascending(final int from, final int to) { super(from, to); } @Override public Iterator<Integer> iterator() { return new AscIntegerIterator(min, max); } } /** * Descending integer range. */ public static class Descending extends IntegerRange { /** * Constructor. * @param from upper boundary * @param to lower boundary */ protected Descending(final int from, final int to) { super(from, to); } @Override public Iterator<Integer> iterator() { return new DescIntegerIterator(min, max); } } } /** * An ascending iterator on an integer range. */ class AscIntegerIterator implements Iterator<Integer> { /** The lower boundary. */ private final int min; /** The upper boundary. */ private final int max; /** The current value. */ private int cursor; /** * Creates a iterator on the range. * @param l low boundary * @param h high boundary */ public AscIntegerIterator(final int l, final int h) { min = l; max = h; cursor = min; } @Override public boolean hasNext() { return cursor <= max; } @Override public Integer next() { if (cursor <= max) { return cursor++; } throw new NoSuchElementException(); } @Override public void remove() { throw new UnsupportedOperationException("Not supported."); } } /** * A descending iterator on an integer range. */ class DescIntegerIterator implements Iterator<Integer> { /** The lower boundary. */ private final int min; /** The upper boundary. */ private final int max; /** The current value. 
*/ private int cursor; /** * Creates a iterator on the range. * @param l low boundary * @param h high boundary */ public DescIntegerIterator(final int l, final int h) { min = l; max = h; cursor = max; } @Override public boolean hasNext() { return cursor >= min; } @Override public Integer next() { if (cursor >= min) { return cursor--; } throw new NoSuchElementException(); } @Override public void remove() { throw new UnsupportedOperationException("Not supported."); } }
// ---------------------------------------------------------------------------- // Copyright 2007-2017, GeoTelematic Solutions, Inc. // All rights reserved // ---------------------------------------------------------------------------- // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // ---------------------------------------------------------------------------- // Change History: // 2008/12/01 Martin D. Flynn // -Initial release // 2011/08/21 Martin D. Flynn // -Moved "button-icon" text to below the icon // 2016/09/01 Martin D. Flynn // -Switched to using "DIV" for menu button layout. 
// ---------------------------------------------------------------------------- package org.opengts.war.track; import java.util.*; import java.io.*; import javax.servlet.*; import javax.servlet.http.*; import org.opengts.util.*; import org.opengts.db.tables.*; import org.opengts.war.tools.*; public class IconMenu { private static final String BLANK_IMAGE = "images/blank.png"; // ------------------------------------------------------------------------ private static final String PROP_iconMenuCSS_menuItemImage_on_background = "iconMenuCSS.menuItemImage_on.background"; private static final String PROP_iconMenuCSS_menuItemImage_off_background = "iconMenuCSS.menuItemImage_off.background"; private static final String PROP_iconMenuCSS_menuItemText_on_background = "iconMenuCSS.menuItemText_on.background"; private static final String PROP_iconMenuCSS_menuItemText_off_background = "iconMenuCSS.menuItemText_off.background"; private static final String PROP_iconMenuCSS_menuItemTextW_on_background = "iconMenuCSS.menuItemTextW_on.background"; private static final String PROP_iconMenuCSS_menuItemTextW_off_background = "iconMenuCSS.menuItemTextW_off.background"; // ------------------------------------------------------------------------ // -- write Style public static void writeStyle(PrintWriter out, RequestProperties reqState) throws IOException { WebPageAdaptor.writeCssLink(out, reqState, "IconMenu.css", null); // -- overrides below PrivateLabel pl = reqState.getPrivateLabel(); String indent = " "; String menuItemText_on = pl.getStringProperty(PROP_iconMenuCSS_menuItemText_on_background, null); String menuItemText_off = pl.getStringProperty(PROP_iconMenuCSS_menuItemText_off_background, null); String menuItemTextW_on = pl.getStringProperty(PROP_iconMenuCSS_menuItemTextW_on_background, null); String menuItemTextW_off = pl.getStringProperty(PROP_iconMenuCSS_menuItemTextW_off_background, null); String menuItemImage_on = 
pl.getStringProperty(PROP_iconMenuCSS_menuItemImage_on_background, null); String menuItemImage_off = pl.getStringProperty(PROP_iconMenuCSS_menuItemImage_off_background, null); // -- <style type="text/css"> boolean writeEndScript = false; if ((menuItemImage_on != null) || (menuItemImage_off != null) || (menuItemText_on != null) || (menuItemText_off != null) || (menuItemTextW_on != null) || (menuItemTextW_off != null) ) { out.println(indent+"<style type=\"text/css\">"); writeEndScript = true; } // -- menuItemText_on, menuItemText_off // - Icon/Text over Button if (!StringTools.isBlank(menuItemText_on)) { String imgURL = menuItemText_on; if (imgURL.equalsIgnoreCase("default")) { imgURL = "./images/MenuBtnH.png"; } else if (imgURL.equalsIgnoreCase("transparent") || imgURL.equalsIgnoreCase("xparent")) { imgURL = "./images/MenuBtnH_X.png"; // MenuBtnH_X } //out.println(indent+" #iconMenu TD.menuItemText_on {"); //out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtnH.png" //out.println(indent+" }"); out.println(indent+" #iconMenu DIV.menuItemText_on {"); out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtnH.png" out.println(indent+" }"); } if (!StringTools.isBlank(menuItemText_off)) { String imgURL = menuItemText_off; if (imgURL.equalsIgnoreCase("default")) { imgURL = "./images/MenuBtn.png"; } else if (imgURL.equalsIgnoreCase("transparent") || imgURL.equalsIgnoreCase("xparent")) { imgURL = "./images/MenuBtn_X.png"; } //out.println(indent+" #iconMenu TD.menuItemText_off {"); //out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtn.png" //out.println(indent+" }"); out.println(indent+" #iconMenu DIV.menuItemText_off {"); out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtn.png" out.println(indent+" }"); } // -- menuItemTextW_on, menuItemTextW_off // - Icon/Text over Button(wide) if (!StringTools.isBlank(menuItemTextW_on)) { String imgURL = 
menuItemTextW_on; if (imgURL.equalsIgnoreCase("default")) { imgURL = "./images/MenuBtnH_lg.png"; } else if (imgURL.equalsIgnoreCase("transparent") || imgURL.equalsIgnoreCase("xparent")) { imgURL = "./images/MenuBtnH_X.png"; } //out.println(indent+" #iconMenu TD.menuItemTextW_on {"); //out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtnH_lg.png" //out.println(indent+" }"); out.println(indent+" #iconMenu DIV.menuItemTextW_on {"); out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtnH_lg.png" out.println(indent+" }"); } if (!StringTools.isBlank(menuItemTextW_off)) { String imgURL = menuItemTextW_off; if (imgURL.equalsIgnoreCase("default")) { imgURL = "./images/MenuBtn_lg.png"; } else if (imgURL.equalsIgnoreCase("transparent") || imgURL.equalsIgnoreCase("xparent")) { imgURL = "./images/MenuBtn_X.png"; } //out.println(indent+" #iconMenu TD.menuItemTextW_off {"); //out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtn_lg.png" //out.println(indent+" }"); out.println(indent+" #iconMenu DIV.menuItemTextW_off {"); out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtn_lg.png" out.println(indent+" }"); } // -- menuItemImage_on, menuItemImage_off // - Text over Button if (!StringTools.isBlank(menuItemImage_on)) { String imgURL = menuItemImage_on; if (imgURL.equalsIgnoreCase("default")) { imgURL = "./images/MenuBtnH.png"; } else if (imgURL.equalsIgnoreCase("transparent") || imgURL.equalsIgnoreCase("xparent")) { imgURL = "./images/MenuBtnH_X.png"; } //out.println(indent+" #iconMenu TD.menuItemImage_on {"); //out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtnH.png" //out.println(indent+" }"); out.println(indent+" #iconMenu DIV.menuItemImage_on {"); out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtnH.png" out.println(indent+" }"); } if (!StringTools.isBlank(menuItemImage_off)) { 
String imgURL = menuItemImage_off; if (imgURL.equalsIgnoreCase("default")) { imgURL = "./images/MenuBtn.png"; } else if (imgURL.equalsIgnoreCase("transparent") || imgURL.equalsIgnoreCase("xparent")) { imgURL = "./images/MenuBtn_X.png"; } //out.println(indent+" #iconMenu TD.menuItemImage_off {"); //out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtn.png" //out.println(indent+" }"); out.println(indent+" #iconMenu DIV.menuItemImage_off {"); out.println(indent+" background: url('"+imgURL+"') no-repeat;"); // "../images/MenuBtn.png" out.println(indent+" }"); } // -- </script> if (writeEndScript) { out.println(indent+"</style>"); } } // ------------------------------------------------------------------------ // -- write JavaScript public static void writeJavaScript(PrintWriter out, RequestProperties reqState) throws IOException { HttpServletRequest request = reqState.getHttpServletRequest(); JavaScriptTools.writeJSInclude(out, JavaScriptTools.qualifyJSFileRef("IconMenu.js"), request); } // ------------------------------------------------------------------------ // -- write JavaScript public static void writeMenu(PrintWriter out, RequestProperties reqState, String menuID, int maxIconsPerRow, boolean showIcon) throws IOException { PrivateLabel privLabel = reqState.getPrivateLabel(); Locale locale = reqState.getLocale(); String parentPageName = null; Account account = reqState.getCurrentAccount(); /* sub style classes */ // -- these are defined in "IconMenu.css" String topMenuID = "iconMenu"; String menuTableClass = "iconMenuTable"; String groupTitleClass = "menuGroupTitle"; String groupIconsClass = "menuGroupIcons"; String menuItemTable = "menuItemTable"; String menuItemRow = "menuItemRow"; String menuItemImage_on = "menuItemImage_on"; String menuItemImage_off = "menuItemImage_off"; String menuItemImage = "menuItemImage"; String menuItemText_on = "menuItemText_on"; String menuItemText_off = "menuItemText_off"; String menuItemText = 
"menuItemText"; String menuIconImage = "menuIconImage"; /* start menu */ out.println("<table id='"+topMenuID+"' class='"+menuTableClass+"' cellpadding='0' cellspacing='0' border='0' width='100%'>"); /* iterate through menu groups */ Map<String,MenuGroup> menuMap = privLabel.getMenuGroupMap(); for (String mgn : menuMap.keySet()) { MenuGroup mg = menuMap.get(mgn); if (!mg.showInTopMenu()) { continue; // skip this group } int rowIconCount = 0; boolean didDisplayGroup = false; for (WebPage wp : mg.getWebPageList(reqState)) { String menuName = wp.getPageName(); String iconImg = showIcon? wp.getMenuIconImage() : null; String buttonImg = wp.getMenuButtonImage(); String buttonAlt = wp.getMenuButtonAltImage(); String url = wp.encodePageURL(reqState);//, RequestProperties.TRACK_BASE_URI()); /* skip login page */ if (menuName.equalsIgnoreCase(Constants.PAGE_LOGIN)) { //Print.logInfo("Skipping Login menu item: " + menuName); continue; // omit login } /* skip sysAdmin pages */ if (wp.systemAdminOnly() && !Account.isSystemAdmin(account)) { //Print.logInfo("Skipping SysAdmin menu item: " + menuName); continue; } /* skip pages that are not ok to display */ if (!wp.isOkToDisplay(reqState)) { continue; } /* start menu group */ if (!didDisplayGroup) { didDisplayGroup = true; out.write("\n"); out.write("<!-- "+mg.getTitle(null)+" -->\n"); out.write("<tr class='"+groupTitleClass+"'><td class='"+groupTitleClass+"' width='100%'>"+mg.getTitle(locale)+"</td></tr>\n"); out.write("<tr class='"+groupIconsClass+"'><td class='"+groupIconsClass+"' width='100%'>\n"); // -- //out.write("<table class='"+menuItemTable+"' border='0'>\n"); //cellspacing='0' cellpadding='0' //out.write("<tr class='"+menuItemRow+"'>\n"); out.write("<div class='"+menuItemRow+"'>\n"); } /* wrap to next line? 
*/ //if ((maxIconsPerRow > 0) && (rowIconCount >= maxIconsPerRow)) { // out.write("</tr>\n"); // out.write("<tr class='"+menuItemRow+"'>\n"); // rowIconCount = 0; //} /* menu description */ // -- replace all spaces with a newline "<BR>" String menuDesc = StringTools.trim(wp.getNavigationDescription(reqState)); // short menuDesc = filterButtonMenuDescription(menuDesc); /* menu help */ String menuHelp = StringTools.trim(wp.getMenuHelp(reqState, parentPageName)); /* icon */ String classOff = !StringTools.isBlank(buttonImg)? menuItemImage_off : menuItemText_off; String classOn = !StringTools.isBlank(buttonImg)? menuItemImage_on : menuItemText_on; String target = StringTools.blankDefault(wp.getTarget(),"_self"); // (wp instanceof WebPageURL)? ((WebPageURL)wp).getTarget() : "_self"; String onclick = "javascript:openURL('"+url+"','"+target+"')"; if (!target.startsWith("_")) { PixelDimension pixDim = wp.getWindowDimension(); if (pixDim != null) { int W = pixDim.getWidth(); int H = pixDim.getHeight(); onclick = "javascript:openFixedWindow('"+url+"','"+target+"',"+W+","+H+")"; } } //out.write(" <td class='"+classOff+"' title=\""+menuHelp+"\""+ // " onmouseover=\"this.className='"+classOn+"'\""+ // " onmouseout=\"this.className='"+classOff+"'\""+ // " onclick=\""+onclick+"\""+ // ">"); out.write(" <div class='"+classOff+"' title=\""+menuHelp+"\""+ " onmouseover=\"this.className='"+classOn+"'\""+ " onmouseout=\"this.className='"+classOff+"'\""+ " onclick=\""+onclick+"\""+ ">"); if (StringTools.isBlank(buttonImg)) { // -- draw text over background image, include icon if specified if (!StringTools.isBlank(iconImg)) { out.write("<img class='"+menuIconImage+"' border='0' src='"+iconImg+"'/>"); out.write("<br>"); } out.write("<span class='"+menuItemText+"'>"+menuDesc+"</span>"); } else { // -- draw the main button image itself (no text) out.write("<img class='"+menuItemImage+"' border='0' src='"+buttonImg+"'"); if (!StringTools.isBlank(buttonAlt)) { out.write(" 
onmouseover=\"this.src='"+buttonAlt+"'\""); out.write(" onmouseout=\"this.src='" +buttonImg+"'\""); } out.write("/>"); } //out.write("</td>\n"); out.write("</div>\n"); rowIconCount++; } /* end menu group */ if (didDisplayGroup) { //out.write("</tr>\n"); //out.write("</table>\n"); out.write("</div>\n"); // -- out.write("</td></tr>\n"); out.write("\n"); } } /* end of menu */ out.write("</table>\n"); } /* break menu description text into reasonable length lines */ private static String filterButtonMenuDescription(String str) { String s[] = StringTools.split(str, ' '); StringBuffer sb = new StringBuffer(); int len = 0; for (int i = 0; i < s.length; i++) { String x = s[i].trim(); int xlen = x.length(); if (xlen == 0) { continue; } else if ((len + 1 + xlen) > 13) { sb.append("<BR>"); len = 0; } else { sb.append(" "); len += 1; } sb.append(x); len += xlen; } return sb.toString(); // StringTools.replace(s, " ", "<BR>"); } // ------------------------------------------------------------------------ }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.oracle.model; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.ext.oracle.model.source.OracleSourceObject; import org.jkiss.dbeaver.ext.oracle.model.source.OracleStatefulObject; import org.jkiss.dbeaver.model.*; import org.jkiss.dbeaver.model.edit.DBEPersistAction; import org.jkiss.dbeaver.model.exec.DBCException; import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement; import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet; import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession; import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer; import org.jkiss.dbeaver.model.impl.DBSObjectCache; import org.jkiss.dbeaver.model.impl.edit.SQLDatabasePersistAction; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.struct.DBSObject; import org.jkiss.dbeaver.model.struct.DBSObjectLazy; import org.jkiss.dbeaver.model.struct.DBStructUtils; import org.jkiss.utils.CommonUtils; import org.jkiss.utils.IOUtils; import java.io.IOException; import java.io.PrintWriter; import java.io.Reader; import java.io.StringWriter; import java.sql.Clob; import java.sql.SQLException; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.regex.Matcher; 
import java.util.regex.Pattern; /** * Oracle utils */ public class OracleUtils { private static final Log log = Log.getLog(OracleUtils.class); public static String getDDL( DBRProgressMonitor monitor, String objectType, OracleTableBase object, OracleDDLFormat ddlFormat, Map<String, Object> options) throws DBException { String objectFullName = DBUtils.getObjectFullName(object, DBPEvaluationContext.DDL); OracleSchema schema = object.getContainer(); /* if (object instanceof OracleSchemaObject) { schema = ((OracleSchemaObject)object).getSchema(); } else if (object instanceof OracleTableBase) { schema = ((OracleTableBase)object).getContainer(); } */ final OracleDataSource dataSource = object.getDataSource(); monitor.beginTask("Load sources for " + objectType + " '" + objectFullName + "'...", 1); try (final JDBCSession session = DBUtils.openMetaSession(monitor, object, "Load source code for " + objectType + " '" + objectFullName + "'")) { if (dataSource.isAtLeastV9()) { try { // Do not add semicolon in the end // JDBCUtils.executeProcedure( // session, // "begin DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM,'SQLTERMINATOR',true); end;"); JDBCUtils.executeProcedure( session, "begin\n" + "DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM,'SQLTERMINATOR',true);\n" + "DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM,'STORAGE'," + ddlFormat.isShowStorage() + ");\n" + "DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM,'TABLESPACE'," + ddlFormat.isShowTablespace() + ");\n" + "DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM,'SEGMENT_ATTRIBUTES'," + ddlFormat.isShowSegments() + ");\n" + "end;"); } catch (SQLException e) { log.error("Can't apply DDL transform parameters", e); } } String ddl; try (JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT DBMS_METADATA.GET_DDL(?,?" + (schema == null ? 
"" : ",?") + ") TXT FROM DUAL")) { dbStat.setString(1, objectType); dbStat.setString(2, object.getName()); if (schema != null) { dbStat.setString(3, schema.getName()); } try (JDBCResultSet dbResult = dbStat.executeQuery()) { if (dbResult.next()) { Object ddlValue = dbResult.getObject(1); if (ddlValue instanceof Clob) { StringWriter buf = new StringWriter(); try (Reader clobReader = ((Clob) ddlValue).getCharacterStream()) { IOUtils.copyText(clobReader, buf); } catch (IOException e) { e.printStackTrace(new PrintWriter(buf, true)); } ddl = buf.toString(); } else { ddl = CommonUtils.toString(ddlValue); } } else { log.warn("No DDL for " + objectType + " '" + objectFullName + "'"); return "-- EMPTY DDL"; } } } if (ddlFormat != OracleDDLFormat.COMPACT) { try (JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT DBMS_METADATA.GET_DEPENDENT_DDL('COMMENT',?" + (schema == null ? "" : ",?") + ") TXT FROM DUAL")) { dbStat.setString(1, object.getName()); if (schema != null) { dbStat.setString(2, schema.getName()); } try (JDBCResultSet dbResult = dbStat.executeQuery()) { if (dbResult.next()) { ddl += "\n" + dbResult.getString(1); } } } catch (Exception e) { // No dependent DDL or something went wrong log.debug("Error reading dependent DDL", e); } } return ddl; } catch (SQLException e) { if (object instanceof OracleTablePhysical) { log.error("Error generating Oracle DDL. 
Generate default.", e); return DBStructUtils.generateTableDDL(monitor, object, options, true); } else { throw new DBException(e, dataSource); } } finally { monitor.done(); } } public static void setCurrentSchema(JDBCSession session, String schema) throws SQLException { JDBCUtils.executeSQL(session, "ALTER SESSION SET CURRENT_SCHEMA=" + DBUtils.getQuotedIdentifier(session.getDataSource(), schema)); } public static String getCurrentSchema(JDBCSession session) throws SQLException { return JDBCUtils.queryString( session, "SELECT SYS_CONTEXT( 'USERENV', 'CURRENT_SCHEMA' ) FROM DUAL"); } public static String normalizeSourceName(OracleSourceObject object, boolean body) { try { String source = body ? ((DBPScriptObjectExt)object).getExtendedDefinitionText(null) : object.getObjectDefinitionText(null, DBPScriptObject.EMPTY_OPTIONS); if (source == null) { return null; } java.util.regex.Pattern pattern = java.util.regex.Pattern.compile( object.getSourceType() + (body ? "\\s+BODY" : "") + "\\s(\\s*)([\\w$\\.]+)[\\s\\(]+", java.util.regex.Pattern.CASE_INSENSITIVE); final Matcher matcher = pattern.matcher(source); if (matcher.find()) { String objectName = matcher.group(2); if (objectName.indexOf('.') == -1) { if (!objectName.equalsIgnoreCase(object.getName())) { object.setName(DBObjectNameCaseTransformer.transformObjectName(object, objectName)); object.getDataSource().getContainer().fireEvent(new DBPEvent(DBPEvent.Action.OBJECT_UPDATE, object)); } return source;//.substring(0, matcher.start(1)) + object.getSchema().getName() + "." 
+ objectName + source.substring(matcher.end(2)); } } return source.trim(); } catch (DBException e) { log.error(e); return null; } } public static void addSchemaChangeActions(List<DBEPersistAction> actions, OracleSourceObject object) { actions.add(0, new SQLDatabasePersistAction( "Set target schema", "ALTER SESSION SET CURRENT_SCHEMA=" + object.getSchema().getName(), DBEPersistAction.ActionType.INITIALIZER)); if (object.getSchema() != object.getDataSource().getDefaultObject()) { actions.add(new SQLDatabasePersistAction( "Set current schema", "ALTER SESSION SET CURRENT_SCHEMA=" + object.getDataSource().getDefaultObject().getName(), DBEPersistAction.ActionType.FINALIZER)); } } public static String getSysSchemaPrefix(OracleDataSource dataSource) { boolean useSysView = CommonUtils.toBoolean(dataSource.getContainer().getConnectionConfiguration().getProviderProperty(OracleConstants.PROP_METADATA_USE_SYS_SCHEMA)); if (useSysView) { return OracleConstants.SCHEMA_SYS + "."; } else { return ""; } } public static String getSource(DBRProgressMonitor monitor, OracleSourceObject sourceObject, boolean body, boolean insertCreateReplace) throws DBCException { if (sourceObject.getSourceType().isCustom()) { log.warn("Can't read source for custom source objects"); return "-- ???? 
CUSTOM SOURCE"; } final String sourceType = sourceObject.getSourceType().name(); final OracleSchema sourceOwner = sourceObject.getSchema(); if (sourceOwner == null) { log.warn("No source owner for object '" + sourceObject.getName() + "'"); return null; } monitor.beginTask("Load sources for '" + sourceObject.getName() + "'...", 1); String sysViewName = OracleConstants.VIEW_DBA_SOURCE; if (!sourceObject.getDataSource().isViewAvailable(monitor, OracleConstants.SCHEMA_SYS, sysViewName)) { sysViewName = OracleConstants.VIEW_ALL_SOURCE; } try (final JDBCSession session = DBUtils.openMetaSession(monitor, sourceOwner, "Load source code for " + sourceType + " '" + sourceObject.getName() + "'")) { try (JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT TEXT FROM " + getSysSchemaPrefix(sourceObject.getDataSource()) + sysViewName + " " + "WHERE TYPE=? AND OWNER=? AND NAME=? " + "ORDER BY LINE")) { dbStat.setString(1, body ? sourceType + " BODY" : sourceType); dbStat.setString(2, sourceOwner.getName()); dbStat.setString(3, sourceObject.getName()); dbStat.setFetchSize(DBConstants.METADATA_FETCH_SIZE); try (JDBCResultSet dbResult = dbStat.executeQuery()) { StringBuilder source = null; int lineCount = 0; while (dbResult.next()) { if (monitor.isCanceled()) { break; } final String line = dbResult.getString(1); if (source == null) { source = new StringBuilder(200); } source.append(line); lineCount++; monitor.subTask("Line " + lineCount); } if (source == null) { return null; } if (insertCreateReplace) { return insertCreateReplace(sourceObject, body, source.toString()); } else { return source.toString(); } } } } catch (SQLException e) { throw new DBCException(e, sourceOwner.getDataSource()); } finally { monitor.done(); } } public static String getSysUserViewName(DBRProgressMonitor monitor, OracleDataSource dataSource, String viewName) { String dbaView = "DBA_" + viewName; if (dataSource.isViewAvailable(monitor, OracleConstants.SCHEMA_SYS, dbaView)) { return 
OracleUtils.getSysSchemaPrefix(dataSource) + dbaView; } else { return OracleUtils.getSysSchemaPrefix(dataSource) + "USER_" + viewName; } } public static String getAdminAllViewPrefix(DBRProgressMonitor monitor, OracleDataSource dataSource, String viewName) { boolean useDBAView = CommonUtils.toBoolean(dataSource.getContainer().getConnectionConfiguration().getProviderProperty(OracleConstants.PROP_ALWAYS_USE_DBA_VIEWS)); if (useDBAView) { String dbaView = "DBA_" + viewName; if (dataSource.isViewAvailable(monitor, OracleConstants.SCHEMA_SYS, dbaView)) { return OracleUtils.getSysSchemaPrefix(dataSource) + dbaView; } } return OracleUtils.getSysSchemaPrefix(dataSource) + "ALL_" + viewName; } public static String getSysCatalogHint(OracleDataSource dataSource) { return dataSource.isUseRuleHint() ? "/*+RULE*/" : ""; } static <PARENT extends DBSObject> Object resolveLazyReference( DBRProgressMonitor monitor, PARENT parent, DBSObjectCache<PARENT,?> cache, DBSObjectLazy<?> referrer, Object propertyId) throws DBException { final Object reference = referrer.getLazyReference(propertyId); if (reference instanceof String) { Object object; if (monitor != null) { object = cache.getObject( monitor, parent, (String) reference); } else { object = cache.getCachedObject((String) reference); } if (object != null) { return object; } else { log.warn("Object '" + reference + "' not found"); return reference; } } else { return reference; } } public static boolean getObjectStatus( DBRProgressMonitor monitor, OracleStatefulObject object, OracleObjectType objectType) throws DBCException { try (JDBCSession session = DBUtils.openMetaSession(monitor, object, "Refresh state of " + objectType.getTypeName() + " '" + object.getName() + "'")) { try (JDBCPreparedStatement dbStat = session.prepareStatement( "SELECT STATUS FROM " + OracleUtils.getAdminAllViewPrefix(monitor, object.getDataSource(), "OBJECTS") + " WHERE OBJECT_TYPE=? AND OWNER=? 
AND OBJECT_NAME=?")) { dbStat.setString(1, objectType.getTypeName()); dbStat.setString(2, object.getSchema().getName()); dbStat.setString(3, DBObjectNameCaseTransformer.transformObjectName(object, object.getName())); try (JDBCResultSet dbResult = dbStat.executeQuery()) { if (dbResult.next()) { return "VALID".equals(dbResult.getString("STATUS")); } else { log.warn(objectType.getTypeName() + " '" + object.getName() + "' not found in system dictionary"); return false; } } } } catch (SQLException e) { throw new DBCException(e, object.getDataSource()); } } public static String insertCreateReplace(OracleSourceObject object, boolean body, String source) { String sourceType = object.getSourceType().name(); if (body) { sourceType += " BODY"; } Pattern srcPattern = Pattern.compile("^(" + sourceType + ")\\s+(\"{0,1}\\w+\"{0,1})", Pattern.CASE_INSENSITIVE); Matcher matcher = srcPattern.matcher(source); if (matcher.find()) { return "CREATE OR REPLACE " + matcher.group(1) + " " + DBUtils.getQuotedIdentifier(object.getSchema()) + "." + matcher.group(2) + source.substring(matcher.end()); } return source; } public static String formatWord(String word) { if (word == null) { return ""; } StringBuilder sb = new StringBuilder(word.length()); sb.append(Character.toUpperCase(word.charAt(0))); for (int i = 1; i < word.length(); i++) { char c = word.charAt(i); if ((c == 'i' || c == 'I') && sb.charAt(i - 1) == 'I') { sb.append('I'); } else { sb.append(Character.toLowerCase(c)); } } return sb.toString(); } public static String formatSentence(String sent) { if (sent == null) { return ""; } StringBuilder result = new StringBuilder(); StringTokenizer st = new StringTokenizer(sent, " \t\n\r-,.\\/", true); while (st.hasMoreTokens()) { String word = st.nextToken(); if (word.length() > 0) { result.append(formatWord(word)); } } return result.toString(); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.cloudtrace.v2beta1; /** * Service definition for CloudTrace (v2beta1). * * <p> * Sends application trace data to Cloud Trace for viewing. Trace data is collected for all App Engine applications by default. Trace data from other applications can be provided using this API. This library is used to interact with the Cloud Trace API directly. If you are looking to instrument your application for Cloud Trace, we recommend using OpenCensus. * </p> * * <p> * For more information about this service, see the * <a href="https://cloud.google.com/trace" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link CloudTraceRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class CloudTrace extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. 
" + "You need at least version 1.15 of google-api-client to run version " + "1.30.10 of the Cloud Trace API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://cloudtrace.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. 
* </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public CloudTrace(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ CloudTrace(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Projects collection. * * <p>The typical use is:</p> * <pre> * {@code CloudTrace cloudtrace = new CloudTrace(...);} * {@code CloudTrace.Projects.List request = cloudtrace.projects().list(parameters ...)} * </pre> * * @return the resource collection */ public Projects projects() { return new Projects(); } /** * The "projects" collection of methods. */ public class Projects { /** * An accessor for creating requests from the TraceSinks collection. 
* * <p>The typical use is:</p> * <pre> * {@code CloudTrace cloudtrace = new CloudTrace(...);} * {@code CloudTrace.TraceSinks.List request = cloudtrace.traceSinks().list(parameters ...)} * </pre> * * @return the resource collection */ public TraceSinks traceSinks() { return new TraceSinks(); } /** * The "traceSinks" collection of methods. */ public class TraceSinks { /** * Creates a sink that exports trace spans to a destination. The export of newly-ingested traces * begins immediately, unless the sink's `writer_identity` is not permitted to write to the * destination. A sink can export traces only from the resource owning the sink (the 'parent'). * * Create a request for the method "traceSinks.create". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link Create#execute()} method to invoke the remote operation. * * @param parent Required. The resource in which to create the sink (currently only project sinks are supported): * "projects/[PROJECT_ID]" Examples: `"projects/my-trace-project"`, `"projects/123456789"`. * @param content the {@link com.google.api.services.cloudtrace.v2beta1.model.TraceSink} * @return the request */ public Create create(java.lang.String parent, com.google.api.services.cloudtrace.v2beta1.model.TraceSink content) throws java.io.IOException { Create result = new Create(parent, content); initialize(result); return result; } public class Create extends CloudTraceRequest<com.google.api.services.cloudtrace.v2beta1.model.TraceSink> { private static final String REST_PATH = "v2beta1/{+parent}/traceSinks"; private final java.util.regex.Pattern PARENT_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+$"); /** * Creates a sink that exports trace spans to a destination. The export of newly-ingested traces * begins immediately, unless the sink's `writer_identity` is not permitted to write to the * destination. 
A sink can export traces only from the resource owning the sink (the 'parent'). * * Create a request for the method "traceSinks.create". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link Create#execute()} method to invoke the remote operation. * <p> {@link * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param parent Required. The resource in which to create the sink (currently only project sinks are supported): * "projects/[PROJECT_ID]" Examples: `"projects/my-trace-project"`, `"projects/123456789"`. * @param content the {@link com.google.api.services.cloudtrace.v2beta1.model.TraceSink} * @since 1.13 */ protected Create(java.lang.String parent, com.google.api.services.cloudtrace.v2beta1.model.TraceSink content) { super(CloudTrace.this, "POST", REST_PATH, content, com.google.api.services.cloudtrace.v2beta1.model.TraceSink.class); this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^projects/[^/]+$"); } } @Override public Create set$Xgafv(java.lang.String $Xgafv) { return (Create) super.set$Xgafv($Xgafv); } @Override public Create setAccessToken(java.lang.String accessToken) { return (Create) super.setAccessToken(accessToken); } @Override public Create setAlt(java.lang.String alt) { return (Create) super.setAlt(alt); } @Override public Create setCallback(java.lang.String callback) { return (Create) super.setCallback(callback); } @Override public Create setFields(java.lang.String fields) { return (Create) super.setFields(fields); } @Override public Create setKey(java.lang.String key) { return 
(Create) super.setKey(key); } @Override public Create setOauthToken(java.lang.String oauthToken) { return (Create) super.setOauthToken(oauthToken); } @Override public Create setPrettyPrint(java.lang.Boolean prettyPrint) { return (Create) super.setPrettyPrint(prettyPrint); } @Override public Create setQuotaUser(java.lang.String quotaUser) { return (Create) super.setQuotaUser(quotaUser); } @Override public Create setUploadType(java.lang.String uploadType) { return (Create) super.setUploadType(uploadType); } @Override public Create setUploadProtocol(java.lang.String uploadProtocol) { return (Create) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource in which to create the sink (currently only project sinks are * supported): "projects/[PROJECT_ID]" Examples: `"projects/my-trace-project"`, * `"projects/123456789"`. */ @com.google.api.client.util.Key private java.lang.String parent; /** Required. The resource in which to create the sink (currently only project sinks are supported): "projects/[PROJECT_ID]" Examples: `"projects/my-trace-project"`, `"projects/123456789"`. */ public java.lang.String getParent() { return parent; } /** * Required. The resource in which to create the sink (currently only project sinks are * supported): "projects/[PROJECT_ID]" Examples: `"projects/my-trace-project"`, * `"projects/123456789"`. */ public Create setParent(java.lang.String parent) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^projects/[^/]+$"); } this.parent = parent; return this; } @Override public Create set(String parameterName, Object value) { return (Create) super.set(parameterName, value); } } /** * Deletes a sink. * * Create a request for the method "traceSinks.delete". * * This request holds the parameters needed by the cloudtrace server. 
After setting any optional * parameters, call the {@link Delete#execute()} method to invoke the remote operation. * * @param name Required. The full resource name of the sink to delete, including the parent resource and the sink * identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. * @return the request */ public Delete delete(java.lang.String name) throws java.io.IOException { Delete result = new Delete(name); initialize(result); return result; } public class Delete extends CloudTraceRequest<com.google.api.services.cloudtrace.v2beta1.model.Empty> { private static final String REST_PATH = "v2beta1/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/traceSinks/[^/]+$"); /** * Deletes a sink. * * Create a request for the method "traceSinks.delete". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link Delete#execute()} method to invoke the remote operation. * <p> {@link * Delete#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The full resource name of the sink to delete, including the parent resource and the sink * identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. 
* @since 1.13 */ protected Delete(java.lang.String name) { super(CloudTrace.this, "DELETE", REST_PATH, null, com.google.api.services.cloudtrace.v2beta1.model.Empty.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/traceSinks/[^/]+$"); } } @Override public Delete set$Xgafv(java.lang.String $Xgafv) { return (Delete) super.set$Xgafv($Xgafv); } @Override public Delete setAccessToken(java.lang.String accessToken) { return (Delete) super.setAccessToken(accessToken); } @Override public Delete setAlt(java.lang.String alt) { return (Delete) super.setAlt(alt); } @Override public Delete setCallback(java.lang.String callback) { return (Delete) super.setCallback(callback); } @Override public Delete setFields(java.lang.String fields) { return (Delete) super.setFields(fields); } @Override public Delete setKey(java.lang.String key) { return (Delete) super.setKey(key); } @Override public Delete setOauthToken(java.lang.String oauthToken) { return (Delete) super.setOauthToken(oauthToken); } @Override public Delete setPrettyPrint(java.lang.Boolean prettyPrint) { return (Delete) super.setPrettyPrint(prettyPrint); } @Override public Delete setQuotaUser(java.lang.String quotaUser) { return (Delete) super.setQuotaUser(quotaUser); } @Override public Delete setUploadType(java.lang.String uploadType) { return (Delete) super.setUploadType(uploadType); } @Override public Delete setUploadProtocol(java.lang.String uploadProtocol) { return (Delete) super.setUploadProtocol(uploadProtocol); } /** * Required. The full resource name of the sink to delete, including the parent resource and * the sink identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. 
*/ @com.google.api.client.util.Key private java.lang.String name; /** Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: `"projects/12345/traceSinks /my-sink-id"`. */ public java.lang.String getName() { return name; } /** * Required. The full resource name of the sink to delete, including the parent resource and * the sink identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. */ public Delete setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/traceSinks/[^/]+$"); } this.name = name; return this; } @Override public Delete set(String parameterName, Object value) { return (Delete) super.set(parameterName, value); } } /** * Get a trace sink by name under the parent resource (GCP project). * * Create a request for the method "traceSinks.get". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link Get#execute()} method to invoke the remote operation. * * @param name Required. The resource name of the sink: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. * @return the request */ public Get get(java.lang.String name) throws java.io.IOException { Get result = new Get(name); initialize(result); return result; } public class Get extends CloudTraceRequest<com.google.api.services.cloudtrace.v2beta1.model.TraceSink> { private static final String REST_PATH = "v2beta1/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/traceSinks/[^/]+$"); /** * Get a trace sink by name under the parent resource (GCP project). 
* * Create a request for the method "traceSinks.get". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The resource name of the sink: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. * @since 1.13 */ protected Get(java.lang.String name) { super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v2beta1.model.TraceSink.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/traceSinks/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public Get set$Xgafv(java.lang.String $Xgafv) { return (Get) super.set$Xgafv($Xgafv); } @Override public Get setAccessToken(java.lang.String accessToken) { return (Get) super.setAccessToken(accessToken); } @Override public Get setAlt(java.lang.String alt) { return (Get) super.setAlt(alt); } @Override public Get setCallback(java.lang.String callback) { return (Get) super.setCallback(callback); } @Override public Get setFields(java.lang.String fields) { return (Get) super.setFields(fields); } @Override public Get setKey(java.lang.String key) { return (Get) super.setKey(key); } @Override 
public Get setOauthToken(java.lang.String oauthToken) { return (Get) super.setOauthToken(oauthToken); } @Override public Get setPrettyPrint(java.lang.Boolean prettyPrint) { return (Get) super.setPrettyPrint(prettyPrint); } @Override public Get setQuotaUser(java.lang.String quotaUser) { return (Get) super.setQuotaUser(quotaUser); } @Override public Get setUploadType(java.lang.String uploadType) { return (Get) super.setUploadType(uploadType); } @Override public Get setUploadProtocol(java.lang.String uploadProtocol) { return (Get) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the sink: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" * Example: `"projects/12345/traceSinks/my-sink-id"`. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the sink: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: `"projects/12345/traceSinks/my-sink-id"`. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the sink: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" * Example: `"projects/12345/traceSinks/my-sink-id"`. */ public Get setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/traceSinks/[^/]+$"); } this.name = name; return this; } @Override public Get set(String parameterName, Object value) { return (Get) super.set(parameterName, value); } } /** * List all sinks for the parent resource (GCP project). * * Create a request for the method "traceSinks.list". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link List#execute()} method to invoke the remote operation. * * @param parent Required. 
The parent resource whose sinks are to be listed (currently only project parent resources * are supported): "projects/[PROJECT_ID]" * @return the request */ public List list(java.lang.String parent) throws java.io.IOException { List result = new List(parent); initialize(result); return result; } public class List extends CloudTraceRequest<com.google.api.services.cloudtrace.v2beta1.model.ListTraceSinksResponse> { private static final String REST_PATH = "v2beta1/{+parent}/traceSinks"; private final java.util.regex.Pattern PARENT_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+$"); /** * List all sinks for the parent resource (GCP project). * * Create a request for the method "traceSinks.list". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p> * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param parent Required. 
The parent resource whose sinks are to be listed (currently only project parent resources * are supported): "projects/[PROJECT_ID]" * @since 1.13 */ protected List(java.lang.String parent) { super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v2beta1.model.ListTraceSinksResponse.class); this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^projects/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public List set$Xgafv(java.lang.String $Xgafv) { return (List) super.set$Xgafv($Xgafv); } @Override public List setAccessToken(java.lang.String accessToken) { return (List) super.setAccessToken(accessToken); } @Override public List setAlt(java.lang.String alt) { return (List) super.setAlt(alt); } @Override public List setCallback(java.lang.String callback) { return (List) super.setCallback(callback); } @Override public List setFields(java.lang.String fields) { return (List) super.setFields(fields); } @Override public List setKey(java.lang.String key) { return (List) super.setKey(key); } @Override public List setOauthToken(java.lang.String oauthToken) { return (List) super.setOauthToken(oauthToken); } @Override public List setPrettyPrint(java.lang.Boolean prettyPrint) { return (List) super.setPrettyPrint(prettyPrint); } @Override public List setQuotaUser(java.lang.String quotaUser) { return (List) super.setQuotaUser(quotaUser); } @Override public List setUploadType(java.lang.String uploadType) { return (List) 
super.setUploadType(uploadType); } @Override public List setUploadProtocol(java.lang.String uploadProtocol) { return (List) super.setUploadProtocol(uploadProtocol); } /** * Required. The parent resource whose sinks are to be listed (currently only project parent * resources are supported): "projects/[PROJECT_ID]" */ @com.google.api.client.util.Key private java.lang.String parent; /** Required. The parent resource whose sinks are to be listed (currently only project parent resources are supported): "projects/[PROJECT_ID]" */ public java.lang.String getParent() { return parent; } /** * Required. The parent resource whose sinks are to be listed (currently only project parent * resources are supported): "projects/[PROJECT_ID]" */ public List setParent(java.lang.String parent) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^projects/[^/]+$"); } this.parent = parent; return this; } /** * Optional. The maximum number of results to return from this request. Non-positive values * are ignored. The presence of `nextPageToken` in the response indicates that more results * might be available. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of `nextPageToken` in the response indicates that more results might be available. */ public java.lang.Integer getPageSize() { return pageSize; } /** * Optional. The maximum number of results to return from this request. Non-positive values * are ignored. The presence of `nextPageToken` in the response indicates that more results * might be available. */ public List setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** * Optional. If present, then retrieve the next batch of results from the preceding call to * this method. 
`pageToken` must be the value of `nextPageToken` from the previous response. * The values of other method parameters should be identical to those in the previous call. */ @com.google.api.client.util.Key private java.lang.String pageToken; /** Optional. If present, then retrieve the next batch of results from the preceding call to this method. `pageToken` must be the value of `nextPageToken` from the previous response. The values of other method parameters should be identical to those in the previous call. */ public java.lang.String getPageToken() { return pageToken; } /** * Optional. If present, then retrieve the next batch of results from the preceding call to * this method. `pageToken` must be the value of `nextPageToken` from the previous response. * The values of other method parameters should be identical to those in the previous call. */ public List setPageToken(java.lang.String pageToken) { this.pageToken = pageToken; return this; } @Override public List set(String parameterName, Object value) { return (List) super.set(parameterName, value); } } /** * Updates a sink. This method updates fields in the existing sink according to the provided update * mask. The sink's name cannot be changed nor any output-only fields (e.g. the writer_identity). * * Create a request for the method "traceSinks.patch". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link Patch#execute()} method to invoke the remote operation. * * @param name Required. The full resource name of the sink to update, including the parent resource and the sink * identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. 
* @param content the {@link com.google.api.services.cloudtrace.v2beta1.model.TraceSink} * @return the request */ public Patch patch(java.lang.String name, com.google.api.services.cloudtrace.v2beta1.model.TraceSink content) throws java.io.IOException { Patch result = new Patch(name, content); initialize(result); return result; } public class Patch extends CloudTraceRequest<com.google.api.services.cloudtrace.v2beta1.model.TraceSink> { private static final String REST_PATH = "v2beta1/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/traceSinks/[^/]+$"); /** * Updates a sink. This method updates fields in the existing sink according to the provided * update mask. The sink's name cannot be changed nor any output-only fields (e.g. the * writer_identity). * * Create a request for the method "traceSinks.patch". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link Patch#execute()} method to invoke the remote operation. * <p> {@link * Patch#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The full resource name of the sink to update, including the parent resource and the sink * identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. 
* @param content the {@link com.google.api.services.cloudtrace.v2beta1.model.TraceSink} * @since 1.13 */ protected Patch(java.lang.String name, com.google.api.services.cloudtrace.v2beta1.model.TraceSink content) { super(CloudTrace.this, "PATCH", REST_PATH, content, com.google.api.services.cloudtrace.v2beta1.model.TraceSink.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/traceSinks/[^/]+$"); } } @Override public Patch set$Xgafv(java.lang.String $Xgafv) { return (Patch) super.set$Xgafv($Xgafv); } @Override public Patch setAccessToken(java.lang.String accessToken) { return (Patch) super.setAccessToken(accessToken); } @Override public Patch setAlt(java.lang.String alt) { return (Patch) super.setAlt(alt); } @Override public Patch setCallback(java.lang.String callback) { return (Patch) super.setCallback(callback); } @Override public Patch setFields(java.lang.String fields) { return (Patch) super.setFields(fields); } @Override public Patch setKey(java.lang.String key) { return (Patch) super.setKey(key); } @Override public Patch setOauthToken(java.lang.String oauthToken) { return (Patch) super.setOauthToken(oauthToken); } @Override public Patch setPrettyPrint(java.lang.Boolean prettyPrint) { return (Patch) super.setPrettyPrint(prettyPrint); } @Override public Patch setQuotaUser(java.lang.String quotaUser) { return (Patch) super.setQuotaUser(quotaUser); } @Override public Patch setUploadType(java.lang.String uploadType) { return (Patch) super.setUploadType(uploadType); } @Override public Patch setUploadProtocol(java.lang.String uploadProtocol) { return (Patch) super.setUploadProtocol(uploadProtocol); } /** * Required. 
The full resource name of the sink to update, including the parent resource and * the sink identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The full resource name of the sink to update, including the parent resource and the sink identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: `"projects/12345/traceSinks /my-sink-id"`. */ public java.lang.String getName() { return name; } /** * Required. The full resource name of the sink to update, including the parent resource and * the sink identifier: "projects/[PROJECT_NUMBER]/traceSinks/[SINK_ID]" Example: * `"projects/12345/traceSinks/my-sink-id"`. */ public Patch setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/traceSinks/[^/]+$"); } this.name = name; return this; } /** * Required. Field mask that specifies the fields in `trace_sink` that are to be updated. A * sink field is overwritten if, and only if, it is in the update mask. `name` and * `writer_identity` fields cannot be updated. An empty updateMask is considered an error. * For a detailed `FieldMask` definition, see https://developers.google.com/protocol- * buffers/docs/reference/google.protobuf#fieldmask Example: `updateMask=output_config`. */ @com.google.api.client.util.Key private String updateMask; /** Required. Field mask that specifies the fields in `trace_sink` that are to be updated. A sink field is overwritten if, and only if, it is in the update mask. `name` and `writer_identity` fields cannot be updated. An empty updateMask is considered an error. For a detailed `FieldMask` definition, see https://developers.google.com/protocol- buffers/docs/reference/google.protobuf#fieldmask Example: `updateMask=output_config`. 
*/ public String getUpdateMask() { return updateMask; } /** * Required. Field mask that specifies the fields in `trace_sink` that are to be updated. A * sink field is overwritten if, and only if, it is in the update mask. `name` and * `writer_identity` fields cannot be updated. An empty updateMask is considered an error. * For a detailed `FieldMask` definition, see https://developers.google.com/protocol- * buffers/docs/reference/google.protobuf#fieldmask Example: `updateMask=output_config`. */ public Patch setUpdateMask(String updateMask) { this.updateMask = updateMask; return this; } @Override public Patch set(String parameterName, Object value) { return (Patch) super.set(parameterName, value); } } } } /** * Builder for {@link CloudTrace}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, 
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link CloudTrace}. */ @Override public CloudTrace build() { return new CloudTrace(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link CloudTraceRequestInitializer}. 
* * @since 1.12 */ public Builder setCloudTraceRequestInitializer( CloudTraceRequestInitializer cloudtraceRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(cloudtraceRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
//*********************************************************
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//*********************************************************
package com.microsoft.kafkaavailability.threads;

import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.SlidingWindowReservoir;
import com.google.gson.Gson;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.microsoft.kafkaavailability.*;
import com.microsoft.kafkaavailability.discovery.CommonUtils;
import com.microsoft.kafkaavailability.metrics.AvailabilityGauge;
import com.microsoft.kafkaavailability.metrics.MetricNameEncoded;
import com.microsoft.kafkaavailability.metrics.MetricNameEncodedFactory;
import com.microsoft.kafkaavailability.properties.AppProperties;
import com.microsoft.kafkaavailability.properties.MetaDataManagerProperties;
import com.microsoft.kafkaavailability.reporters.ScheduledReporterCollector;
import org.apache.curator.framework.CuratorFramework;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;

import static com.microsoft.kafkaavailability.discovery.Constants.DEFAULT_ELAPSED_TIME;

/**
 * One measurement pass of Kafka consume latency/availability.
 *
 * <p>For every topic assigned to this instance (topics are distributed round-robin over the
 * known server list), each partition is read on a fixed thread pool with a per-partition
 * timeout, and the elapsed times are folded into Codahale latency histograms and
 * availability gauges registered with the shared {@link MetricRegistry}.
 *
 * <p>The thread participates in the supplied {@link Phaser}: it registers in the constructor
 * and always arrives-and-deregisters when {@link #call()} finishes, even on failure.
 */
public class ConsumerThread implements Callable<Long> {

    final static Logger m_logger = LoggerFactory.getLogger(ConsumerThread.class);

    // Gson instances are thread-safe and stateless for toJson(); share one instead of
    // allocating a new parser for every metric-name lookup/registration.
    private static final Gson GSON = new Gson();

    private final ScheduledReporterCollector reporterCollector;
    private final CuratorFramework m_curatorFramework;
    private final ServiceSpecProvider serviceSpecProvider;
    private final MetricNameEncodedFactory metricNameFactory;

    private Phaser m_phaser;
    private List<String> m_listServers;
    // NOTE(review): stored but never read in this class — presumably consumed elsewhere
    // or via the @Assisted factory contract; confirm before removing.
    private long m_threadSleepTime;

    /**
     * Creates the consumer thread and registers it as a new unarrived party with the phaser.
     *
     * @param curatorFramework  ZooKeeper client used for metadata and partition reads
     * @param reporterCollector supplies the shared metric registry
     * @param serviceSpecProvider identifies this instance within {@code listServers}
     * @param metricNameFactory builds encoded metric names for topics/partitions
     * @param phaser            synchronization barrier shared with the coordinator
     * @param listServers       all participating KAT instances (used for topic assignment)
     * @param threadSleepTime   configured sleep interval (currently unused here)
     */
    @Inject
    public ConsumerThread(CuratorFramework curatorFramework, ScheduledReporterCollector reporterCollector,
                          ServiceSpecProvider serviceSpecProvider, MetricNameEncodedFactory metricNameFactory,
                          @Assisted Phaser phaser, @Assisted List<String> listServers,
                          @Assisted long threadSleepTime) {
        this.m_curatorFramework = curatorFramework;
        this.reporterCollector = reporterCollector;
        this.serviceSpecProvider = serviceSpecProvider;
        this.metricNameFactory = metricNameFactory;
        this.m_phaser = phaser;
        this.m_phaser.register(); // Registers/adds a new unarrived party to this phaser.
        CommonUtils.dumpPhaserState("After registration of ConsumerThread", phaser);
        this.m_listServers = listServers;
        this.m_threadSleepTime = threadSleepTime;
    }

    /**
     * Runs one consumer pass and returns the wall-clock duration in milliseconds.
     * Errors are logged rather than propagated so the phaser is always released.
     */
    @Override
    public Long call() throws Exception {
        long lStartTime = System.currentTimeMillis();
        m_logger.info(Thread.currentThread().getName() + " - Consumer party has arrived and is working in "
                + "Phase-" + m_phaser.getPhase());

        try {
            MetricRegistry metrics = reporterCollector.getRegistry();
            runConsumer(metrics);
        } catch (Exception e) {
            m_logger.error(e.getMessage(), e);
            try {
                m_phaser.arriveAndDeregister();
            } catch (IllegalStateException ignored) {
                // Phaser already terminated or party already deregistered — nothing to do.
            }
        } finally {
            try {
                CommonUtils.sleep(1000);
            } catch (Exception e) {
                m_logger.error(e.getMessage(), e);
            }
        }

        long elapsedTime = CommonUtils.stopWatch(lStartTime);
        m_logger.info("Consumer Elapsed: " + elapsedTime + " milliseconds.");

        try {
            m_phaser.arriveAndDeregister();
        } catch (IllegalStateException ignored) {
            // Already deregistered in the catch block above after a failure — safe to ignore.
        }
        CommonUtils.dumpPhaserState("After arrival of ConsumerThread", m_phaser);

        m_logger.info("ConsumerThread (run()) has been COMPLETED.");
        return elapsedTime;
    }

    /**
     * Reads every partition of every topic assigned to this instance and registers/updates
     * latency histograms and availability gauges in {@code metrics}.
     *
     * @throws IOException              on properties/metadata I/O failure
     * @throws MetaDataManagerException on metadata retrieval failure
     */
    private void runConsumer(MetricRegistry metrics) throws IOException, MetaDataManagerException {
        m_logger.info("Starting ConsumerLatency");
        IPropertiesManager metaDataPropertiesManager =
                new PropertiesManager<MetaDataManagerProperties>("metadatamanagerProperties.json",
                        MetaDataManagerProperties.class);
        IMetaDataManager metaDataManager = new MetaDataManager(m_curatorFramework, metaDataPropertiesManager);
        try {
            IPropertiesManager appPropertiesManager =
                    new PropertiesManager<AppProperties>("appProperties.json", AppProperties.class);
            AppProperties appProperties = (AppProperties) appPropertiesManager.getProperties();

            // Thread-pool size: one worker per available processor.
            int nThreads = Runtime.getRuntime().availableProcessors();

            // Per-partition read timeout; defaults to 30 seconds when not configured.
            long consumerPartitionTimeoutInSeconds = (appProperties.consumerPartitionTimeoutInSeconds > 0
                    ? appProperties.consumerPartitionTimeoutInSeconds : 30);

            // Full topic list across the cluster; this instance handles the round-robin subset below.
            List<kafka.javaapi.TopicMetadata> totalTopicMetadata = metaDataManager.getAllTopicPartition();
            List<kafka.javaapi.TopicMetadata> allTopicMetadata = new ArrayList<kafka.javaapi.TopicMetadata>();

            // Hoisted loop invariants: this instance's spec and its slot in the server list.
            String serviceSpec = serviceSpecProvider.getServiceSpec();
            int myServerIndex = m_listServers.indexOf(serviceSpec);

            String sep = ", ";
            StringBuilder rString = new StringBuilder();
            // Indexed loop replaces the original O(n^2) indexOf() per iteration.
            for (int topicIndex = 0; topicIndex < totalTopicMetadata.size(); topicIndex++) {
                kafka.javaapi.TopicMetadata topic = totalTopicMetadata.get(topicIndex);
                int serverIndex = (topicIndex % m_listServers.size());
                String client = m_listServers.get(serverIndex);
                if (serverIndex == myServerIndex) {
                    allTopicMetadata.add(topic);
                }
                // Log the server/topic mapping to know which topic is handled by which KAT instance.
                rString.append(sep).append(topic.topic() + "-->" + client);
            }
            m_logger.info("Mapping of topics and servers:" + rString);
            m_logger.info("totalTopicMetadata size:" + totalTopicMetadata.size());
            m_logger.info("allTopicMetadata size in Consumer:" + allTopicMetadata.size());

            int consumerTryCount = 0;
            int consumerFailCount = 0;

            int numPartitionsConsumers = 0;
            for (kafka.javaapi.TopicMetadata topic : allTopicMetadata) {
                numPartitionsConsumers += topic.partitionsMetadata().size();
            }

            // Overall latency histogram, sized to hold one sample per assigned partition.
            Histogram histogramConsumerLatency =
                    new Histogram(new SlidingWindowReservoir(numPartitionsConsumers));
            MetricNameEncoded consumerLatency = metricNameFactory.createWithTopic("Consumer.Latency", "all");
            if (appProperties.sendConsumerLatency
                    && !metrics.getNames().contains(GSON.toJson(consumerLatency))) {
                metrics.register(GSON.toJson(consumerLatency), histogramConsumerLatency);
            }

            for (kafka.javaapi.TopicMetadata item : allTopicMetadata) {
                boolean isTopicAvailable = true;
                m_logger.info("Reading from Topic: {};", item.topic());
                consumerTryCount++;

                Histogram histogramConsumerTopicLatency =
                        new Histogram(new SlidingWindowReservoir(item.partitionsMetadata().size()));
                MetricNameEncoded consumerTopicLatency =
                        metricNameFactory.createWithTopic("Consumer.Latency", item.topic());
                if (appProperties.sendConsumerTopicLatency
                        && !metrics.getNames().contains(GSON.toJson(consumerTopicLatency))) {
                    metrics.register(GSON.toJson(consumerTopicLatency), histogramConsumerTopicLatency);
                }

                // One bounded pool per topic; JobManager enforces the per-partition timeout.
                ExecutorService newFixedThreadPool = Executors.newFixedThreadPool(nThreads);
                Map<Integer, Future<Long>> response = new HashMap<Integer, Future<Long>>();
                for (kafka.javaapi.PartitionMetadata part : item.partitionsMetadata()) {
                    m_logger.debug("Reading from Topic: {}; Partition: {};", item.topic(), part.partitionId());
                    ConsumerPartitionThread consumerPartitionJob =
                            new ConsumerPartitionThread(m_curatorFramework, item, part);
                    Future<Long> future = newFixedThreadPool.submit(
                            new JobManager(consumerPartitionTimeoutInSeconds, TimeUnit.SECONDS,
                                    consumerPartitionJob,
                                    "Consumer-" + item.topic() + "-P#" + part.partitionId()));
                    response.put(part.partitionId(), future);
                }
                // Stop accepting new tasks and wait for in-flight partition reads to finish.
                CommonUtils.shutdownAndAwaitTermination(newFixedThreadPool, item.topic());

                int topicConsumerFailCount = 0;
                for (Map.Entry<Integer, Future<Long>> entry : response.entrySet()) {
                    Integer partitionId = entry.getKey();
                    int partitionConsumerFailCount = 0;
                    long elapsedTime = DEFAULT_ELAPSED_TIME;
                    try {
                        // Future.get() waits for the partition-read task to complete.
                        elapsedTime = entry.getValue().get();
                    } catch (InterruptedException | ExecutionException e) {
                        m_logger.error("Error Reading from Topic: {}; Partition: {}; Exception: {}",
                                item.topic(), partitionId, e);
                    }
                    // An elapsed time at/above DEFAULT_ELAPSED_TIME marks a failed or timed-out read.
                    if (elapsedTime >= DEFAULT_ELAPSED_TIME) {
                        topicConsumerFailCount++;
                        partitionConsumerFailCount++;
                        if (isTopicAvailable) {
                            consumerFailCount++;
                            isTopicAvailable = false;
                        }
                    }

                    MetricNameEncoded consumerPartitionLatency =
                            metricNameFactory.createWithPartition("Consumer.Latency",
                                    item.topic() + "##" + partitionId);
                    Histogram histogramConsumerPartitionLatency =
                            new Histogram(new SlidingWindowReservoir(1));
                    if (appProperties.sendConsumerPartitionLatency
                            && !metrics.getNames().contains(GSON.toJson(consumerPartitionLatency))) {
                        metrics.register(GSON.toJson(consumerPartitionLatency),
                                histogramConsumerPartitionLatency);
                    }
                    histogramConsumerPartitionLatency.update(elapsedTime);
                    histogramConsumerTopicLatency.update(elapsedTime);
                    histogramConsumerLatency.update(elapsedTime);

                    if (appProperties.sendConsumerPartitionAvailability) {
                        MetricNameEncoded consumerPartitionAvailability =
                                metricNameFactory.createWithPartition("Consumer.Availability",
                                        item.topic() + "##" + partitionId);
                        if (!metrics.getNames().contains(GSON.toJson(consumerPartitionAvailability))) {
                            metrics.register(GSON.toJson(consumerPartitionAvailability),
                                    new AvailabilityGauge(1, 1 - partitionConsumerFailCount));
                        }
                    }
                }

                if (appProperties.sendConsumerTopicAvailability) {
                    MetricNameEncoded consumerTopicAvailability =
                            metricNameFactory.createWithTopic("Consumer.Availability", item.topic());
                    if (!metrics.getNames().contains(GSON.toJson(consumerTopicAvailability))) {
                        metrics.register(GSON.toJson(consumerTopicAvailability),
                                new AvailabilityGauge(response.size(),
                                        response.size() - topicConsumerFailCount));
                    }
                }
            }

            if (appProperties.sendConsumerAvailability) {
                MetricNameEncoded consumerAvailability =
                        metricNameFactory.createWithTopic("Consumer.Availability", "all");
                if (!metrics.getNames().contains(GSON.toJson(consumerAvailability))) {
                    metrics.register(GSON.toJson(consumerAvailability),
                            new AvailabilityGauge(consumerTryCount, consumerTryCount - consumerFailCount));
                }
            }
        } finally {
            // Release the metadata manager even when metadata retrieval or registration fails.
            ((MetaDataManager) metaDataManager).close();
        }
        m_logger.info("Finished ConsumerLatency");
    }
}
/*
 * ARX: Powerful Data Anonymization
 * Copyright 2012 - 2016 Fabian Prasser, Florian Kohlmayer and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.deidentifier.arx.test;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.deidentifier.arx.ARXAnonymizer;
import org.deidentifier.arx.ARXConfiguration;
import org.deidentifier.arx.ARXResult;
import org.deidentifier.arx.AttributeType;
import org.deidentifier.arx.AttributeType.Hierarchy;
import org.deidentifier.arx.Data;
import org.deidentifier.arx.criteria.LDiversity;
import org.deidentifier.arx.criteria.PrivacyCriterion;
import org.deidentifier.arx.criteria.TCloseness;
import org.deidentifier.arx.io.CSVHierarchyInput;
import org.deidentifier.arx.metric.Metric;
import org.junit.Before;
import org.junit.Test;

/**
 * Test for utility metrics: anonymizes the same dataset twice with two metric variants
 * (typically a precomputed and a non-precomputed flavor of the same metric) and asserts
 * that both runs report the same maximum information loss.
 *
 * @author Fabian Prasser
 * @author Florian Kohlmayer
 */
public abstract class AbstractTestUtilityMetricsPrecomputation extends AbstractTest {

    /**
     * Represents a test case: a dataset, an ARX configuration, and the two metrics
     * whose results must agree.
     *
     * @author Fabian Prasser
     * @author Florian Kohlmayer
     */
    public static class ARXUtilityMetricsTestCase {

        /** The anonymization configuration (criteria, suppression limit, metric slot). */
        public ARXConfiguration config;

        /** Path to the input dataset CSV. */
        public String           dataset;

        /** Name of the sensitive attribute, excluded from hierarchy assignment. */
        public String           sensitiveAttribute;

        /** First metric under comparison. */
        public Metric<?>        m1;

        /** Second metric under comparison; must yield the same information loss as m1. */
        public Metric<?>        m2;

        /**
         * Creates a new instance.
         *
         * @param config
         * @param sensitiveAttribute
         * @param dataset
         * @param m1
         * @param m2
         */
        public ARXUtilityMetricsTestCase(final ARXConfiguration config,
                                         final String sensitiveAttribute,
                                         final String dataset,
                                         final Metric<?> m1,
                                         final Metric<?> m2) {
            this.config = config;
            this.sensitiveAttribute = sensitiveAttribute;
            this.dataset = dataset;
            this.m1 = m1;
            this.m2 = m2;
        }

        /**
         * Returns a human-readable, multi-line description of this test case.
         *
         * @return the description
         */
        public String getDescription() {
            StringBuilder builder = new StringBuilder();
            builder.append("TestCase{\n");
            builder.append(" - Dataset: ").append(dataset).append("\n");
            builder.append(" - Sensitive: ").append(sensitiveAttribute).append("\n");
            builder.append(" - Suppression: ").append(config.getMaxOutliers()).append("\n");
            builder.append(" - Metric1: ").append(m1.toString()).append("\n");
            builder.append(" - Metric2: ").append(m2.toString()).append("\n");
            builder.append(" - Criteria:\n");
            for (PrivacyCriterion c : config.getCriteria()) {
                builder.append("   * ").append(c.toString()).append("\n");
            }
            builder.append("}");
            return builder.toString();
        }

        @Override
        public String toString() {
            return config.getCriteria() + "-" + config.getMaxOutliers() + "-" + config.getMetric() + "-" +
                   dataset + "-PM:" + config.isPracticalMonotonicity();
        }
    }

    /**
     * Returns the data object for the test case: loads the dataset CSV and attaches
     * every matching generalization hierarchy found next to it. The sensitive attribute
     * is marked SENSITIVE instead (when an l-diversity or t-closeness criterion is used).
     *
     * @param testCase
     * @return the configured data object
     * @throws IOException if the dataset or a hierarchy cannot be read, or the
     *                     dataset directory cannot be listed
     */
    public static Data getDataObject(final ARXUtilityMetricsTestCase testCase) throws IOException {

        final Data data = Data.create(testCase.dataset, StandardCharsets.UTF_8, ';');

        // Accept files named "<dataset>_hierarchy_<attribute>.csv" in the dataset directory.
        // NOTE(review): the '.' before "csv" is unescaped in both patterns below; kept
        // byte-identical because escaping it could change which files match.
        final FilenameFilter hierarchyFilter = new FilenameFilter() {
            @Override
            public boolean accept(final File dir, final String name) {
                if (name.matches(testCase.dataset.substring(testCase.dataset.lastIndexOf("/") + 1,
                                                            testCase.dataset.length() - 4) +
                                 "_hierarchy_(.)+.csv")) {
                    return true;
                } else {
                    return false;
                }
            }
        };

        final File testDir = new File(testCase.dataset.substring(0, testCase.dataset.lastIndexOf("/")));
        final File[] genHierFiles = testDir.listFiles(hierarchyFilter);
        if (genHierFiles == null) {
            // File.listFiles() returns null (not an empty array) when the directory
            // does not exist or an I/O error occurs; fail with a clear message instead of an NPE.
            throw new IOException("Unable to list hierarchy files in: " + testDir);
        }

        // Extract the attribute name from each hierarchy file name.
        final Pattern pattern = Pattern.compile("_hierarchy_(.*?).csv");
        for (final File file : genHierFiles) {
            final Matcher matcher = pattern.matcher(file.getName());
            if (matcher.find()) {
                final CSVHierarchyInput hier = new CSVHierarchyInput(file, StandardCharsets.UTF_8, ';');
                final String attributeName = matcher.group(1);
                if (!attributeName.equalsIgnoreCase(testCase.sensitiveAttribute)) {
                    data.getDefinition().setAttributeType(attributeName, Hierarchy.create(hier.getHierarchy()));
                } else {
                    // Sensitive attribute: only relevant for l-diversity / t-closeness criteria.
                    if (testCase.config.containsCriterion(LDiversity.class) ||
                        testCase.config.containsCriterion(TCloseness.class)) {
                        data.getDefinition().setAttributeType(attributeName, AttributeType.SENSITIVE_ATTRIBUTE);
                    }
                }
            }
        }
        return data;
    }

    /** The test case. */
    protected final ARXUtilityMetricsTestCase testcase;

    /**
     * Creates a new instance.
     *
     * @param testCase
     */
    public AbstractTestUtilityMetricsPrecomputation(final ARXUtilityMetricsTestCase testCase) {
        this.testcase = testCase;
    }

    @Override
    @Before
    public void setUp() {
        // Empty by design: the AbstractTest setup is intentionally skipped here.
    }

    /**
     * Anonymizes the test dataset once per metric and asserts that both metrics
     * report the same maximum information loss on the global optimum.
     *
     * @throws IOException
     */
    @Test
    public void test() throws IOException {

        final ARXConfiguration testcaseconfig = testcase.config;

        // Run 1: metric m1
        testcaseconfig.setMetric(testcase.m1);
        Data data1 = getDataObject(testcase);
        ARXAnonymizer anonymizer1 = new ARXAnonymizer();
        ARXResult result1 = anonymizer1.anonymize(data1, testcaseconfig);

        // Run 2: metric m2 (fresh Data object; a Data instance cannot be reused across runs)
        testcaseconfig.setMetric(testcase.m2);
        Data data2 = getDataObject(testcase);
        ARXAnonymizer anonymizer2 = new ARXAnonymizer();
        ARXResult result2 = anonymizer2.anonymize(data2, testcaseconfig);

        String loss1 = result1.getGlobalOptimum().getMaximumInformationLoss().toString();
        String loss2 = result2.getGlobalOptimum().getMaximumInformationLoss().toString();
        assertEquals("Metric value differs", loss1, loss2);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Semaphore;

import org.apache.drill.categories.SlowTest;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.test.TestTools;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
import org.apache.drill.exec.rpc.user.UserResultsListener;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.test.QueryTestUtil;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;

import com.google.common.collect.Sets;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/*
 * Note that the real interest here is that the drillbit doesn't become
 * unstable from running a lot of queries concurrently -- it's not about
 * any particular order of execution. We ignore the results.
 */
@Category({SlowTest.class})
public class TestTpchDistributedConcurrent extends BaseTestQuery {
  @Rule
  public final TestRule TIMEOUT = TestTools.getTimeoutRule(360000); // Longer timeout than usual.

  /*
   * Valid test names taken from TestTpchDistributed. Fuller path prefixes are
   * used so that tests may also be taken from other locations -- more variety
   * is better as far as this test goes.
   */
  private static final String[] queryFile = {
    "queries/tpch/01.sql",
    "queries/tpch/03.sql",
    "queries/tpch/04.sql",
    "queries/tpch/05.sql",
    "queries/tpch/06.sql",
    "queries/tpch/07.sql",
    "queries/tpch/08.sql",
    "queries/tpch/09.sql",
    "queries/tpch/10.sql",
    "queries/tpch/11.sql",
    "queries/tpch/12.sql",
    "queries/tpch/13.sql",
    "queries/tpch/14.sql",
    // "queries/tpch/15.sql", this creates a view
    "queries/tpch/16.sql",
    "queries/tpch/18.sql",
    "queries/tpch/19_1.sql",
    "queries/tpch/20.sql",
  };

  private static final int TOTAL_QUERIES = 115;
  private static final int CONCURRENT_QUERIES = 15;
  private static final Random random = new Random(0xdeadbeef);
  private static final String alterSession = "alter session set `planner.slice_target` = 10";

  private int remainingQueries = TOTAL_QUERIES - CONCURRENT_QUERIES;
  private final Semaphore completionSemaphore = new Semaphore(0);
  private final Semaphore submissionSemaphore = new Semaphore(0);
  private final Set<UserResultsListener> listeners = Sets.newIdentityHashSet();
  private Thread testThread = null; // used to interrupt semaphore wait in case of error

  /** Record of a failed query submission, kept for the final report. */
  private static class FailedQuery {
    final String queryFile;
    final UserException userEx;

    public FailedQuery(final String queryFile, final UserException userEx) {
      this.queryFile = queryFile;
      this.userEx = userEx;
    }
  }

  private final List<FailedQuery> failedQueries = new LinkedList<>();

  /**
   * Picks a random TPC-H query file and submits it asynchronously; the
   * listener is registered so completions can be accounted for later.
   */
  private void submitRandomQuery() {
    final String filename = queryFile[random.nextInt(queryFile.length)];
    final String query;
    try {
      query = QueryTestUtil.normalizeQuery(getFile(filename)).replace(';', ' ');
    } catch(IOException e) {
      throw new RuntimeException("Caught exception", e);
    }
    final UserResultsListener listener = new ChainingSilentListener(query);
    client.runQuery(UserBitShared.QueryType.SQL, query, listener);
    synchronized(this) {
      listeners.add(listener);
    }
  }

  /**
   * Silent listener that, on completion, releases the completion semaphore
   * and (via the submission semaphore) chains the next query submission.
   */
  private class ChainingSilentListener extends SilentListener {
    private final String query;

    public ChainingSilentListener(final String query) {
      this.query = query;
    }

    @Override
    public void queryCompleted(QueryState state) {
      super.queryCompleted(state);

      completionSemaphore.release();
      synchronized(TestTpchDistributedConcurrent.this) {
        /*
         * BUG FIX: Set.remove() returns a primitive boolean; autoboxing made
         * the former assertNotNull(listeners.remove(this)) a check that could
         * never fail (a boxed Boolean is never null). Assert the boolean.
         */
        assertTrue("listener not found", listeners.remove(this));

        /* Only submit more queries if there hasn't been an error. */
        if (failedQueries.isEmpty() && (remainingQueries > 0)) {
          /*
           * We can't directly submit the query from here, because we're on the RPC
           * thread, and it throws an exception if we try to send from here. So we
           * allow the QuerySubmitter thread to advance.
           */
          submissionSemaphore.release();
          --remainingQueries;
        }
      }
    }

    @Override
    public void submissionFailed(UserException uex) {
      super.submissionFailed(uex);

      completionSemaphore.release();
      System.out.println("submissionFailed for " + query + "\nwith " + uex);
      synchronized(TestTpchDistributedConcurrent.this) {
        // Same autoboxing fix as in queryCompleted() above.
        assertTrue("listener not found", listeners.remove(this));
        failedQueries.add(new FailedQuery(query, uex));
        testThread.interrupt();
      }
    }
  }

  /** Worker thread: submits one query per permit on the submission semaphore. */
  private class QuerySubmitter extends Thread {
    @Override
    public void run() {
      while(true) {
        try {
          submissionSemaphore.acquire();
        } catch(InterruptedException e) {
          System.out.println("QuerySubmitter quitting.");
          return;
        }

        submitRandomQuery();
      }
    }
  }

  // NOTE(review): @Test was commented out as found in this snapshot; left
  // disabled here -- confirm whether disabling this test was intentional.
  //@Test
  public void testConcurrentQueries() throws Exception {
    QueryTestUtil.testRunAndPrint(client, UserBitShared.QueryType.SQL, alterSession);

    testThread = Thread.currentThread();
    final QuerySubmitter querySubmitter = new QuerySubmitter();
    querySubmitter.start();

    // Kick off the initial queries. As they complete, they will submit more.
    submissionSemaphore.release(CONCURRENT_QUERIES);

    // Wait for all the queries to complete.
    InterruptedException interruptedException = null;
    try {
      completionSemaphore.acquire(TOTAL_QUERIES);
    } catch(InterruptedException e) {
      interruptedException = e;

      // List the failed queries.
      for(final FailedQuery fq : failedQueries) {
        System.err.println(String.format(
            "%s failed with %s", fq.queryFile, fq.userEx));
      }
    }

    // Stop the querySubmitter thread.
    querySubmitter.interrupt();

    if (interruptedException != null) {
      final StackTraceElement[] ste = interruptedException.getStackTrace();
      final StringBuilder sb = new StringBuilder();
      for(StackTraceElement s : ste) {
        sb.append(s.toString());
        sb.append('\n');
      }

      System.out.println("interruptedException: " + interruptedException.getMessage() +
          " from \n" + sb.toString());
    }

    assertNull("Query error caused interruption", interruptedException);

    final int nListeners = listeners.size();
    assertEquals(nListeners + " listeners still exist", 0, nListeners);

    assertEquals("Didn't submit all queries", 0, remainingQueries);
    assertEquals("Queries failed", 0, failedQueries.size());
  }
}
/*
 * Copyright 2007-2008 Sun Microsystems, Inc.  All Rights Reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *
 *   - Neither the name of Sun Microsystems nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.zhangmz.ui.paradisaeidae;

import org.zhangmz.ui.paradisaeidae.constant.Keys;
import org.zhangmz.ui.paradisaeidae.utilities.HTMLPanel;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Rectangle;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.net.URL;
import javax.swing.JComponent;
import javax.swing.JEditorPane;
import javax.swing.UIManager;
import javax.swing.border.Border;
import org.jdesktop.animation.timing.Animator;
import org.jdesktop.animation.timing.interpolation.PropertySetter;
import org.jdesktop.animation.timing.triggers.TimingTrigger;
import org.jdesktop.animation.timing.triggers.TimingTriggerEvent;
import org.jdesktop.swingx.JXPanel;
import org.zhangmz.ui.paradisaeidae.utilities.RoundedBorder;
import org.zhangmz.ui.paradisaeidae.utilities.RoundedPanel;
import org.zhangmz.ui.paradisaeidae.utilities.RoundedTitleBorder;
import org.zhangmz.ui.paradisaeidae.utilities.Utilities;

/**
 * Panel that hosts a single {@code Demo}: it first shows an animated
 * "demo loading" placeholder ({@code LoadAnimationPanel}), then cross-fades
 * to the loaded demo content ({@code LoadedDemoPanel}) using the Timing
 * Framework's Animator/PropertySetter on this JXPanel's "alpha" property.
 *
 * @author aim
 */
public class DemoPanel extends JXPanel {
    // Shared visuals for both the loading panel and the demo frame.
    private static final Border roundedBorder = new RoundedBorder(10);
    private static final Insets margin = new Insets(8, 10, 8, 8);

    // The demo rendered by this panel.
    private Demo demo;

    /**
     * Builds the panel: adds the loading animation, constructs the loaded demo
     * panel, and starts a fade-out/fade-in sequence to swap them.
     */
    public DemoPanel(Demo demo) {
        this.demo = demo;
        setLayout(new BorderLayout());

        // remind(aim): how to access resourceMap?
        //resourceMap = getContext().getResourceMap();

        LoadAnimationPanel loadAnimationPanel = new LoadAnimationPanel();
        add(loadAnimationPanel);
        loadAnimationPanel.setAnimating(true);

        LoadedDemoPanel demoPanel = new LoadedDemoPanel(demo);

        try {
            loadAnimationPanel.setAnimating(false);
            // Fade the loading panel out; FadeOut.end() swaps in the demo panel.
            Animator fadeOutAnimator = new Animator(400,
                    new FadeOut(DemoPanel.this,
                            loadAnimationPanel, demoPanel));
            fadeOutAnimator.setAcceleration(.2f);
            fadeOutAnimator.setDeceleration(.3f);
            // Then fade this panel's alpha back up from 0.3 to fully opaque.
            Animator fadeInAnimator = new Animator(400,
                    new PropertySetter(DemoPanel.this, "alpha", 0.3f, 1.0f));
            // Chain: fade-in is triggered when fade-out stops.
            TimingTrigger.addTrigger(fadeOutAnimator, fadeInAnimator, TimingTriggerEvent.STOP);
            fadeOutAnimator.start();
        } catch (Exception ignore) {
            System.err.println(ignore);
            ignore.printStackTrace();
        }
    }

    /** Returns the demo hosted by this panel. */
    public Demo getDemo() {
        return demo;
    }

    /**
     * PropertySetter that fades {@code out} from alpha 1.0 to 0.3 and, when
     * the animation ends, replaces it with {@code in} inside {@code parent}.
     */
    private static class FadeOut extends PropertySetter {
        private JXPanel parent;
        private JXPanel out;
        private JComponent in;

        public FadeOut(JXPanel parent, JXPanel out, JComponent in) {
            super(out, "alpha", 1.0f, 0.3f);
            this.parent = parent;
            this.out = out;
            this.in = in;
        }

        // Called by the timing framework when the fade-out completes:
        // swap the loading panel for the demo panel.
        public void end() {
            parent.setAlpha(0.3f);
            parent.remove(out);
            parent.add(in);
            parent.revalidate();
        }
    } // Fader

    /**
     * Rounded placeholder panel that paints "demo loading" with three dots,
     * one of which is highlighted in turn while the animator runs (the
     * animated "triState" property cycles 0..3).
     */
    private static class LoadAnimationPanel extends RoundedPanel {
        private String message;
        private int triState = 0;
        private boolean animating = false;
        private Animator animator;

        public LoadAnimationPanel() {
            super(10);
            setBorder(roundedBorder);
            // Slightly darker than the default panel background.
            setBackground(Utilities.deriveColorHSB(
                    UIManager.getColor("Panel.background"), 0, 0, -.06f));

            // remind(aim): get from resource map
            message = "demo loading";

            // Loop triState 0..3 forever to drive the dot animation.
            PropertySetter rotator = new PropertySetter(this, "triState", 0, 3);
            animator = new Animator(500, Animator.INFINITE,
                    Animator.RepeatBehavior.LOOP, rotator);
            // Don't animate gears if loading is quick
            animator.setStartDelay(200);
        }

        /** Starts or stops the dot animation. */
        public void setAnimating(boolean animating) {
            this.animating = animating;
            if (animating) {
                animator.start();
            } else {
                animator.stop();
            }
        }

        public boolean isAnimating() {
            return animating;
        }

        // Animated property: which of the three dots is highlighted.
        public void setTriState(int triState) {
            this.triState = triState;
            repaint();
        }

        public int getTriState() {
            return triState;
        }

        /** Paints the centered message plus three dots; dot #triState is tinted. */
        public void paintComponent(Graphics g) {
            super.paintComponent(g);
            Graphics2D g2 = (Graphics2D) g.create();
            Dimension size = getSize();
            Color textColor = Utilities.deriveColorHSB(getBackground(), 0, 0, -.3f);
            Color dotColor = Utilities.deriveColorHSB(textColor, 0, .2f, -.08f);
            g2.setColor(textColor);
            g2.setFont(UIManager.getFont("Label.font").deriveFont(32f));
            FontMetrics metrics = g2.getFontMetrics();
            Rectangle2D rect = metrics.getStringBounds(message, g2);
            Rectangle2D dotRect = metrics.getStringBounds(".", g2);
            // Center message-plus-dots horizontally and vertically.
            float x = (float) (size.width - (rect.getWidth() + 3 * dotRect.getWidth())) / 2;
            float y = (float) (size.height - rect.getHeight()) / 2;
            g2.drawString(message, x, y);
            int tri = getTriState();
            float dx = 0;
            for (int i = 0; i < 3; i++) {
                // Highlight the active dot only while the animator runs.
                g2.setColor(animator.isRunning() && i == tri ? dotColor : textColor);
                g2.drawString(".", x + (float) (rect.getWidth() + dx), y);
                dx += dotRect.getWidth();
            }
        }
    } // LoadAnimationPanel

    /**
     * Panel showing the loaded demo, optionally alongside an HTML description
     * pane; uses manual layout (doLayout) to split the available space.
     */
    private static class LoadedDemoPanel extends RoundedPanel {
        private String demoName;
        private JComponent descriptionArea;
        private JComponent demoPanel;

        public LoadedDemoPanel(Demo demo) {
            super(10);
            // Manual layout; see doLayout() below.
            setLayout(null);
            demoName = demo.getName();
            URL description = demo.getHTMLDescription();
            if (description != null) {
                descriptionArea = createDescriptionArea(description);
                add(descriptionArea);
                demoPanel = new RoundedPanel(new BorderLayout());
                demoPanel.setBorder(roundedBorder);
            } else {
                // no description
                demoPanel = new JXPanel(new BorderLayout());
            }
            demoPanel.add(demo.createDemoComponent());
            add(demoPanel);
            applyDefaults();
        }

        /**
         * Builds a read-only HTML pane for the demo description. On load
         * failure the error is reported to stderr and an empty pane returned.
         */
        private static JComponent createDescriptionArea(URL descriptionURL) {
            JEditorPane descriptionPane = new HTMLPanel();
            descriptionPane.setEditable(false);
            descriptionPane.setMargin(margin);
            descriptionPane.setOpaque(true);
            try {
                descriptionPane.setPage(descriptionURL);
            } catch (IOException e) {
                System.err.println("couldn't load description from URL:" + descriptionURL);
            }
            return descriptionPane;
        }

        @Override
        public void doLayout() {
            if (demoPanel != null) {
                Dimension size = getSize();
                Insets insets = getInsets();
                if (descriptionArea == null) {
                    // Make demo fill entire area within border
                    demoPanel.setBounds(insets.left, insets.top,
                            size.width - insets.left - insets.right,
                            size.height - insets.top - insets.bottom);
                } else {
                    // Split space between HTML description and running demo
                    Dimension demoSize = demoPanel.getPreferredSize();
                    int margin = insets.top / 2;
                    Rectangle bounds = new Rectangle();
                    // Demo takes at least half the width, right-aligned.
                    bounds.width = Math.max(demoSize.width, (int) (size.width * .50));
                    bounds.height = Math.max(demoSize.height, size.height - 2 * margin);
                    bounds.x = size.width - bounds.width - margin;
                    bounds.y = margin;
                    demoPanel.setBounds(bounds);
                    // Description gets the remaining left-hand strip.
                    descriptionArea.setBounds(insets.left, insets.top,
                            size.width - margin - insets.right - bounds.width,
                            size.height - insets.top - insets.bottom);
                }
            }
        }

        @Override
        public void updateUI() {
            super.updateUI();
            // Re-derive look-and-feel-dependent colors/fonts after a LaF change.
            applyDefaults();
        }

        /** Applies title border, fonts and derived colors from UIManager keys. */
        private void applyDefaults() {
            setBorder(new RoundedTitleBorder(demoName,
                    UIManager.getColor(Keys.TITLE_GRADIENT_COLOR1_KEY),
                    UIManager.getColor(Keys.TITLE_GRADIENT_COLOR2_KEY)));

            setFont(UIManager.getFont(Keys.TITLE_FONT_KEY));
            Color bg = Utilities.deriveColorHSB(
                    UIManager.getColor("Panel.background"), 0, 0, -.06f);
            setBackground(bg);
            setForeground(UIManager.getColor(Keys.TITLE_FOREGROUND_KEY));
            if (demoPanel != null) {
                // Demo area is slightly lighter than the surrounding panel.
                demoPanel.setBackground(Utilities.deriveColorHSB(bg, 0, 0, .04f));
            }
            if (descriptionArea != null) {
                descriptionArea.setBackground(bg);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.thrift.server; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.nio.ByteBuffer; import junit.framework.TestCase; import org.apache.thrift.TException; import org.apache.thrift.TProcessor; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.protocol.TProtocolFactory; import org.apache.thrift.transport.TFramedTransport; import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportFactory; import org.apache.thrift.transport.TFramedTransport.Factory; import thrift.test.Insanity; import thrift.test.Numberz; import thrift.test.ThriftTest; import thrift.test.Xception; import thrift.test.Xception2; import thrift.test.Xtruct; import thrift.test.Xtruct2; public abstract class ServerTestBase extends TestCase { public static class TestHandler implements ThriftTest.Iface { public TestHandler() {} public void 
testVoid() { System.out.print("testVoid()\n"); } public String testString(String thing) { System.out.print("testString(\"" + thing + "\")\n"); return thing; } public boolean testBool(boolean thing) { System.out.print("testBool(" + thing + ")\n"); return thing; } public byte testByte(byte thing) { System.out.print("testByte(" + thing + ")\n"); return thing; } public int testI32(int thing) { System.out.print("testI32(" + thing + ")\n"); return thing; } public long testI64(long thing) { System.out.print("testI64(" + thing + ")\n"); return thing; } public double testDouble(double thing) { System.out.print("testDouble(" + thing + ")\n"); return thing; } public ByteBuffer testBinary(ByteBuffer thing) { StringBuilder sb = new StringBuilder(thing.remaining() * 3); thing.mark(); while (thing.remaining() > 0) { sb.append(String.format("%02X ", thing.get())); } System.out.print("testBinary(" + sb.toString() + ")\n"); thing.reset(); return thing; } public Xtruct testStruct(Xtruct thing) { System.out.print("testStruct({" + "\"" + thing.string_thing + "\", " + thing.byte_thing + ", " + thing.i32_thing + ", " + thing.i64_thing + "})\n"); return thing; } public Xtruct2 testNest(Xtruct2 nest) { Xtruct thing = nest.struct_thing; System.out.print("testNest({" + nest.byte_thing + ", {" + "\"" + thing.string_thing + "\", " + thing.byte_thing + ", " + thing.i32_thing + ", " + thing.i64_thing + "}, " + nest.i32_thing + "})\n"); return nest; } public Map<Integer,Integer> testMap(Map<Integer,Integer> thing) { System.out.print("testMap({"); System.out.print(thing); System.out.print("})\n"); return thing; } public Map<String,String> testStringMap(Map<String,String> thing) { System.out.print("testStringMap({"); System.out.print(thing); System.out.print("})\n"); return thing; } public Set<Integer> testSet(Set<Integer> thing) { System.out.print("testSet({"); boolean first = true; for (int elem : thing) { if (first) { first = false; } else { System.out.print(", "); } System.out.print(elem); } 
System.out.print("})\n"); return thing; } public List<Integer> testList(List<Integer> thing) { System.out.print("testList({"); boolean first = true; for (int elem : thing) { if (first) { first = false; } else { System.out.print(", "); } System.out.print(elem); } System.out.print("})\n"); return thing; } public Numberz testEnum(Numberz thing) { System.out.print("testEnum(" + thing + ")\n"); return thing; } public long testTypedef(long thing) { System.out.print("testTypedef(" + thing + ")\n"); return thing; } public Map<Integer,Map<Integer,Integer>> testMapMap(int hello) { System.out.print("testMapMap(" + hello + ")\n"); Map<Integer,Map<Integer,Integer>> mapmap = new HashMap<Integer,Map<Integer,Integer>>(); HashMap<Integer,Integer> pos = new HashMap<Integer,Integer>(); HashMap<Integer,Integer> neg = new HashMap<Integer,Integer>(); for (int i = 1; i < 5; i++) { pos.put(i, i); neg.put(-i, -i); } mapmap.put(4, pos); mapmap.put(-4, neg); return mapmap; } public Map<Long, Map<Numberz,Insanity>> testInsanity(Insanity argument) { System.out.print("testInsanity()\n"); HashMap<Numberz,Insanity> first_map = new HashMap<Numberz, Insanity>(); HashMap<Numberz,Insanity> second_map = new HashMap<Numberz, Insanity>();; first_map.put(Numberz.TWO, argument); first_map.put(Numberz.THREE, argument); Insanity looney = new Insanity(); second_map.put(Numberz.SIX, looney); Map<Long,Map<Numberz,Insanity>> insane = new HashMap<Long, Map<Numberz,Insanity>>(); insane.put((long)1, first_map); insane.put((long)2, second_map); return insane; } public Xtruct testMulti(byte arg0, int arg1, long arg2, Map<Short,String> arg3, Numberz arg4, long arg5) { System.out.print("testMulti()\n"); Xtruct hello = new Xtruct();; hello.string_thing = "Hello2"; hello.byte_thing = arg0; hello.i32_thing = arg1; hello.i64_thing = arg2; return hello; } public void testException(String arg) throws Xception, TException { System.out.print("testException("+arg+")\n"); if ("Xception".equals(arg)) { Xception x = new 
Xception(); x.errorCode = 1001; x.message = arg; throw x; } else if ("TException".equals(arg)) { // Unspecified exception should yield a TApplicationException on client side throw new RuntimeException(arg); } else { Xtruct result = new Xtruct(); result.string_thing = arg; } return; } public Xtruct testMultiException(String arg0, String arg1) throws Xception, Xception2 { System.out.print("testMultiException(" + arg0 + ", " + arg1 + ")\n"); if (arg0.equals("Xception")) { Xception x = new Xception(); x.errorCode = 1001; x.message = "This is an Xception"; throw x; } else if (arg0.equals("Xception2")) { Xception2 x = new Xception2(); x.errorCode = 2002; x.struct_thing = new Xtruct(); x.struct_thing.string_thing = "This is an Xception2"; throw x; } Xtruct result = new Xtruct(); result.string_thing = arg1; return result; } public void testOneway(int sleepFor) { System.out.println("testOneway(" + Integer.toString(sleepFor) + ") => sleeping..."); try { Thread.sleep(sleepFor * SLEEP_DELAY); System.out.println("Done sleeping!"); } catch (InterruptedException ie) { throw new RuntimeException(ie); } } } // class TestHandler private static final List<TProtocolFactory> PROTOCOLS = Arrays.asList( new TBinaryProtocol.Factory(), new TCompactProtocol.Factory()); public static final String HOST = "localhost"; public static final int PORT = Integer.valueOf( System.getProperty("test.port", "9090")); protected static final int SLEEP_DELAY = 1000; protected static final int SOCKET_TIMEOUT = 1500; private static final Xtruct XSTRUCT = new Xtruct("Zero", (byte) 1, -3, -5); private static final Xtruct2 XSTRUCT2 = new Xtruct2((byte)1, XSTRUCT, 5); public void startServer(TProcessor processor, TProtocolFactory protoFactory) throws Exception{ startServer(processor, protoFactory, null); } public abstract void startServer(TProcessor processor, TProtocolFactory protoFactory, TTransportFactory factory) throws Exception; public abstract void stopServer() throws Exception; public abstract TTransport 
getClientTransport(TTransport underlyingTransport) throws Exception; private void testBool(ThriftTest.Client testClient) throws TException { boolean t = testClient.testBool(true); assertEquals(true, t); boolean f = testClient.testBool(false); assertEquals(false, f); } private void testByte(ThriftTest.Client testClient) throws TException { byte i8 = testClient.testByte((byte)1); assertEquals(1, i8); } private void testDouble(ThriftTest.Client testClient) throws TException { double dub = testClient.testDouble(5.325098235); assertEquals(5.325098235, dub); } private void testEnum(ThriftTest.Client testClient) throws TException { assertEquals(Numberz.ONE, testClient.testEnum(Numberz.ONE)); assertEquals(Numberz.TWO, testClient.testEnum(Numberz.TWO)); assertEquals(Numberz.THREE, testClient.testEnum(Numberz.THREE)); assertEquals(Numberz.FIVE, testClient.testEnum(Numberz.FIVE)); assertEquals(Numberz.EIGHT, testClient.testEnum(Numberz.EIGHT)); } private void testI32(ThriftTest.Client testClient) throws TException { int i32 = testClient.testI32(-1); assertEquals(i32, -1); } private void testI64(ThriftTest.Client testClient) throws TException { long i64 = testClient.testI64(-34359738368L); assertEquals(i64, -34359738368L); } // todo: add assertions private void testInsanity(ThriftTest.Client testClient) throws TException { Insanity insane; insane = new Insanity(); insane.userMap = new HashMap<Numberz, Long>(); insane.userMap.put(Numberz.FIVE, (long)5000); Xtruct truck = new Xtruct(); truck.string_thing = "Truck"; truck.byte_thing = (byte)8; truck.i32_thing = 8; truck.i64_thing = 8; insane.xtructs = new ArrayList<Xtruct>(); insane.xtructs.add(truck); System.out.print("testInsanity()"); Map<Long,Map<Numberz,Insanity>> whoa = testClient.testInsanity(insane); System.out.print(" = {"); for (long key : whoa.keySet()) { Map<Numberz,Insanity> val = whoa.get(key); System.out.print(key + " => {"); for (Numberz k2 : val.keySet()) { Insanity v2 = val.get(k2); System.out.print(k2 + " => 
{"); Map<Numberz, Long> userMap = v2.userMap; System.out.print("{"); if (userMap != null) { for (Numberz k3 : userMap.keySet()) { System.out.print(k3 + " => " + userMap.get(k3) + ", "); } } System.out.print("}, "); List<Xtruct> xtructs = v2.xtructs; System.out.print("{"); if (xtructs != null) { for (Xtruct x : xtructs) { System.out.print("{" + "\"" + x.string_thing + "\", " + x.byte_thing + ", " + x.i32_thing + ", "+ x.i64_thing + "}, "); } } System.out.print("}"); System.out.print("}, "); } System.out.print("}, "); } System.out.print("}\n"); } public boolean useAsyncProcessor() { return false; } public void testIt() throws Exception { for (TProtocolFactory protoFactory : getProtocols()) { TProcessor processor = useAsyncProcessor() ? new ThriftTest.AsyncProcessor<AsyncTestHandler>(new AsyncTestHandler()) : new ThriftTest.Processor<TestHandler>(new TestHandler()); startServer(processor, protoFactory); TSocket socket = new TSocket(HOST, PORT); socket.setTimeout(SOCKET_TIMEOUT); TTransport transport = getClientTransport(socket); TProtocol protocol = protoFactory.getProtocol(transport); ThriftTest.Client testClient = new ThriftTest.Client(protocol); open(transport); testVoid(testClient); testString(testClient); testBool(testClient); testByte(testClient); testI32(testClient); testI64(testClient); testDouble(testClient); testStruct(testClient); testNestedStruct(testClient); testMap(testClient); testStringMap(testClient); testSet(testClient); testList(testClient); testEnum(testClient); testTypedef(testClient); testNestedMap(testClient); testInsanity(testClient); testException(testClient); testOneway(testClient); testI32(testClient); transport.close(); socket.close(); stopServer(); } } public void open(TTransport transport) throws Exception { transport.open(); } public List<TProtocolFactory> getProtocols() { return PROTOCOLS; } private void testList(ThriftTest.Client testClient) throws TException { List<Integer> listout = new ArrayList<Integer>(); for (int i = -2; i < 3; 
++i) { listout.add(i); } List<Integer> listin = testClient.testList(listout); assertEquals(listout, listin); }
// NOTE(review): the line above is the tail of testList(...); the enclosing test class and the
// start of that method are defined earlier in the file, outside this chunk.

/** Round-trips an Integer->Integer map through the server and verifies it comes back unchanged. */
private void testMap(ThriftTest.Client testClient) throws TException {
    Map<Integer,Integer> mapout = new HashMap<Integer,Integer>();
    for (int i = 0; i < 5; ++i) {
        mapout.put(i, i-10);
    }
    Map<Integer,Integer> mapin = testClient.testMap(mapout);
    assertEquals(mapout, mapin);
}

/** Round-trips a String->String map; keys include spaces and numeric-looking strings to exercise encoding. */
private void testStringMap(ThriftTest.Client testClient) throws TException {
    Map<String,String> mapout = new HashMap<String,String>();
    mapout.put("a", "123");
    mapout.put(" x y ", " with spaces ");
    mapout.put("same", "same");
    mapout.put("0", "numeric key");
    Map<String,String> mapin = testClient.testStringMap(mapout);
    assertEquals(mapout, mapin);
}

/** Verifies testMapMap returns the canonical {4 -> {1..4 -> 1..4}, -4 -> {-1..-4 -> -1..-4}} structure. */
private void testNestedMap(ThriftTest.Client testClient) throws TException {
    Map<Integer,Map<Integer,Integer>> mm = testClient.testMapMap(1);
    Map<Integer,Map<Integer,Integer>> mapmap = new HashMap<Integer,Map<Integer,Integer>>();
    HashMap<Integer,Integer> pos = new HashMap<Integer,Integer>();
    HashMap<Integer,Integer> neg = new HashMap<Integer,Integer>();
    for (int i = 1; i < 5; i++) {
        pos.put(i, i);
        neg.put(-i, -i);
    }
    mapmap.put(4, pos);
    mapmap.put(-4, neg);
    assertEquals(mapmap, mm);
}

/** Round-trips a nested struct (Xtruct2 containing an Xtruct). */
private void testNestedStruct(ThriftTest.Client testClient) throws TException {
    Xtruct2 in2 = testClient.testNest(XSTRUCT2);
    assertEquals(XSTRUCT2, in2);
}

/**
 * Oneway calls must return immediately; the server-side sleep of 1s must NOT block the client,
 * so the elapsed client time is asserted to be well under the sleep duration.
 */
private void testOneway(ThriftTest.Client testClient) throws Exception {
    long begin = System.currentTimeMillis();
    testClient.testOneway(1);
    long elapsed = System.currentTimeMillis() - begin;
    assertTrue(elapsed < 500);
}

/** Round-trips a small Integer set ({-2..2}) and verifies equality. */
private void testSet(ThriftTest.Client testClient) throws TException {
    Set<Integer> setout = new HashSet<Integer>();
    for (int i = -2; i < 3; ++i) {
        setout.add(i);
    }
    Set<Integer> setin = testClient.testSet(setout);
    assertEquals(setout, setin);
}

/** Round-trips a plain string. */
private void testString(ThriftTest.Client testClient) throws TException {
    String s = testClient.testString("Test");
    assertEquals("Test", s);
}

/** Round-trips a flat struct. */
private void testStruct(ThriftTest.Client testClient) throws TException {
    assertEquals(XSTRUCT, testClient.testStruct(XSTRUCT));
}

/** Round-trips a typedef'd i64 value. */
private void testTypedef(ThriftTest.Client testClient) throws TException {
    assertEquals(309858235082523L, testClient.testTypedef(309858235082523L));
}

/** Calls a void RPC; success is simply the absence of an exception. */
private void testVoid(ThriftTest.Client testClient) throws TException {
    testClient.testVoid();
}

/**
 * Transport factory wrapper that counts how many transports it hands out, so tests can
 * verify the server actually routed connections through the configured factory.
 */
private static class CallCountingTransportFactory extends TTransportFactory {
    // Number of getTransport(...) calls observed; read directly by the test.
    public int count = 0;
    private final Factory factory;
    public CallCountingTransportFactory(Factory factory) {
        this.factory = factory;
    }
    @Override
    public TTransport getTransport(TTransport trans) {
        count++;
        return factory.getTransport(trans);
    }
}

/**
 * Verifies the server uses the supplied transport factory: after one client round-trip the
 * counting factory must have been invoked (expected count is 2 — presumably one transport per
 * direction or per connection endpoint; TODO confirm against the server implementation).
 */
public void testTransportFactory() throws Exception {
    for (TProtocolFactory protoFactory : getProtocols()) {
        TestHandler handler = new TestHandler();
        ThriftTest.Processor<TestHandler> processor = new ThriftTest.Processor<TestHandler>(handler);
        final CallCountingTransportFactory factory = new CallCountingTransportFactory(new TFramedTransport.Factory());
        startServer(processor, protoFactory, factory);
        assertEquals(0, factory.count);
        TSocket socket = new TSocket(HOST, PORT);
        socket.setTimeout(SOCKET_TIMEOUT);
        TTransport transport = getClientTransport(socket);
        open(transport);
        TProtocol protocol = protoFactory.getProtocol(transport);
        ThriftTest.Client testClient = new ThriftTest.Client(protocol);
        assertEquals(0, testClient.testByte((byte) 0));
        assertEquals(2, factory.count);
        socket.close();
        stopServer();
    }
}

/**
 * Exercises the three exception paths of testException:
 * declared exception (Xception), generic TException, and the no-exception case.
 */
private void testException(ThriftTest.Client testClient) throws TException, Xception {
    try {
        testClient.testException("Xception");
        assert false;
    } catch(Xception e) {
        assertEquals(e.message, "Xception");
        assertEquals(e.errorCode, 1001);
    }
    try {
        testClient.testException("TException");
        assert false;
    } catch(TException e) {
        // expected: any TException is acceptable here, no further checks
    }
    testClient.testException("no Exception");
}

/**
 * Async service implementation that delegates each call to the synchronous TestHandler and
 * completes the callback with the result, so sync and async servers share one behavior.
 */
public static class AsyncTestHandler implements ThriftTest.AsyncIface {
    TestHandler handler = new TestHandler();
    @Override
    public void testVoid(AsyncMethodCallback<Void> resultHandler) throws TException {
        resultHandler.onComplete(null);
    }
    @Override
    public void testString(String thing, AsyncMethodCallback<String> resultHandler) throws TException {
        resultHandler.onComplete(handler.testString(thing));
    }
    @Override
    public void testBool(boolean thing, AsyncMethodCallback<Boolean> resultHandler) throws TException {
        resultHandler.onComplete(handler.testBool(thing));
    }
    @Override
    public void testByte(byte thing, AsyncMethodCallback<Byte> resultHandler) throws TException {
        resultHandler.onComplete(handler.testByte(thing));
    }
    @Override
    public void testI32(int thing, AsyncMethodCallback<Integer> resultHandler) throws TException {
        resultHandler.onComplete(handler.testI32(thing));
    }
    @Override
    public void testI64(long thing, AsyncMethodCallback<Long> resultHandler) throws TException {
        resultHandler.onComplete(handler.testI64(thing));
    }
    @Override
    public void testDouble(double thing, AsyncMethodCallback<Double> resultHandler) throws TException {
        resultHandler.onComplete(handler.testDouble(thing));
    }
    @Override
    public void testBinary(ByteBuffer thing, AsyncMethodCallback<ByteBuffer> resultHandler) throws TException {
        resultHandler.onComplete(handler.testBinary(thing));
    }
    @Override
    public void testStruct(Xtruct thing, AsyncMethodCallback<Xtruct> resultHandler) throws TException {
        resultHandler.onComplete(handler.testStruct(thing));
    }
    @Override
    public void testNest(Xtruct2 thing, AsyncMethodCallback<Xtruct2> resultHandler) throws TException {
        resultHandler.onComplete(handler.testNest(thing));
    }
    @Override
    public void testMap(Map<Integer, Integer> thing, AsyncMethodCallback<Map<Integer, Integer>> resultHandler) throws TException {
        resultHandler.onComplete(handler.testMap(thing));
    }
    @Override
    public void testStringMap(Map<String, String> thing, AsyncMethodCallback<Map<String, String>> resultHandler) throws TException {
        resultHandler.onComplete(handler.testStringMap(thing));
    }
    @Override
    public void testSet(Set<Integer> thing, AsyncMethodCallback<Set<Integer>> resultHandler) throws TException {
        resultHandler.onComplete(handler.testSet(thing));
    }
    @Override
    public void testList(List<Integer> thing, AsyncMethodCallback<List<Integer>> resultHandler) throws TException {
        resultHandler.onComplete(handler.testList(thing));
    }
    @Override
    public void testEnum(Numberz thing, AsyncMethodCallback<Numberz> resultHandler) throws TException {
        resultHandler.onComplete(handler.testEnum(thing));
    }
    @Override
    public void testTypedef(long thing, AsyncMethodCallback<Long> resultHandler) throws TException {
        resultHandler.onComplete(handler.testTypedef(thing));
    }
    @Override
    public void testMapMap(int hello, AsyncMethodCallback<Map<Integer,Map<Integer,Integer>>> resultHandler) throws TException {
        resultHandler.onComplete(handler.testMapMap(hello));
    }
    @Override
    public void testInsanity(Insanity argument, AsyncMethodCallback<Map<Long, Map<Numberz,Insanity>>> resultHandler) throws TException {
        resultHandler.onComplete(handler.testInsanity(argument));
    }
    @Override
    public void testMulti(byte arg0, int arg1, long arg2, Map<Short, String> arg3, Numberz arg4, long arg5, AsyncMethodCallback<Xtruct> resultHandler) throws TException {
        resultHandler.onComplete(handler.testMulti(arg0,arg1,arg2,arg3,arg4,arg5));
    }
    @Override
    public void testException(String arg, AsyncMethodCallback<Void> resultHandler) throws TException {
        System.out.print("testException("+arg+")\n");
        if ("Xception".equals(arg)) {
            Xception x = new Xception();
            x.errorCode = 1001;
            x.message = arg;
            // throw and onError yield the same result.
            // throw x;
            resultHandler.onError(x);
            return;
        } else if ("TException".equals(arg)) {
            // throw and onError yield the same result.
            // resultHandler.onError(new TException(arg));
            // return;
            // Unspecified exception should yield a TApplicationException on client side
            throw new RuntimeException(arg);
        }
        resultHandler.onComplete(null);
    }
    @Override
    public void testMultiException(String arg0, String arg1, AsyncMethodCallback<Xtruct> resultHandler) throws TException {
        // Intentionally unimplemented in the async handler; callback is never completed.
        //To change body of implemented methods use File | Settings | File Templates.
    }
    @Override
    public void testOneway(int secondsToSleep, AsyncMethodCallback<Void> resultHandler) throws TException {
        handler.testOneway(secondsToSleep);
        resultHandler.onComplete(null);
    }
}
}
package com.ownedthx.xmldoclet; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import com.ownedthx.xmldoclet.xmlbindings.Package; import com.ownedthx.xmldoclet.xmlbindings.*; import com.ownedthx.xmldoclet.simpledata.*; /** * Unit test group for Annotations */ public class AnnotationTest extends TestCase { /** * Create the test case * * @param testName name of the test case */ public AnnotationTest( String testName ) { super( testName ); } /** * @return the suite of tests being tested */ public static Test suite() { return new TestSuite( AnnotationTest.class ); } public void setUp() { TestGlobals.initializeLogging(); } /** * Rigourous Parser :-) */ public void testSampledoc() { App app = new App(); Root root = app.processSource( ".", new String[] { "./src/test/java"}, null, null, new String[] { "com" }); assertTrue( true ); } /** * testing an annotation with nothing defined */ public void testAnnotation1() { App app = new App(); String[] sourceFiles = new String[] { "./src/test/java/com/ownedthx/xmldoclet/simpledata/Annotation1.java" }; Root root = app.processSource( null, null, null, sourceFiles, null ); Package sdPackage = root.packages[0]; Annotation sdAnnotation1 = sdPackage.annotations[0]; assertEquals(root.packages.length, 1); assertEquals(sdPackage.comment, ""); assertEquals(sdPackage.name, "com.ownedthx.xmldoclet.simpledata"); assertNull(sdPackage.interfaces); assertNull(sdPackage.exceptions); assertEquals(sdPackage.annotations.length, 1); assertNull(sdPackage.enums); assertNull(sdPackage.classes); assertEquals(sdAnnotation1.comment, "Annotation1"); assertEquals(sdAnnotation1.name, Annotation1.class.getSimpleName()); assertEquals(sdAnnotation1.qualifiedName, Annotation1.class.getName()); assertEquals(sdAnnotation1.scope, "public"); assertNull(sdAnnotation1.annotationInstances); assertNull(sdAnnotation1.elements); assertTrue(sdAnnotation1.isIncluded); } /** * testing an annotation with an annotation decorating 
it */ public void testAnnotation2() { App app = new App(); String[] sourceFiles = new String[] { "./src/test/java/com/ownedthx/xmldoclet/simpledata/Annotation2.java" }; Root root = app.processSource( null, null, null, sourceFiles, null ); Package sdPackage = root.packages[0]; Annotation sdAnnotation1 = sdPackage.annotations[0]; AnnotationInstance annotation = sdAnnotation1.annotationInstances[0]; assertEquals(root.packages.length, 1); assertEquals(sdPackage.comment, ""); assertEquals(sdPackage.name, "com.ownedthx.xmldoclet.simpledata"); assertNull(sdPackage.interfaces); assertNull(sdPackage.exceptions); assertEquals(sdPackage.annotations.length, 1); assertNull(sdPackage.enums); assertNull(sdPackage.classes); assertEquals(sdAnnotation1.comment, "Annotation2"); assertEquals(sdAnnotation1.name, Annotation2.class.getSimpleName()); assertEquals(sdAnnotation1.qualifiedName, Annotation2.class.getName()); assertEquals(sdAnnotation1.scope, "public"); assertEquals(sdAnnotation1.annotationInstances.length, 1); assertNull(sdAnnotation1.elements); assertTrue(sdAnnotation1.isIncluded); // test annotation 'deprecated' on class assertEquals(annotation.qualifiedName, "java.lang.Deprecated"); assertEquals(annotation.name, "Deprecated"); assertNull(annotation.arguments); } /** * testing an annotation with one element field */ public void testAnnotation3() { App app = new App(); String[] sourceFiles = new String[] { "./src/test/java/com/ownedthx/xmldoclet/simpledata/Annotation3.java" }; Root root = app.processSource( null, null, null, sourceFiles, null ); Package sdPackage = root.packages[0]; Annotation sdAnnotation1 = sdPackage.annotations[0]; AnnotationElement element = sdAnnotation1.elements[0]; assertEquals(root.packages.length, 1); assertEquals(sdPackage.comment, ""); assertEquals(sdPackage.name, "com.ownedthx.xmldoclet.simpledata"); assertNull(sdPackage.interfaces); assertNull(sdPackage.exceptions); assertEquals(sdPackage.annotations.length, 1); assertNull(sdPackage.enums); 
assertNull(sdPackage.classes); assertEquals(sdAnnotation1.comment, "Annotation3"); assertEquals(sdAnnotation1.name, Annotation3.class.getSimpleName()); assertEquals(sdAnnotation1.qualifiedName, Annotation3.class.getName()); assertEquals(sdAnnotation1.scope, "public"); assertNull(sdAnnotation1.annotationInstances); assertEquals(sdAnnotation1.elements.length, 1); assertTrue(sdAnnotation1.isIncluded); // test annotation element assertEquals(element.name, "id"); assertEquals(element.qualifiedName, "com.ownedthx.xmldoclet.simpledata.Annotation3.id"); assertEquals(element.type, "int"); assertEquals(element.defaultValue, Integer.toString(3)); } /** * testing an annotation with non-public definition */ public void testAnnotation4() { App app = new App(); String[] sourceFiles = new String[] { "./src/test/java/com/ownedthx/xmldoclet/simpledata/Annotation4.java" }; Root root = app.processSource( null, null, null, sourceFiles, null ); Package sdPackage = root.packages[0]; Annotation sdAnnotation1 = sdPackage.annotations[0]; assertEquals(root.packages.length, 1); assertEquals(sdPackage.comment, ""); assertEquals(sdPackage.name, "com.ownedthx.xmldoclet.simpledata"); assertNull(sdPackage.interfaces); assertNull(sdPackage.exceptions); assertEquals(sdPackage.annotations.length, 1); assertNull(sdPackage.enums); assertNull(sdPackage.classes); assertEquals(sdAnnotation1.comment, "Annotation4"); assertEquals(sdAnnotation1.name, "Annotation4"); assertEquals(sdAnnotation1.qualifiedName, "com.ownedthx.xmldoclet.simpledata.Annotation4"); assertEquals(sdAnnotation1.scope, "packageprivate"); assertNull(sdAnnotation1.annotationInstances); assertNull(sdAnnotation1.elements); assertTrue(sdAnnotation1.isIncluded); } }
/*
 * Copyright 2015 Sebastian Schmidl
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package sas.systems.imflux.functionaltest.session;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.junit.After;
import org.junit.Test;

import sas.systems.imflux.packet.DataPacket;
import sas.systems.imflux.participant.RtpParticipant;
import sas.systems.imflux.participant.RtpParticipantInfo;
import sas.systems.imflux.session.rtp.RtpSession;
import sas.systems.imflux.session.rtp.RtpSessionDataListener;
import sas.systems.imflux.session.rtp.RtpSessionEventAdapter;
import sas.systems.imflux.session.rtp.SingleParticipantSession;

/**
 * Functional test for the class {@link SingleParticipantSession}.
 *
 * @author <a href="http://bruno.biasedbit.com/">Bruno de Carvalho</a>
 * @author <a href="https://github.com/CodeLionX">CodeLionX</a>
 */
public class SingleParticipantSessionFunctionalTest {

    // RTP payload type 34 (H.263) used for all sessions in these tests.
    final private int PT_H263 = 34;

    // Both peers of each test; torn down after every test method.
    private SingleParticipantSession session1;
    private SingleParticipantSession session2;

    /**
     * Terminate both RTP sessions after each test-method.
     */
    @After
    public void tearDown() {
        if (this.session1 != null) {
            this.session1.terminate();
        }
        if (this.session2 != null) {
            this.session2.terminate();
        }
    }

    /**
     * Creates both sessions and sends a {@link DataPacket} from each one to the other.
     *
     * @throws Exception
     */
    @Test
    public void testSendAndReceive() throws Exception {
        // One count per expected delivery (one packet in each direction).
        final CountDownLatch latch = new CountDownLatch(2);
        // first session setup
        final RtpParticipant local1 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 6000, 6001);
        final RtpParticipant remote1 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 7000, 7001);
        this.session1 = new SingleParticipantSession("Session1", PT_H263, local1, remote1);
        assertTrue(this.session1.init());
        this.session1.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 1 received packet: " + packet + "(session: " + session.getId() + ")");
                latch.countDown();
            }
        });
        // second session setup (mirror-image addressing of session 1)
        final RtpParticipant local2 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 7000, 7001);
        final RtpParticipant remote2 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 6000, 6001);
        this.session2 = new SingleParticipantSession("Session2", PT_H263, local2, remote2);
        assertTrue(this.session2.init());
        this.session2.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 2 received packet: " + packet + "(session: " + session.getId() + ")");
                latch.countDown();
            }
        });
        // send test data
        DataPacket packet = new DataPacket();
        packet.setData(new byte[]{0x45, 0x45, 0x45, 0x45});
        assertTrue(this.session1.sendDataPacket(packet));
        assertTrue(this.session2.sendDataPacket(packet));
        // Both deliveries must happen within 2s; any exception during the wait fails the test.
        try {
            assertTrue(latch.await(2000L, TimeUnit.MILLISECONDS));
        } catch (Exception e) {
            fail("Exception caught: " + e.getClass().getSimpleName() + " - " + e.getMessage());
        }
    }

    /**
     * Test for port updating of the remote participant if it was set wrong
     * @throws Exception
     */
    @Test
    public void testSendAndReceiveUpdatingRemote() throws Exception {
        final CountDownLatch latch1 = new CountDownLatch(1);
        final CountDownLatch latch2 = new CountDownLatch(1);
        // first session setup
        final RtpParticipant local1 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 6000, 6001);
        final RtpParticipant remote1 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 7000, 7001);
        this.session1 = new SingleParticipantSession("Session1", PT_H263, local1, remote1);
        assertTrue(this.session1.init());
        this.session1.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 1 received packet: " + packet + "(session: " + session.getId() + ")");
                latch1.countDown();
            }
        });
        // session 2 setup — remote2 deliberately points at the WRONG ports (9000/9001),
        // and setSendToLastOrigin(true) lets session2 correct itself from observed traffic.
        final RtpParticipant local2 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 7000, 7001);
        final RtpParticipant remote2 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 9000, 9001); // <-- note the other ports
        this.session2 = new SingleParticipantSession("Session2", PT_H263, local2, remote2);
        this.session2.setSendToLastOrigin(true);
        assertTrue(this.session2.init());
        this.session2.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 2 received packet: " + packet + "(session: " + session.getId() + ")");
                latch2.countDown();
            }
        });
        // test initial configuration
        assertEquals("/127.0.0.1:7000", this.session1.getRemoteParticipant().getDataDestination().toString());
        assertEquals("/127.0.0.1:9000", this.session2.getRemoteParticipant().getDataDestination().toString());
        assertNull(this.session1.getRemoteParticipant().getLastDataOrigin());
        assertNull(this.session2.getRemoteParticipant().getLastDataOrigin());
        System.out.println("Initial address of remote participant: " + this.session2.getRemoteParticipant().getDataDestination().toString() + " (session2)");
        System.out.println("Initial address of remote participant: " + this.session1.getRemoteParticipant().getDataDestination().toString() + " (session1)");
        // send test data
        DataPacket packet = new DataPacket();
        packet.setData(new byte[]{0x45, 0x45, 0x45, 0x45});
        // this packet should not reach the target (session2 is still aiming at port 9000)
        assertTrue(this.session2.sendDataPacket(packet));
        try {
            assertFalse(latch1.await(2000L, TimeUnit.MILLISECONDS));
        } catch (Exception e) {
            fail("Exception caught: " + e.getClass().getSimpleName() + " - " + e.getMessage());
        }
        // send packet from session1 to allow resolving ip/port issue
        assertTrue(this.session1.sendDataPacket(packet));
        try {
            assertTrue(latch2.await(2000L, TimeUnit.MILLISECONDS));
        } catch (Exception e) {
            fail("Exception caught: " + e.getClass().getSimpleName() + " - " + e.getMessage());
        }
        // test changed ip of second session
        assertNull(this.session1.getRemoteParticipant().getLastDataOrigin()); // should not change
        assertEquals("/127.0.0.1:6000", this.session2.getRemoteParticipant().getLastDataOrigin().toString()); // should change
        System.out.println("New address of remote participant: " + this.session2.getRemoteParticipant().getLastDataOrigin().toString() + " (session2) <-- should have changed!");
        System.out.println("New address of remote participant: " + this.session1.getRemoteParticipant().getDataDestination().toString() + " (session1)");
        // test if the packet now reaches its destination
        assertTrue(this.session2.sendDataPacket(packet));
        try {
            assertTrue(latch1.await(2000L, TimeUnit.MILLISECONDS));
        } catch (Exception e) {
            fail("Exception caught: " + e.getClass().getSimpleName() + " - " + e.getMessage());
        }
    }

    /**
     * Test for ignoring packages from other remotes (with different SSRCs).
     *
     * @throws Exception
     */
    @Test
    public void testIgnoreFromUnexpectedSsrc() throws Exception {
        final AtomicInteger counter = new AtomicInteger();
        // setup session1
        final RtpParticipant local1 = RtpParticipant.createReceiver("127.0.0.1", 6000, 6001);
        final RtpParticipant remote1 = RtpParticipant.createReceiver("127.0.0.1", 7000, 7001);
        this.session1 = new SingleParticipantSession("Session1", PT_H263, local1, remote1);
        assertTrue(this.session1.init());
        this.session1.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 1 received packet: " + packet + "(session: " + session.getId() + ")");
                counter.incrementAndGet();
            }
        });
        // setup session2 — anonymous subclass skips the SSRC stamping so the test can
        // send packets carrying an arbitrary (wrong) SSRC.
        final RtpParticipant local2 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 7000, 7001);
        final RtpParticipant remote2 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 6000, 6001);
        this.session2 = new SingleParticipantSession("Session2", PT_H263, local2, remote2) {
            @Override
            public boolean sendDataPacket(DataPacket packet) {
                if (!this.running.get()) {
                    return false;
                }
                packet.setPayloadType(this.payloadType);
                // explicitly commented this one out to allow SSRC override!
                //packet.setSsrc(this.localParticipant.getSsrc());
                packet.setSequenceNumber(this.sequence.incrementAndGet());
                this.internalSendData(packet);
                return true;
            }
        };
        assertTrue(this.session2.init());
        // send testdata
        DataPacket packet = new DataPacket();
        packet.setData(new byte[]{0x45, 0x45, 0x45, 0x45});
        packet.setSsrc(local2.getSsrc());
        assertTrue(this.session2.sendDataPacket(packet));
        packet.setSsrc(local2.getSsrc() + 1);
        assertTrue(this.session2.sendDataPacket(packet));
        Thread.sleep(2000L); // Make sure it was discarded
        // first one should go through, but second one has a wrong SSRC
        assertEquals(1, counter.get());
    }

    /**
     * Tests the SSRC collision resolution algorithm.
     * @throws Exception
     */
    @Test
    public void testCollisionResolution() throws Exception {
        final CountDownLatch latch = new CountDownLatch(1);
        final CountDownLatch latch2 = new CountDownLatch(1);
        // setup session1 — both locals use SSRC 2 on purpose to force a collision.
        final RtpParticipant local1 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 6000, 6001);
        final RtpParticipant remote1 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 7000, 7001);
        this.session1 = new SingleParticipantSession("Session1", PT_H263, local1, remote1);
        assertTrue(this.session1.init());
        this.session1.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 1 received packet: " + packet + "(session: " + session.getId() + ")");
            }
        });
        this.session1.addEventListener(new RtpSessionEventAdapter() {
            @Override
            public void resolvedSsrcConflict(RtpSession session, long oldSsrc, long newSsrc) {
                System.err.println("Resolved SSRC conflict, local SSRC was " + oldSsrc + " and now is " + newSsrc);
                latch.countDown();
            }
            @Override
            public void sessionTerminated(RtpSession session, Throwable cause) {
                System.err.println("Session terminated: " + cause.getMessage());
            }
        });
        // setup session2
        final RtpParticipant local2 = RtpParticipant.createReceiver(new RtpParticipantInfo(2), "127.0.0.1", 7000, 7001);
        final RtpParticipant remote2 = RtpParticipant.createReceiver(new RtpParticipantInfo(1), "127.0.0.1", 6000, 6001);
        this.session2 = new SingleParticipantSession("Session2", PT_H263, local2, remote2);
        assertTrue(this.session2.init());
        this.session2.addDataListener(new RtpSessionDataListener() {
            @Override
            public void dataPacketReceived(RtpSession session, RtpParticipantInfo participant, DataPacket packet) {
                System.err.println("Session 2 received packet: " + packet + "(session: " + session.getId() + ")");
                latch2.countDown();
            }
        });
        // send test data
        final long oldSsrc = this.session1.getLocalParticipant().getSsrc();
        assertTrue(this.session2.sendData(new byte[]{0x45, 0x45, 0x45, 0x45}, 6969, false));
        assertTrue(latch.await(1000L, TimeUnit.MILLISECONDS));
        // Make sure SSRC was updated and send it to S1 to ensure it received the expected SSRC
        assertTrue(oldSsrc != this.session1.getLocalParticipant().getSsrc());
        assertEquals(1, this.session2.getRemoteParticipant().getSsrc());
        assertTrue(this.session1.sendData(new byte[]{0x45, 0x45, 0x45, 0x45}, 6969, false));
        assertTrue(latch2.await(1000L, TimeUnit.MILLISECONDS));
        assertEquals(this.session1.getLocalParticipant().getSsrc(), this.session2.getRemoteParticipant().getSsrc());
        assertEquals(this.session2.getLocalParticipant().getSsrc(), this.session1.getRemoteParticipant().getSsrc());
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.daemon.impl;

import com.intellij.codeInsight.daemon.impl.analysis.ErrorQuickFixProvider;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightInfoHolder;
import com.intellij.codeInsight.highlighting.HighlightErrorFilter;
import com.intellij.lang.LanguageUtil;
import com.intellij.lang.annotation.Annotation;
import com.intellij.lang.annotation.Annotator;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;

import java.util.List;

/**
 * Default highlight visitor: turns PSI error elements into ERROR highlights and
 * runs language annotators over every visited element.
 *
 * @author yole
 */
class DefaultHighlightVisitor implements HighlightVisitor, DumbAware {
  // Per-analyze() annotation sink; created in analyze() and cleared/nulled when it finishes.
  private AnnotationHolderImpl myAnnotationHolder;
  // Filters that may suppress highlighting of individual PsiErrorElements.
  private final HighlightErrorFilter[] myErrorFilters;
  private final Project myProject;
  // Feature toggles set by the constructor: highlight syntax errors / run annotators.
  private final boolean myHighlightErrorElements;
  private final boolean myRunAnnotators;
  private final DumbService myDumbService;
  // Destination for produced HighlightInfos; valid only while analyze() runs.
  private HighlightInfoHolder myHolder;
  private final boolean myBatchMode;
  private final CachedAnnotators myCachedAnnotators;

  @SuppressWarnings("UnusedDeclaration")
  DefaultHighlightVisitor(@NotNull Project project, @NotNull CachedAnnotators cachedAnnotators) {
    // Default configuration: errors on, annotators on, not batch mode.
    this(project, true, true, false, cachedAnnotators);
  }

  DefaultHighlightVisitor(@NotNull Project project,
                          boolean highlightErrorElements,
                          boolean runAnnotators,
                          boolean batchMode,
                          @NotNull CachedAnnotators cachedAnnotators) {
    myProject = project;
    myHighlightErrorElements = highlightErrorElements;
    myRunAnnotators = runAnnotators;
    myCachedAnnotators = cachedAnnotators;
    myErrorFilters = Extensions.getExtensions(HighlightErrorFilter.EP_NAME, project);
    myDumbService = DumbService.getInstance(project);
    myBatchMode = batchMode;
  }

  @Override
  public boolean suitableForFile(@NotNull final PsiFile file) {
    // This visitor applies to every file.
    return true;
  }

  @Override
  public boolean analyze(@NotNull final PsiFile file,
                         final boolean updateWholeFile,
                         @NotNull final HighlightInfoHolder holder,
                         @NotNull final Runnable action) {
    myHolder = holder;
    myAnnotationHolder = new AnnotationHolderImpl(holder.getAnnotationSession(), myBatchMode);
    try {
      action.run();
    }
    finally {
      // Always release per-run state, even if the action throws.
      myAnnotationHolder.clear();
      myAnnotationHolder = null;
      myHolder = null;
    }
    return true;
  }

  @Override
  public void visit(@NotNull PsiElement element) {
    if (element instanceof PsiErrorElement) {
      if (myHighlightErrorElements) visitErrorElement((PsiErrorElement)element);
    }
    else {
      if (myRunAnnotators) runAnnotators(element);
    }
    // Flush any annotations produced for this element into the holder.
    if (myAnnotationHolder.hasAnnotations()) {
      for (Annotation annotation : myAnnotationHolder) {
        myHolder.add(HighlightInfo.fromAnnotation(annotation, null, myBatchMode));
      }
      myAnnotationHolder.clear();
    }
  }

  @SuppressWarnings("CloneDoesntCallSuperClone")
  @Override
  @NotNull
  public HighlightVisitor clone() {
    // Fresh instance with the same configuration; per-run fields start null.
    return new DefaultHighlightVisitor(myProject, myHighlightErrorElements, myRunAnnotators, myBatchMode, myCachedAnnotators);
  }

  @Override
  public int order() {
    return 2;
  }

  private void runAnnotators(PsiElement element) {
    List<Annotator> annotators = myCachedAnnotators.get(element.getLanguage().getID());
    if (annotators.isEmpty()) return;
    final boolean dumb = myDumbService.isDumb();
    //noinspection ForLoopReplaceableByForEach
    for (int i = 0; i < annotators.size(); i++) {
      Annotator annotator = annotators.get(i);
      // During index rebuild ("dumb mode") only dumb-aware annotators may run.
      if (dumb && !DumbService.isDumbAware(annotator)) {
        continue;
      }
      ProgressManager.checkCanceled();
      annotator.annotate(element, myAnnotationHolder);
    }
  }

  private void visitErrorElement(final PsiErrorElement element) {
    // Any registered filter can veto highlighting of this error element.
    for(HighlightErrorFilter errorFilter: myErrorFilters) {
      if (!errorFilter.shouldHighlightErrorElement(element)) {
        return;
      }
    }
    HighlightInfo info = createErrorElementInfo(element);
    myHolder.add(info);
  }

  private static HighlightInfo createErrorElementInfo(@NotNull PsiErrorElement element) {
    TextRange range = element.getTextRange();
    String errorDescription = element.getErrorDescription();
    if (!range.isEmpty()) {
      // Simple case: the error element occupies real text — highlight that span and
      // let quick-fix providers attach fixes.
      HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(range);
      if (errorDescription != null) {
        builder.descriptionAndTooltip(errorDescription);
      }
      final HighlightInfo info = builder.create();
      if (info != null) {
        for(ErrorQuickFixProvider provider: Extensions.getExtensions(ErrorQuickFixProvider.EP_NAME)) {
          provider.registerErrorQuickFix(element, info);
        }
      }
      return info;
    }
    // Empty range: pick a visible 1-character span to anchor the highlight on.
    int offset = range.getStartOffset();
    PsiFile containingFile = element.getContainingFile();
    int fileLength = containingFile.getTextLength();
    FileViewProvider viewProvider = containingFile.getViewProvider();
    PsiElement elementAtOffset = viewProvider.findElementAt(offset, LanguageUtil.getRootLanguage(element));
    String text = elementAtOffset == null ? null : elementAtOffset.getText();
    HighlightInfo info;
    if (offset < fileLength && text != null && !StringUtil.startsWithChar(text, '\n') && !StringUtil.startsWithChar(text, '\r')) {
      // There is a visible character at the offset — highlight exactly that character.
      HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(offset, offset + 1);
      if (errorDescription != null) {
        builder.descriptionAndTooltip(errorDescription);
      }
      info = builder.create();
    }
    else {
      // Offset is at a line break or end of file — mark an end-of-line error instead.
      int start;
      int end;
      if (offset > 0) {
        start = offset/* - 1*/;
        end = offset;
      }
      else {
        start = offset;
        end = offset < fileLength ? offset + 1 : offset;
      }
      HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR).range(element, start, end);
      if (errorDescription != null) {
        builder.descriptionAndTooltip(errorDescription);
      }
      builder.endOfLine();
      info = builder.create();
    }
    return info;
  }
}
package com.droidkit.engine.event;

import android.annotation.SuppressLint;
import android.os.Handler;
import android.os.Looper;

import com.droidkit.actors.dispatch.RunnableDispatcher;
import com.droidkit.engine.Engines;
import com.droidkit.engine._internal.util.Utils;
import com.droidkit.engine._internal.util.WeakEqualReference;

import java.util.Collections;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Process-wide event hub (singleton): listeners register for (eventType, eventId) pairs
 * and receive fired notifications; optional per-event-type "states" replay the latest
 * value to newly added listeners.
 *
 * NOTE(review): the class continues past this chunk; the tail of removeListener and any
 * further members are outside this view.
 */
public class NotificationCenter {

    static {
        // Ensure engine subsystems are initialized before the first use of this class.
        Engines.init();
    }

    private static final String KEY_DELIMITER = "-";
    private static final String TAG = "NotificationCenter";
    // Sentinel eventId used by the addListener overload that does not target a specific id.
    private static final int NO_ID = Integer.MIN_VALUE;

    // Double-checked-locking singleton instance.
    private volatile static NotificationCenter instance;

    public static NotificationCenter getInstance() {
        if (instance == null) {
            synchronized (NotificationCenter.class) {
                if (instance == null) {
                    instance = new NotificationCenter();
                }
            }
        }
        return instance;
    }

    @SuppressLint("NewApi")
    private NotificationCenter() {
        backgroundFireLoop = new RunnableDispatcher(1);
        // Concurrent weak set: entries disappear once the listener container is GC'd.
        listeners = Collections.newSetFromMap(new ConcurrentHashMap<WeakEqualReference<OnNotificationListenerContainer>, Boolean>());
        states = new ConcurrentHashMap<Integer, State>();
        statesValues = new ConcurrentHashMap<String, Object[]>();
    }

    /////////////////////////////////////////////////////////////////////////////////////////////////////

    // Dispatches listener callbacks registered from the UI thread.
    private final Handler handler = new Handler(Looper.getMainLooper());

    // Guards the fire/remove race: removal and "deleted" marking happen atomically.
    private final Object fireRemoveSyncObject = new Object();

    /**
     * Loop for background event fires
     */
    private final RunnableDispatcher backgroundFireLoop;

    /**
     * Active listeners storage
     */
    private final Set<WeakEqualReference<OnNotificationListenerContainer>> listeners;

    /**
     * States storage
     */
    private final ConcurrentHashMap<Integer, State> states;

    /**
     * States values in-memory storage
     */
    private final ConcurrentHashMap<String, Object[]> statesValues;

    /////////////////////////////////////////////////////////////////////////////////////////////////////

    // Registers a stateful event type; its latest value is replayed to new listeners.
    public State registerState(final int eventType, final StateInitValue stateInitValue) {
        final State state = new State(eventType, stateInitValue);
        states.put(eventType, state);
        return state;
    }

    /**
     * Add new listener for specified eventType and eventId
     * You must keep a strong reference to your listener somewhere to prevent GC from removing it from memory
     *
     * @param eventType
     * @param eventId
     * @param notificationListener
     */
    public void addListener(int eventType, int eventId, NotificationListener notificationListener) {
        final State state = states.get(eventType);
        if (state != null) {
            // Replay the current state value (or the lazily-initialized one) to the new listener.
            Object[] value = statesValues.get(getKeyForEvent(eventType, eventId));
            if (value == null && state.getStateInitValue() != null) {
                value = state.getStateInitValue().initState(eventType, eventId);
            }
            if (notificationListener != null) {
                notificationListener.onNotification(eventType, eventId, value);
            }
        }
        if (notificationListener != null) {
            // Container captures whether registration happened on the UI thread so the
            // notification can later be delivered on the matching thread.
            listeners.add(new WeakEqualReference<OnNotificationListenerContainer>(
                    new OnNotificationListenerContainer(eventType, eventId, notificationListener, Utils.isUIThread())
            ));
        }
    }

    /**
     * The same addListener(int, int, NotificationListener), but with eventId == NO_ID
     *
     * @param eventType
     * @param notificationListener
     */
    public void addListener(int eventType, NotificationListener notificationListener) {
        addListener(eventType, NO_ID, notificationListener);
    }

    // Removes a listener; also prunes entries whose weak referent was already collected.
    public void removeListener(NotificationListener notificationListener) {
        final Iterator<WeakEqualReference<OnNotificationListenerContainer>> it = listeners.iterator();
        while (it.hasNext()) {
            final WeakEqualReference<OnNotificationListenerContainer> weakListenerContainer = it.next();
            final OnNotificationListenerContainer listenerContainer = weakListenerContainer.get();
            if (listenerContainer == null) {
                it.remove();
            } else if (listenerContainer.listener == notificationListener) {
                synchronized (fireRemoveSyncObject) {
                    it.remove();
                    listenerContainer.setDeleted(true);
                }
                //continue iterate after that, because we can have
the same NotificationListener //for different eventType and eventId parameters } } } public void fireEvent(final int eventType, final int eventId, final Object[] args) { final Iterator<WeakEqualReference<OnNotificationListenerContainer>> it = listeners.iterator(); final boolean isUiThread = Utils.isUIThread(); final State state = states.get(eventType); if (state != null) { statesValues.put(getKeyForEvent(eventType, eventId), args); } while (it.hasNext()) { final WeakEqualReference<OnNotificationListenerContainer> weakListenerContainer = it.next(); final OnNotificationListenerContainer listenerContainer = weakListenerContainer.get(); if (listenerContainer == null) { it.remove(); } else if (listenerContainer.eventType == eventType && listenerContainer.eventId == eventId) { synchronized (fireRemoveSyncObject) { if (!listenerContainer.isDeleted()) { if (isUiThread && listenerContainer.wasAddedInUIThread) { listenerContainer.listener.onNotification(eventType, eventId, args); continue; } final Runnable fireEvent = new Runnable() { @Override public void run() { synchronized (fireRemoveSyncObject) { //double-check here if (!listenerContainer.isDeleted()) { listenerContainer.listener.onNotification(eventType, eventId, args); } } } }; if (listenerContainer.wasAddedInUIThread) { handler.post(fireEvent); } else { backgroundFireLoop.postAction(fireEvent); } } } } } } /** * The same as fireEvent(int, int, Object[]), but with args == null * * @param eventType * @param eventId */ public void fireEvent(final int eventType, final int eventId) { fireEvent(eventType, eventId, null); } /** * The same as fireEvent(int, int, Object[]), but with eventId == NO_ID * * @param eventType * @param args */ public void fireEvent(final int eventType, final Object[] args) { fireEvent(eventType, NO_ID, args); } /** * The same as fireEvent(int, int, Object[]), but with args == null and eventId == NO_ID * * @param eventType */ public void fireEvent(final int eventType) { fireEvent(eventType, NO_ID, 
null); } private String getKeyForEvent(final int eventType, final int eventId) { return eventType + KEY_DELIMITER + eventId; } /** * Container to store NotificationListener in listener's container */ private class OnNotificationListenerContainer { int eventType; int eventId; NotificationListener listener; boolean wasAddedInUIThread; volatile boolean deleted = false; private OnNotificationListenerContainer(int eventType, int eventId, NotificationListener listener, boolean wasAddedInUIThread) { this.eventType = eventType; this.eventId = eventId; this.listener = listener; this.wasAddedInUIThread = wasAddedInUIThread; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; OnNotificationListenerContainer that = (OnNotificationListenerContainer) o; if (eventId != that.eventId) return false; if (eventType != that.eventType) return false; if (!listener.equals(that.listener)) return false; return true; } @Override public int hashCode() { int result = eventType; result = 31 * result + eventId; result = 31 * result + listener.hashCode(); return result; } @Override public String toString() { return "OnNotificationListenerContainer{" + "eventType=" + eventType + ", eventId=" + eventId + ", listener=" + listener + '}'; } public boolean isDeleted() { return deleted; } public void setDeleted(final boolean deleted) { this.deleted = deleted; } } }
/******************************************************************************* * This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html. * * This file is a derivative of code released by the University of * California under the terms listed below. * * Refinement Analysis Tools is Copyright (c) 2007 The Regents of the * University of California (Regents). Provided that this notice and * the following two paragraphs are included in any distribution of * Refinement Analysis Tools or its derivative work, Regents agrees * not to assert any of Regents' copyright rights in Refinement * Analysis Tools against recipient for recipient's reproduction, * preparation of derivative works, public display, public * performance, distribution or sublicensing of Refinement Analysis * Tools and derivative works, in source code and object code form. * This agreement not to assert does not confer, by implication, * estoppel, or otherwise any license or rights in any intellectual * property of Regents, including, but not limited to, any patents * of Regents or Regents' employees. * * IN NO EVENT SHALL REGENTS BE LIABLE TO ANY PARTY FOR DIRECT, * INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, * INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE * AND ITS DOCUMENTATION, EVEN IF REGENTS HAS BEEN ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE AND FURTHER DISCLAIMS ANY STATUTORY * WARRANTY OF NON-INFRINGEMENT. THE SOFTWARE AND ACCOMPANYING * DOCUMENTATION, IF ANY, PROVIDED HEREUNDER IS PROVIDED "AS * IS". REGENTS HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, * UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
 */
package com.ibm.wala.demandpa.flowgraph;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import com.ibm.wala.cfg.ControlFlowGraph;
import com.ibm.wala.cfg.IBasicBlock;
import com.ibm.wala.classLoader.CallSiteReference;
import com.ibm.wala.demandpa.util.MemoryAccessMap;
import com.ibm.wala.ipa.callgraph.CGNode;
import com.ibm.wala.ipa.callgraph.CallGraph;
import com.ibm.wala.ipa.callgraph.propagation.HeapModel;
import com.ibm.wala.ipa.callgraph.propagation.LocalPointerKey;
import com.ibm.wala.ipa.callgraph.propagation.PointerKey;
import com.ibm.wala.ipa.callgraph.propagation.ReturnValueKey;
import com.ibm.wala.ipa.callgraph.propagation.SSAPropagationCallGraphBuilder;
import com.ibm.wala.ipa.callgraph.propagation.cfa.CallerSiteContext;
import com.ibm.wala.ipa.cha.IClassHierarchy;
import com.ibm.wala.ssa.DefUse;
import com.ibm.wala.ssa.IR;
import com.ibm.wala.ssa.ISSABasicBlock;
import com.ibm.wala.ssa.SSAAbstractInvokeInstruction;
import com.ibm.wala.ssa.SSAInstruction;
import com.ibm.wala.ssa.SSAInvokeInstruction;
import com.ibm.wala.ssa.SSAPhiInstruction;
import com.ibm.wala.util.collections.EmptyIterator;
import com.ibm.wala.util.collections.HashMapFactory;
import com.ibm.wala.util.collections.HashSetFactory;
import com.ibm.wala.util.intset.BitVectorIntSet;
import com.ibm.wala.util.ref.ReferenceCleanser;

/**
 * A graph representing program flow, constructed method-by-method on demand.
 * Subgraphs for call-graph nodes are added lazily the first time the node is
 * visited (see {@link #addSubgraphForNode(CGNode)}); the set of already-visited
 * nodes is tracked in {@code cgNodesVisited}.
 */
public abstract class AbstractDemandFlowGraph extends AbstractFlowGraph {

  private final static boolean DEBUG = false;

  /**
   * Counter for wiping soft caches
   */
  private static int wipeCount = 0;

  /**
   * node numbers of CGNodes we have already visited
   */
  final BitVectorIntSet cgNodesVisited = new BitVectorIntSet();

  /*
   * @see com.ibm.wala.demandpa.flowgraph.IFlowGraph#addSubgraphForNode(com.ibm.wala.ipa.callgraph.CGNode)
   */
  @Override
  public void addSubgraphForNode(CGNode node) throws IllegalArgumentException {
    if (node == null) {
      throw new IllegalArgumentException("node == null");
    }
    IR ir = node.getIR();
    if (ir == null) {
      throw new IllegalArgumentException("no ir for node " + node);
    }
    int n = cg.getNumber(node);
    // Only build the subgraph once per call-graph node.
    if (!cgNodesVisited.contains(n)) {
      cgNodesVisited.add(n);
      unconditionallyAddConstraintsFromNode(node, ir);
      addNodesForInvocations(node, ir);
      addNodesForParameters(node, ir);
    }
  }

  /*
   * @see com.ibm.wala.demandpa.flowgraph.IFlowGraph#hasSubgraphForNode(com.ibm.wala.ipa.callgraph.CGNode)
   */
  @Override
  public boolean hasSubgraphForNode(CGNode node) {
    return cgNodesVisited.contains(cg.getNumber(node));
  }

  /*
   * @see com.ibm.wala.demandpa.flowgraph.IFlowGraph#getParamSuccs(com.ibm.wala.ipa.callgraph.propagation.LocalPointerKey)
   */
  // For a formal parameter pk of some callee, returns the actual-argument pointer
  // keys (with their call sites) at every caller that may target that callee.
  public Iterator<PointerKeyAndCallSite> getParamSuccs(LocalPointerKey pk) {
    // TODO cache this result
    // TODO take some cgnode as parameter if we have calling context?
    CGNode cgNode = params.get(pk);
    if (cgNode == null) {
      return EmptyIterator.instance();
    }
    // value numbers of parameters start at 1, hence the -1 for the position
    int paramPos = pk.getValueNumber() - 1;
    ArrayList<PointerKeyAndCallSite> paramSuccs = new ArrayList<PointerKeyAndCallSite>();
    // iterate over callers
    for (CGNode caller : cg) {
      // TODO optimization: we don't need to add the graph if null is passed
      // as the argument
      addSubgraphForNode(caller);
      IR ir = caller.getIR();
      for (Iterator<CallSiteReference> iterator = ir.iterateCallSites(); iterator.hasNext();) {
        CallSiteReference call = iterator.next();
        if (cg.getPossibleTargets(caller, call).contains(cgNode)) {
          SSAAbstractInvokeInstruction[] callInstrs = ir.getCalls(call);
          for (int i = 0; i < callInstrs.length; i++) {
            SSAAbstractInvokeInstruction callInstr = callInstrs[i];
            PointerKey actualPk = heapModel.getPointerKeyForLocal(caller, callInstr.getUse(paramPos));
            assert containsNode(actualPk);
            assert containsNode(pk);
            paramSuccs.add(new PointerKeyAndCallSite(actualPk, call));
          }
        }
      }
    }
    return paramSuccs.iterator();
  }

  /*
   * @see com.ibm.wala.demandpa.flowgraph.IFlowGraph#getParamPreds(com.ibm.wala.ipa.callgraph.propagation.LocalPointerKey)
   */
  // For an actual argument pk at some call, returns the corresponding formal-parameter
  // pointer keys (with call sites) in every possible callee.
  public Iterator<PointerKeyAndCallSite> getParamPreds(LocalPointerKey pk) {
    // TODO
    Set<SSAAbstractInvokeInstruction> instrs = callParams.get(pk);
    if (instrs == null) {
      return EmptyIterator.instance();
    }
    ArrayList<PointerKeyAndCallSite> paramPreds = new ArrayList<PointerKeyAndCallSite>();
    for (SSAAbstractInvokeInstruction callInstr : instrs) {
      for (int i = 0; i < callInstr.getNumberOfUses(); i++) {
        if (pk.getValueNumber() != callInstr.getUse(i))
          continue;
        CallSiteReference callSiteRef = callInstr.getCallSite();
        // get call targets
        Collection<CGNode> possibleCallees = cg.getPossibleTargets(pk.getNode(), callSiteRef);
        // construct graph for each target
        for (CGNode callee : possibleCallees) {
          addSubgraphForNode(callee);
          // TODO test this!!!
          // TODO test passing null as an argument
          // use position i maps to formal value number i + 1 in the callee
          PointerKey paramVal = heapModel.getPointerKeyForLocal(callee, i + 1);
          assert containsNode(paramVal);
          paramPreds.add(new PointerKeyAndCallSite(paramVal, callSiteRef));
        }
      }
    }
    return paramPreds.iterator();
  }

  /*
   * @see com.ibm.wala.demandpa.flowgraph.IFlowGraph#getReturnSuccs(com.ibm.wala.ipa.callgraph.propagation.LocalPointerKey)
   */
  // For a local pk defined by a call instruction, returns the (possibly exceptional)
  // return-value pointer keys in every possible callee.
  public Iterator<PointerKeyAndCallSite> getReturnSuccs(LocalPointerKey pk) {
    SSAAbstractInvokeInstruction callInstr = callDefs.get(pk);
    if (callInstr == null)
      return EmptyIterator.instance();
    ArrayList<PointerKeyAndCallSite> returnSuccs = new ArrayList<PointerKeyAndCallSite>();
    // does pk hold the exceptional result of the call, rather than the normal one?
    boolean isExceptional = pk.getValueNumber() == callInstr.getException();

    CallSiteReference callSiteRef = callInstr.getCallSite();
    // get call targets
    Collection<CGNode> possibleCallees = cg.getPossibleTargets(pk.getNode(), callSiteRef);
    // construct graph for each target
    for (CGNode callee : possibleCallees) {
      addSubgraphForNode(callee);
      PointerKey retVal = isExceptional ? heapModel.getPointerKeyForExceptionalReturnValue(callee) : heapModel
          .getPointerKeyForReturnValue(callee);
      assert containsNode(retVal);
      returnSuccs.add(new PointerKeyAndCallSite(retVal, callSiteRef));
    }
    return returnSuccs.iterator();
  }

  /*
   * @see com.ibm.wala.demandpa.flowgraph.IFlowGraph#getReturnPreds(com.ibm.wala.ipa.callgraph.propagation.LocalPointerKey)
   */
  // For a callee's (normal or exceptional) return-value key, returns the pointer keys
  // receiving that value at every caller that may target the callee.
  public Iterator<PointerKeyAndCallSite> getReturnPreds(LocalPointerKey pk) {
    CGNode cgNode = returns.get(pk);
    if (cgNode == null) {
      return EmptyIterator.instance();
    }
    boolean isExceptional = pk == heapModel.getPointerKeyForExceptionalReturnValue(cgNode);
    ArrayList<PointerKeyAndCallSite> returnPreds = new ArrayList<PointerKeyAndCallSite>();
    // iterate over callers
    for (CGNode caller : cg) {
      // TODO we don't need to add the graph if null is passed
      // as the argument
      addSubgraphForNode(caller);
      IR ir = caller.getIR();
      for (Iterator<CallSiteReference> iterator = ir.iterateCallSites(); iterator.hasNext();) {
        CallSiteReference call = iterator.next();
        if (cg.getPossibleTargets(caller, call).contains(cgNode)) {
          SSAAbstractInvokeInstruction[] callInstrs = ir.getCalls(call);
          for (int i = 0; i < callInstrs.length; i++) {
            SSAAbstractInvokeInstruction callInstr = callInstrs[i];
            PointerKey returnPk = heapModel.getPointerKeyForLocal(caller, isExceptional ? callInstr.getException() : callInstr
                .getDef());
            assert containsNode(returnPk);
            assert containsNode(pk);
            returnPreds.add(new PointerKeyAndCallSite(returnPk, call));
          }
        }
      }
    }
    return returnPreds.iterator();
  }

  /** Hook for subclasses: add flow-graph nodes for the parameters of {@code node}. */
  protected abstract void addNodesForParameters(CGNode node, IR ir);

  /**
   * Adds intra-node constraints (instructions, pass-through exceptions, constants),
   * regardless of whether the node was visited before. Also periodically wipes
   * WALA's soft caches to bound memory use.
   */
  protected void unconditionallyAddConstraintsFromNode(CGNode node, IR ir) {
    if (DEBUG) {
      System.err.println(("Adding constraints for CGNode " + node));
    }
    if (SSAPropagationCallGraphBuilder.PERIODIC_WIPE_SOFT_CACHES) {
      wipeCount++;
      if (wipeCount >= SSAPropagationCallGraphBuilder.WIPE_SOFT_CACHE_INTERVAL) {
        wipeCount = 0;
        ReferenceCleanser.clearSoftCaches();
      }
    }
    debugPrintIR(ir);
    if (ir == null) {
      return;
    }
    DefUse du = node.getDU();
    addNodeInstructionConstraints(node, ir, du);
    addNodePassthruExceptionConstraints(node, ir);
    addNodeConstantConstraints(node, ir);
  }

  /**
   * Add pointer flow constraints based on instructions in a given node
   */
  protected void addNodeInstructionConstraints(CGNode node, IR ir, DefUse du) {
    FlowStatementVisitor v = makeVisitor(node);
    ControlFlowGraph<SSAInstruction, ISSABasicBlock> cfg = ir.getControlFlowGraph();
    for (ISSABasicBlock b : cfg) {
      addBlockInstructionConstraints(node, cfg, b, v);
    }
  }

  /**
   * Add constraints for a particular basic block.
   */
  protected void addBlockInstructionConstraints(CGNode node, ControlFlowGraph<SSAInstruction, ISSABasicBlock> cfg,
      ISSABasicBlock b, FlowStatementVisitor v) {
    v.setBasicBlock(b);

    // visit each instruction in the basic block.
    for (Iterator<SSAInstruction> it = b.iterator(); it.hasNext();) {
      SSAInstruction s = it.next();
      if (s != null) {
        s.visit(v);
      }
    }

    addPhiConstraints(node, cfg, b);
  }

  /**
   * Adds assignment edges for phi instructions in each successor block of
   * {@code b}, wiring the phi's use corresponding to the incoming edge from
   * {@code b} to the phi's def.
   */
  private void addPhiConstraints(CGNode node, ControlFlowGraph<SSAInstruction, ISSABasicBlock> cfg, ISSABasicBlock b) {
    // visit each phi instruction in each successor block
    for (Iterator<? extends IBasicBlock> iter = cfg.getSuccNodes(b); iter.hasNext();) {
      ISSABasicBlock sb = (ISSABasicBlock) iter.next();
      if (sb.isExitBlock()) {
        // an optimization based on invariant that exit blocks should
        // have no
        // phis.
        continue;
      }
      int n = 0;
      // set n to be whichPred(this, sb);
      for (Iterator<? extends IBasicBlock> back = cfg.getPredNodes(sb); back.hasNext(); n++) {
        if (back.next() == b) {
          break;
        }
      }
      assert n < cfg.getPredNodeCount(sb);
      for (Iterator<SSAPhiInstruction> phis = sb.iteratePhis(); phis.hasNext();) {
        // Assertions.UNREACHABLE();
        SSAPhiInstruction phi = phis.next();
        if (phi == null) {
          continue;
        }
        PointerKey def = heapModel.getPointerKeyForLocal(node, phi.getDef());
        // phi uses <= 0 denote missing/invalid values and are skipped
        if (phi.getUse(n) > 0) {
          PointerKey use = heapModel.getPointerKeyForLocal(node, phi.getUse(n));
          addNode(def);
          addNode(use);
          addEdge(def, use, AssignLabel.noFilter());
        }
        // }
        // }
      }
    }
  }

  /** Hook for subclasses: build the statement visitor used to generate constraints. */
  protected abstract FlowStatementVisitor makeVisitor(CGNode node);

  /** Dumps the IR to stderr when DEBUG is on; tolerates a null IR. */
  private void debugPrintIR(IR ir) {
    if (DEBUG) {
      if (ir == null) {
        System.err.println("\n   No statements\n");
      } else {
        try {
          System.err.println(ir.toString());
        } catch (Error e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }
    }
  }

  /** Memoizes getPotentialCallers results per callee node. */
  final Map<CGNode, Set<CallerSiteContext>> callerCache = HashMapFactory.make();

  /**
   * Returns all (caller, call site) contexts that may invoke the node owning
   * {@code formalPk}. Results are cached in {@link #callerCache}.
   *
   * @throws IllegalArgumentException if formalPk is neither a LocalPointerKey nor a ReturnValueKey
   */
  @Override
  public Set<CallerSiteContext> getPotentialCallers(PointerKey formalPk) {
    CGNode callee = null;
    if (formalPk instanceof LocalPointerKey) {
      callee = ((LocalPointerKey) formalPk).getNode();
    } else if (formalPk instanceof ReturnValueKey) {
      callee = ((ReturnValueKey) formalPk).getNode();
    } else {
      throw new IllegalArgumentException("formalPk must represent a local");
    }
    Set<CallerSiteContext> ret = callerCache.get(callee);
    if (ret == null) {
      ret = HashSetFactory.make();
      for (Iterator<? extends CGNode> predNodes = cg.getPredNodes(callee); predNodes.hasNext();) {
        CGNode caller = predNodes.next();
        for (Iterator<CallSiteReference> iterator = cg.getPossibleSites(caller, callee); iterator.hasNext();) {
          CallSiteReference call = iterator.next();
          ret.add(new CallerSiteContext(caller, call));
        }
      }
      callerCache.put(callee, ret);
    }
    return ret;
  }

  // Delegates to the call graph; actualPk is ignored here (subclasses may refine).
  @Override
  public Set<CGNode> getPossibleTargets(CGNode node, CallSiteReference site, LocalPointerKey actualPk) {
    return cg.getPossibleTargets(node, site);
  }

  /** SSA instruction visitor that is also told which basic block it is visiting. */
  protected interface FlowStatementVisitor extends SSAInstruction.IVisitor {
    void setBasicBlock(ISSABasicBlock b);
  }

  public AbstractDemandFlowGraph(final CallGraph cg, final HeapModel heapModel, final MemoryAccessMap mam,
      final IClassHierarchy cha) {
    super(mam, heapModel, cha, cg);
  }

}
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. * * Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common * Development and Distribution License("CDDL") (collectively, the * "License"). You may not use this file except in compliance with the * License. You can obtain a copy of the License at * http://www.netbeans.org/cddl-gplv2.html * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the * specific language governing permissions and limitations under the * License. When distributing the software, include this License Header * Notice in each file and include the License file at * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the * License Header, with the fields enclosed by brackets [] replaced by * your own identifying information: * "Portions Copyrighted [year] [name of copyright owner]" * * Contributor(s): * * The Original Software is NetBeans. The Initial Developer of the Original * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun * Microsystems, Inc. All Rights Reserved. * * If you wish your version of this file to be governed by only the CDDL * or only the GPL Version 2, indicate your decision by adding * "[Contributor] elects to include this software in this distribution * under the [CDDL or GPL Version 2] license." 
If you do not indicate a
 * single choice of license, a recipient has the option to distribute
 * your version of this file under either the CDDL, the GPL Version 2 or
 * to extend the choice of license to its licensees as provided above.
 * However, if you add GPL Version 2 code and therefore, elected the GPL
 * Version 2 license, then the option applies only if the new code is
 * made subject to such option by the copyright holder.
 */
package com.bearsoft.org.netbeans.modules.form.layoutsupport.delegates;

import com.bearsoft.org.netbeans.modules.form.FormProperty;
import com.bearsoft.org.netbeans.modules.form.RADComponent;
import com.bearsoft.org.netbeans.modules.form.layoutsupport.*;
import com.eas.client.forms.Orientation;
import com.eas.client.forms.layouts.BoxLayout;
import java.awt.*;
import java.beans.*;
import java.lang.reflect.InvocationTargetException;
import javax.swing.JPanel;
import org.openide.util.ImageUtilities;

/**
 * Support class for BoxLayout. This is an example of support for layout manager
 * which is not a JavaBean - some general functionality from
 * AbstractLayoutSupport must be overridden and handled differently.
 *
 * @author Tran Duc Trung, Tomas Pavek
 */
// Expects ltr orientation of the designer
public class BoxLayoutSupport extends AbstractLayoutSupport {

    /**
     * The icon for BoxLayout.
     */
    private static final String iconURL =
            "com/bearsoft/org/netbeans/modules/form/beaninfo/swing/boxLayout.gif"; // NOI18N
    /**
     * The icon for BoxLayout.
     */
    private static final String icon32URL =
            "com/bearsoft/org/netbeans/modules/form/beaninfo/swing/boxLayout32.gif"; // NOI18N

    // Lazily built property array; see getProperties().
    private FormProperty<?>[] properties;

    /**
     * Gets the supported layout manager class - BoxLayout.
     *
     * @return the class supported by this delegate
     */
    @Override
    public Class<?> getSupportedClass() {
        return BoxLayout.class;
    }

    /**
     * Provides an icon to be used for the layout node in Component Inspector.
     * Only 16x16 color icon is required.
     *
     * @param type is one of BeanInfo constants: ICON_COLOR_16x16,
     * ICON_COLOR_32x32, ICON_MONO_16x16, ICON_MONO_32x32
     * @return icon to be displayed for node in Component Inspector
     */
    @Override
    public Image getIcon(int type) {
        switch (type) {
            case BeanInfo.ICON_COLOR_16x16:
            case BeanInfo.ICON_MONO_16x16:
                return ImageUtilities.loadImage(iconURL);
            default:
                return ImageUtilities.loadImage(icon32URL);
        }
    }

    /**
     * This method is called after a property of the layout is changed by the
     * user. The delegate implementation may check whether the layout is valid
     * after the change and throw PropertyVetoException if the change should be
     * reverted.
     *
     * @param ev PropertyChangeEvent object describing the change
     * @throws java.beans.PropertyVetoException
     */
    @Override
    public void acceptContainerLayoutChange(PropertyChangeEvent ev) throws PropertyVetoException {
        // BoxLayout is not a bean: rebuild the layout instance to pick up the change.
        updateLayoutInstance();
        super.acceptContainerLayoutChange(ev);
    }

    /**
     * This method calculates position (index) for a component dragged over a
     * container (or just for mouse cursor being moved over container, without
     * any component).
     *
     * @param container instance of a real container over/in which the component
     * is dragged
     * @param containerDelegate effective container delegate of the container
     * (for layout managers we always use container delegate instead of the
     * container)
     * @param component the real component being dragged; not needed here
     * @param index position (index) of the component in its current container;
     * not needed here
     * @param posInCont position of mouse in the container delegate
     * @param posInComp position of mouse in the dragged component; not needed
     * here
     * @return index corresponding to the position of the component in the
     * container
     */
    @Override
    public int getNewIndex(Container container,
            Container containerDelegate,
            Component component,
            int index,
            Point posInCont,
            Point posInComp) {
        if (!(containerDelegate.getLayout() instanceof BoxLayout)) {
            return -1;
        }
        // assistantParams accumulates the user-visible index shown by the
        // assistant (see getAssistantParams); the dragged component itself
        // is excluded from the count.
        assistantParams = 0;
        Component[] components = containerDelegate.getComponents();
        for (int i = 0; i < components.length; i++) {
            if (components[i] == component) {
                assistantParams--;
                continue;
            }
            Rectangle b = components[i].getBounds();
            if (getOrientation() == Orientation.HORIZONTAL) {
                // insert before component i when the cursor is left of its midpoint
                if (posInCont.x < b.x + b.width / 2) {
                    assistantParams += i;
                    return i;
                }
            } else {
                if (posInCont.y < b.y + b.height / 2) {
                    assistantParams += i;
                    return i;
                }
            }
        }
        // cursor is past every component: append at the end
        assistantParams += components.length;
        return components.length;
    }
    private int assistantParams;

    @Override
    public String getAssistantContext() {
        return "boxLayout"; // NOI18N
    }

    @Override
    public Object[] getAssistantParams() {
        // 1-based position for display in the assistant
        return new Object[]{assistantParams + 1};
    }

    /**
     * This method paints a dragging feedback for a component dragged over a
     * container (or just for mouse cursor being moved over container, without
     * any component).
     *
     * @param container instance of a real container over/in which the component
     * is dragged
     * @param containerDelegate effective container delegate of the container
     * (for layout managers we always use container delegate instead of the
     * container)
     * @param component the real component being dragged, not needed here
     * @param newConstraints component layout constraints to be presented; not
     * used for BoxLayout
     * @param newIndex component's index position to be presented
     * @param g Graphics object for painting (with color and line style set)
     * @return whether any feedback was painted (true in this case)
     */
    @Override
    public boolean paintDragFeedback(Container container,
            Container containerDelegate,
            Component component,
            LayoutConstraints<?> newConstraints,
            int newIndex,
            Graphics g) {
        if (containerDelegate.getLayout() instanceof BoxLayout) {
            Dimension containerSize = containerDelegate.getSize();
            Component[] components = containerDelegate.getComponents();
            Rectangle rect;
            if ((components.length == 0)
                    || ((components.length == 1) && (components[0] == component))) {
                // empty container (or only the dragged component): mark the start
                Insets ins = containerDelegate.getInsets();
                rect = (getOrientation() == Orientation.HORIZONTAL)
                        ? new Rectangle(ins.left, ins.top, 30, containerSize.height)
                        : new Rectangle(ins.left, ins.top, containerSize.width, 20);
            } else if (newIndex < 0 || newIndex >= components.length) {
                // append position: mark just after the last (other) component
                Component comp = components[components.length - 1];
                if (comp == component) {
                    comp = components[components.length - 2];
                }
                Rectangle b = comp.getBounds();
                rect = (getOrientation() == Orientation.HORIZONTAL)
                        ? new Rectangle(b.x + b.width - 10, b.y, 30, b.height)
                        : new Rectangle(b.x, b.y + b.height - 10, b.width, 20);
            } else {
                // insertion position: mark just before the component at newIndex
                Rectangle b = components[newIndex].getBounds();
                rect = (getOrientation() == Orientation.HORIZONTAL)
                        ? new Rectangle(b.x - 10, b.y, 30, b.height)
                        : new Rectangle(b.x, b.y - 10, b.width, 20);
            }
            g.drawRect(rect.x, rect.y, rect.width, rect.height);
            return true;
        } else {
            return false;
        }
    }

    /**
     * Sets up the layout (without adding components) on a real container,
     * according to the internal metadata representation. This method must
     * override AbstractLayoutSupport because BoxLayout instance cannot be used
     * universally - new instance must be created for each container.
     *
     * @param container instance of a real container to be set
     * @param containerDelegate effective container delegate of the container;
     * for layout managers we always use container delegate instead of the
     * container
     */
    @Override
    public void setLayoutToContainer(Container container, Container containerDelegate) {
        containerDelegate.setLayout(cloneLayoutInstance(container, containerDelegate));
    }

    @Override
    public void addComponentsToContainer(Container container, Container containerDelegate, Component[] components, int index) {
        // Issue 63955 and JDK bug 4294758
        ((LayoutManager2) containerDelegate.getLayout()).invalidateLayout(containerDelegate);
        /*
         int axis = ((BoxLayout) containerDelegate.getLayout()).getAxis();
         for (Component comp : components) {
         if (comp instanceof JComponent) {
         comp.setPreferredSize(comp.getSize());
         ((JComponent) comp).setAlignmentX(1.0f);
         ((JComponent) comp).setAlignmentY(1.0f);
         SwingFactory.prefToMaxForBox(axis, comp);
         }
         }
         */
        super.addComponentsToContainer(container, containerDelegate, components, index);
    }

    // ------------
    /**
     * Creates a default instance of LayoutManager (for internal use). This
     * method must override AbstractLayoutSupport because BoxLayout is not a
     * bean (so it cannot be created automatically).
     *
     * @return new instance of BoxLayout
     */
    @Override
    protected LayoutManager createDefaultLayoutInstance() {
        return new BoxLayout(new JPanel(), BoxLayout.X_AXIS);
    }

    /**
     * Cloning method - creates a clone of the reference LayoutManager instance
     * (for external use). This method must override AbstractLayoutSupport
     * because BoxLayout is not a bean (so it cannot be cloned automatically).
     *
     * @param container instance of a real container in whose container delegate
     * the layout manager will be probably used
     * @param containerDelegate effective container delegate of the container
     * @return cloned instance of BoxLayout
     */
    @Override
    protected LayoutManager cloneLayoutInstance(Container container, Container containerDelegate) {
        int axis = BoxLayout.X_AXIS;
        if (getOrientation() == Orientation.HORIZONTAL) {
            axis = BoxLayout.X_AXIS;
        } else if (getOrientation() == Orientation.VERTICAL) {
            axis = BoxLayout.Y_AXIS;
        }
        // carry over the gaps from the reference instance
        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
        return new BoxLayout(containerDelegate, axis, layout.getHgap(), layout.getVgap());
    }

    /**
     * Since BoxLayout is not a bean, we must specify its properties explicitly.
     * This method is called from getPropertySets() implementation to obtain the
     * default property set for the layout (assuming there's only one property
     * set). So it would be also possible to override (more generally)
     * getPropertySets() instead.
     *
     * @return array of properties of the layout manager
     */
    @Override
    protected FormProperty<?>[] getProperties() {
        if (properties == null) {
            // we cannot use RADProperty because "axis" is not a real
            // bean property - we must create a special FormProperty
            properties = new FormProperty<?>[]{
                new FormProperty<Integer>(
                "orientation", // NOI18N
                Integer.TYPE,
                getBundle().getString("PROP_axis"), // NOI18N
                getBundle().getString("HINT_axis")) // NOI18N
                {
                    // lazily created editor, shared across getPropertyEditor() calls
                    protected OrientationEditor editor;

                    @Override
                    public PropertyEditor getPropertyEditor() {
                        if (editor == null) {
                            editor = new OrientationEditor();
                        }
                        return editor;
                    }

                    @Override
                    public Integer getValue() {
                        return getOrientation();
                    }

                    @Override
                    public void setValue(Integer value) {
                        Integer oldValue = getValue();
                        setOrientation(value);
                        propertyValueChanged(oldValue, value);
                    }

                    @Override
                    public boolean supportsDefaultValue() {
                        return true;
                    }

                    @Override
                    public Integer getDefaultValue() {
                        return Orientation.HORIZONTAL;
                    }
                },
                new FormProperty<Integer>(
                "hgap", // NOI18N
                Integer.TYPE,
                getBundle().getString("PROP_hgap"), // NOI18N
                getBundle().getString("HINT_hgap")) {
                    @Override
                    public Integer getValue() throws IllegalAccessException, InvocationTargetException {
                        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
                        return layout.getHgap();
                    }

                    @Override
                    public void setValue(Integer aValue) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
                        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
                        int oldValue = layout.getHgap();
                        // treat a null value as "reset to 0"
                        layout.setHgap(aValue != null ? aValue : 0);
                        propertyValueChanged(oldValue, layout.getHgap());
                    }

                    @Override
                    public boolean supportsDefaultValue() {
                        return true;
                    }

                    @Override
                    public Integer getDefaultValue() {
                        return 0;
                    }
                }, // NOI18N
                new FormProperty<Integer>(
                "vgap", // NOI18N
                Integer.TYPE,
                getBundle().getString("PROP_vgap"), // NOI18N
                getBundle().getString("HINT_vgap")) {
                    @Override
                    public Integer getValue() throws IllegalAccessException, InvocationTargetException {
                        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
                        return layout.getVgap();
                    }

                    @Override
                    public void setValue(Integer aValue) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
                        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
                        int oldValue = layout.getVgap();
                        // treat a null value as "reset to 0"
                        layout.setVgap(aValue != null ? aValue : 0);
                        propertyValueChanged(oldValue, layout.getVgap());
                    }

                    @Override
                    public boolean supportsDefaultValue() {
                        return true;
                    }

                    @Override
                    public Integer getDefaultValue() {
                        return 0;
                    }
                } // NOI18N
            };
        }
        return properties;
    }

    // Maps the reference BoxLayout's axis to the designer's Orientation constant.
    private int getOrientation() {
        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
        int axis = layout.getAxis();
        if (axis == BoxLayout.Y_AXIS || axis == BoxLayout.PAGE_AXIS) {
            return Orientation.VERTICAL;
        } else {
            return Orientation.HORIZONTAL;
        }
    }

    // Writes the Orientation constant back as a BoxLayout axis on the reference instance.
    private void setOrientation(int aValue) {
        BoxLayout layout = (BoxLayout) getRadLayout().getBeanInstance();
        if (aValue == Orientation.VERTICAL) {
            layout.setAxis(BoxLayout.Y_AXIS);
        } else {
            layout.setAxis(BoxLayout.X_AXIS);
        }
    }

    /**
     * Method to obtain just one property of given name. Must be overridden in
     * AbstractLayoutSupport because alternative properties are used for
     * BoxLayout (see getProperties method)
     *
     * @param propName
     * @return layout property of given name
     */
    // NOTE(review): lookup uses the name "axis" while the property itself is
    // created as "orientation" — presumably "axis" is the persisted/legacy
    // name; confirm against callers before renaming either side.
    @Override
    protected FormProperty<?> getProperty(String propName) {
        return "axis".equals(propName) ? getProperties()[0] : null; // NOI18N
    }

    /** Property editor presenting the two BoxLayout axes as localized tags. */
    public static final class OrientationEditor extends PropertyEditorSupport {

        private final String[] tags = {
            getBundle().getString("VALUE_axis_x"), // NOI18N
            getBundle().getString("VALUE_axis_y") // NOI18N
        };
        private final Integer[] values = {
            Orientation.HORIZONTAL,
            Orientation.VERTICAL
        };
        private final String[] javaInitStrings = {
            BoxLayout.class.getName() + ".X_AXIS", // NOI18N
            BoxLayout.class.getName() + ".Y_AXIS" // NOI18N
        };

        @Override
        public String[] getTags() {
            return tags;
        }

        @Override
        public String getAsText() {
            Object value = getValue();
            for (int i = 0; i < values.length; i++) {
                if (values[i].equals(value)) {
                    return tags[i];
                }
            }
            return null;
        }

        @Override
        public void setAsText(String str) {
            for (int i = 0; i < values.length; i++) {
                if (tags[i].equals(str)) {
                    setValue(values[i]);
                    break;
                }
            }
        }

        @Override
        public String getJavaInitializationString() {
            Object value = getValue();
            for (int i = 0; i < values.length; i++) {
                if (values[i].equals(value)) {
                    return javaInitStrings[i];
                }
            }
            return null;
        }
    }

    @Override
    public void addComponents(RADComponent<?>[] newComps, LayoutConstraints<?>[] newConstraints, int index) {
        // no op here because box layout has no per-component constraints.
    }

    @Override
    public void removeComponent(int index) {
        // no op here because box layout has no per-component constraints.
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.ui.impl; import com.intellij.ide.DataManager; import com.intellij.ide.IdeEventQueue; import com.intellij.ide.impl.TypeSafeDataProviderAdapter; import com.intellij.ide.ui.AntialiasingType; import com.intellij.ide.ui.UISettings; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ex.ApplicationEx; import com.intellij.openapi.application.ex.ApplicationManagerEx; import com.intellij.openapi.application.impl.LaterInvocator; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.command.CommandProcessorEx; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.DialogWrapperDialog; import com.intellij.openapi.ui.DialogWrapperPeer; import com.intellij.openapi.ui.Queryable; import com.intellij.openapi.ui.popup.StackingPopupDispatcher; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.WindowManager; import com.intellij.openapi.wm.ex.LayoutFocusTraversalPolicyExt; import 
com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.openapi.wm.impl.IdeGlassPaneImpl;
import com.intellij.reference.SoftReference;
import com.intellij.ui.*;
import com.intellij.ui.components.JBLayeredPane;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.IJSwingUtilities;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.OwnerOptional;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferStrategy;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Swing-based peer for {@link DialogWrapper}: owns the actual AWT dialog
 * (a {@link MyDialog}, or a HeadlessDialog in headless mode), wires up
 * modality, focus tracking, ESC-to-cancel, and size/location persistence
 * via DimensionService.
 */
public class DialogWrapperPeerImpl extends DialogWrapperPeer implements FocusTrackbackProvider {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.ui.DialogWrapper");

  private final DialogWrapper myWrapper;
  private AbstractDialog myDialog;
  private boolean myCanBeParent = true;
  private WindowManagerEx myWindowManager;
  private final List<Runnable> myDisposeActions = new ArrayList<>();
  private Project myProject;

  private final ActionCallback myWindowFocusedCallback = new ActionCallback("DialogFocusedCallback");
  private final ActionCallback myTypeAheadDone = new ActionCallback("DialogTypeAheadDone");
  private ActionCallback myTypeAheadCallback;

  protected DialogWrapperPeerImpl(@NotNull DialogWrapper wrapper,
                                  @Nullable Project project,
                                  boolean canBeParent,
                                  @NotNull DialogWrapper.IdeModalityType ideModalityType) {
    myWrapper = wrapper;
    myTypeAheadCallback = myWrapper.isTypeAheadEnabled() ? new ActionCallback() : null;
    myWindowManager = null;
    Application application = ApplicationManager.getApplication();
    if (application != null && application.hasComponent(WindowManager.class)) {
      myWindowManager = (WindowManagerEx)WindowManager.getInstance();
    }

    Window window = null;
    if (myWindowManager != null) {
      if (project == null) {
        // fall back to the project of the currently focused context
        //noinspection deprecation
        project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext());
      }

      myProject = project;

      window = myWindowManager.suggestParentWindow(project);
      if (window == null) {
        Window focusedWindow = myWindowManager.getMostRecentFocusedWindow();
        if (focusedWindow instanceof IdeFrameImpl) {
          window = focusedWindow;
        }
      }
      if (window == null) {
        // last resort: any active IDE frame
        IdeFrame[] frames = myWindowManager.getAllProjectFrames();
        for (IdeFrame frame : frames) {
          if (frame instanceof IdeFrameImpl && ((IdeFrameImpl)frame).isActive()) {
            window = (IdeFrameImpl)frame;
            break;
          }
        }
      }
    }

    Window owner;
    if (window != null) {
      owner = window;
    }
    else {
      if (!isHeadless()) {
        owner = JOptionPane.getRootFrame();
      }
      else {
        owner = null;
      }
    }

    createDialog(owner, canBeParent, ideModalityType);
  }

  /**
   * Creates modal {@code DialogWrapper}. The currently active window will be the dialog's parent.
   *
   * @param project     parent window for the dialog will be calculated based on focused window for the
   *                    specified {@code project}. This parameter can be {@code null}. In this case parent window
   *                    will be suggested based on current focused window.
   * @param canBeParent specifies whether the dialog can be parent for other windows. This parameter is used
   *                    by {@code WindowManager}.
   */
  protected DialogWrapperPeerImpl(@NotNull DialogWrapper wrapper, @Nullable Project project, boolean canBeParent) {
    this(wrapper, project, canBeParent, DialogWrapper.IdeModalityType.IDE);
  }

  protected DialogWrapperPeerImpl(@NotNull DialogWrapper wrapper, boolean canBeParent) {
    this(wrapper, (Project)null, canBeParent);
  }

  @Override
  public boolean isHeadless() {
    return isHeadlessEnv();
  }

  @Override
  public Object[] getCurrentModalEntities() {
    return LaterInvocator.getCurrentModalEntities();
  }

  // Headless when there is no graphics environment, or the app runs in
  // unit-test/headless mode.
  public static boolean isHeadlessEnv() {
    Application app = ApplicationManager.getApplication();
    if (app == null) return GraphicsEnvironment.isHeadless();
    return app.isUnitTestMode() || app.isHeadlessEnvironment();
  }

  /**
   * @param parent parent component which is used to calculate heavy weight window ancestor.
   *               {@code parent} cannot be {@code null} and must be showing.
   */
  protected DialogWrapperPeerImpl(@NotNull DialogWrapper wrapper, @NotNull Component parent, final boolean canBeParent) {
    myWrapper = wrapper;
    myWindowManager = null;
    Application application = ApplicationManager.getApplication();
    if (application != null && application.hasComponent(WindowManager.class)) {
      myWindowManager = (WindowManagerEx)WindowManager.getInstance();
    }
    OwnerOptional.fromComponent(parent).ifWindow(window -> {
      createDialog(window, canBeParent);
    });
  }

  public DialogWrapperPeerImpl(@NotNull final DialogWrapper wrapper, final Window owner, final boolean canBeParent,
                               final DialogWrapper.IdeModalityType ideModalityType) {
    myWrapper = wrapper;
    myWindowManager = null;
    Application application = ApplicationManager.getApplication();
    if (application != null && application.hasComponent(WindowManager.class)) {
      myWindowManager = (WindowManagerEx)WindowManager.getInstance();
    }
    createDialog(owner, canBeParent);

    if (!isHeadless()) {
      // per-project modality is opt-in via registry; otherwise IDE modality wins
      Dialog.ModalityType modalityType = DialogWrapper.IdeModalityType.IDE.toAwtModality();
      if (Registry.is("ide.perProjectModality")) {
        modalityType = ideModalityType.toAwtModality();
      }
      myDialog.setModalityType(modalityType);
    }
  }

  /** @see DialogWrapper#DialogWrapper(boolean, boolean) */
  @Deprecated
  public DialogWrapperPeerImpl(@NotNull DialogWrapper wrapper, final boolean canBeParent, final boolean applicationModalIfPossible) {
    this(wrapper, null, canBeParent, applicationModalIfPossible);
  }

  @Deprecated
  public DialogWrapperPeerImpl(@NotNull DialogWrapper wrapper, final Window owner, final boolean canBeParent,
                               final boolean applicationModalIfPossible) {
    this(wrapper, owner, canBeParent,
         applicationModalIfPossible ? DialogWrapper.IdeModalityType.IDE : DialogWrapper.IdeModalityType.PROJECT);
  }

  @Override
  public void setUndecorated(boolean undecorated) {
    myDialog.setUndecorated(undecorated);
  }

  @Override
  public void addMouseListener(MouseListener listener) {
    myDialog.addMouseListener(listener);
  }

  // NOTE(review): overload named addMouseListener but takes a
  // MouseMotionListener and registers it as such — part of the peer API,
  // so the name cannot be changed here.
  @Override
  public void addMouseListener(MouseMotionListener listener) {
    myDialog.addMouseMotionListener(listener);
  }

  @Override
  public void addKeyListener(KeyListener listener) {
    myDialog.addKeyListener(listener);
  }

  private void createDialog(@Nullable Window owner, boolean canBeParent, @NotNull DialogWrapper.IdeModalityType ideModalityType) {
    if (isHeadless()) {
      myDialog = new HeadlessDialog(myWrapper);
    }
    else {
      myDialog = new MyDialog(owner, myWrapper, myProject, myWindowFocusedCallback, myTypeAheadDone, myTypeAheadCallback);
      UIUtil.suppressFocusStealing(getWindow());
      myDialog.setModalityType(ideModalityType.toAwtModality());
      // canBeParent only matters for the real (non-headless) dialog
      myCanBeParent = canBeParent;
    }
  }

  private void createDialog(@Nullable Window owner, boolean canBeParent) {
    createDialog(owner, canBeParent, DialogWrapper.IdeModalityType.IDE);
  }

  @Override
  public void toFront() {
    myDialog.toFront();
  }

  @Override
  public void toBack() {
    myDialog.toBack();
  }

  @Override
  @SuppressWarnings("SSBasedInspection")
  protected void dispose() {
    LOG.assertTrue(EventQueue.isDispatchThread(), "Access is allowed from event dispatch thread only");
    for (Runnable runnable : myDisposeActions) {
      runnable.run();
    }
    myDisposeActions.clear();
    Runnable disposer = () -> {
      Disposer.dispose(myDialog);
      myProject = null;

      // detach the root pane one EDT cycle later so pending events settle first
      SwingUtilities.invokeLater(() -> {
        if (myDialog != null && myDialog.getRootPane() != null) {
          myDialog.remove(myDialog.getRootPane());
        }
      });
    };

    UIUtil.invokeLaterIfNeeded(disposer);
  }

  private boolean isProgressDialog() {
    return myWrapper.isModalProgress();
  }

  @Override
  @Nullable
  public Container getContentPane() {
    // root pane may already be gone after dispose — guard against that
    return getRootPane() != null ? myDialog.getContentPane() : null;
  }

  /**
   * @see javax.swing.JDialog#validate
   */
  @Override
  public void validate() {
    myDialog.validate();
  }

  /**
   * @see javax.swing.JDialog#repaint
   */
  @Override
  public void repaint() {
    myDialog.repaint();
  }

  @Override
  public Window getOwner() {
    return myDialog.getOwner();
  }

  @Override
  public Window getWindow() {
    return myDialog.getWindow();
  }

  @Override
  public JRootPane getRootPane() {
    return myDialog.getRootPane();
  }

  @Override
  public Dimension getSize() {
    return myDialog.getSize();
  }

  @Override
  public String getTitle() {
    return myDialog.getTitle();
  }

  /**
   * @see java.awt.Window#pack
   */
  @Override
  public void pack() {
    myDialog.pack();
  }

  @Override
  public void setAppIcons() {
    AppUIUtil.updateWindowIcon(getWindow());
  }

  @Override
  public Dimension getPreferredSize() {
    return myDialog.getPreferredSize();
  }

  @Override
  public void setModal(boolean modal) {
    myDialog.setModal(modal);
  }

  @Override
  public boolean isModal() {
    return myDialog.isModal();
  }

  @Override
  public boolean isVisible() {
    return myDialog.isVisible();
  }

  @Override
  public boolean isShowing() {
    return myDialog.isShowing();
  }

  @Override
  public void setSize(int width, int height) {
    myDialog.setSize(width, height);
  }

  @Override
  public void setTitle(String title) {
    myDialog.setTitle(title);
  }

  // NOTE(review): declared void and discards the queried value — matches the
  // peer interface signature, so the result cannot be returned here.
  @Override
  public void isResizable() {
    myDialog.isResizable();
  }

  @Override
  public void setResizable(boolean resizable) {
    myDialog.setResizable(resizable);
  }

  @NotNull
  @Override
  public Point getLocation() {
    return myDialog.getLocation();
  }

  @Override
  public void setLocation(@NotNull Point p) {
    myDialog.setLocation(p);
  }

  @Override
  public void setLocation(int x, int y) {
    myDialog.setLocation(x, y);
  }

  @Override
  public ActionCallback show() {
    LOG.assertTrue(EventQueue.isDispatchThread(), "Access is allowed from event dispatch thread only");
    if (myTypeAheadCallback != null) {
      IdeFocusManager.getInstance(myProject).typeAheadUntil(myTypeAheadCallback);
    }
    // NOTE(review): EDT assertion is repeated here in the original source
    LOG.assertTrue(EventQueue.isDispatchThread(), "Access is allowed from event dispatch thread only");
    final ActionCallback result = new ActionCallback();

    // ESC cancels the dialog for the lifetime of the root pane
    final AnCancelAction anCancelAction = new AnCancelAction();
    final JRootPane rootPane = getRootPane();
    UIUtil.decorateFrame(rootPane);
    anCancelAction.registerCustomShortcutSet(CommonShortcuts.ESCAPE, rootPane);
    myDisposeActions.add(() -> anCancelAction.unregisterCustomShortcutSet(rootPane));

    if (!myCanBeParent && myWindowManager != null) {
      myWindowManager.doNotSuggestAsParent(myDialog.getWindow());
    }

    final CommandProcessorEx commandProcessor =
      ApplicationManager.getApplication() != null ? (CommandProcessorEx)CommandProcessor.getInstance() : null;
    final boolean appStarted = commandProcessor != null;

    boolean changeModalityState = appStarted && myDialog.isModal()
                                  && !isProgressDialog(); // ProgressWindow starts a modality state itself
    Project project = myProject;

    if (changeModalityState) {
      commandProcessor.enterModal();
      if (Registry.is("ide.perProjectModality")) {
        LaterInvocator.enterModal(project, myDialog.getWindow());
      }
      else {
        LaterInvocator.enterModal(myDialog);
      }
    }

    if (appStarted) {
      hidePopupsIfNeeded();
    }

    try {
      myDialog.show();
    }
    finally {
      // always leave the modality state symmetrically, even if show() threw
      if (changeModalityState) {
        commandProcessor.leaveModal();
        if (Registry.is("ide.perProjectModality")) {
          LaterInvocator.leaveModal(project, myDialog.getWindow());
        }
        else {
          LaterInvocator.leaveModal(myDialog);
        }
      }

      myDialog.getFocusManager().doWhenFocusSettlesDown(result.createSetDoneRunnable());
    }

    return result;
  }

  //hopefully this whole code will go away
  private void hidePopupsIfNeeded() {
    if (!SystemInfo.isMac) return;

    StackingPopupDispatcher.getInstance().hidePersistentPopups();
    myDisposeActions.add(() -> StackingPopupDispatcher.getInstance().restorePersistentPopups());
  }

  @Override
  public FocusTrackback getFocusTrackback() {
    return myDialog.getFocusTrackback();
  }

  /**
   * ESC handler: only enabled when focus is not inside an active speed
   * search, a focused popup, or an editing tree/table cell.
   */
  private class AnCancelAction extends AnAction implements DumbAware {
    @Override
    public void update(AnActionEvent e) {
      Component focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
      e.getPresentation().setEnabled(false);
      if (focusOwner instanceof JComponent && SpeedSearchBase.hasActiveSpeedSearch((JComponent)focusOwner)) {
        return;
      }

      if (StackingPopupDispatcher.getInstance().isPopupFocused()) return;

      JTree tree = UIUtil.getParentOfType(JTree.class, focusOwner);
      JTable table = UIUtil.getParentOfType(JTable.class, focusOwner);

      if (tree != null || table != null) {
        if (hasNoEditingTreesOrTablesUpward(focusOwner)) {
          e.getPresentation().setEnabled(true);
        }
      }
    }

    private boolean hasNoEditingTreesOrTablesUpward(Component comp) {
      while (comp != null) {
        if (isEditingTreeOrTable(comp)) return false;
        comp = comp.getParent();
      }
      return true;
    }

    private boolean isEditingTreeOrTable(Component comp) {
      if (comp instanceof JTree) {
        return ((JTree)comp).isEditing();
      }
      else if (comp instanceof JTable) {
        return ((JTable)comp).isEditing();
      }
      return false;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      myWrapper.doCancelAction(e.getInputEvent());
    }
  }

  private static class MyDialog extends JDialog
    implements DialogWrapperDialog, DataProvider, FocusTrackback.Provider, Queryable, AbstractDialog {
    private final WeakReference<DialogWrapper> myDialogWrapper;

    /**
     * Initial size of the dialog. When the dialog is being closed and
     * current size of the dialog is not equals to the initial size then the
     * current (changed) size is stored in the {@code DimensionService}.
     */
    private Dimension myInitialSize;
    private String myDimensionServiceKey;
    private boolean myOpened = false;
    private boolean myActivated = false;

    private FocusTrackback myFocusTrackback;
    private MyDialog.MyWindowListener myWindowListener;

    private final WeakReference<Project> myProject;
    private final ActionCallback myFocusedCallback;
    private final ActionCallback myTypeAheadDone;
    private final ActionCallback myTypeAheadCallback;

    public MyDialog(Window owner,
                    DialogWrapper dialogWrapper,
                    Project project,
                    @NotNull ActionCallback focused,
                    @NotNull ActionCallback typeAheadDone,
                    ActionCallback typeAheadCallback) {
      super(owner);
      myDialogWrapper = new WeakReference<>(dialogWrapper);
      myProject = project != null ? new WeakReference<>(project) : null;

      // skip focus-proxy components during keyboard traversal
      setFocusTraversalPolicy(new LayoutFocusTraversalPolicyExt() {
        @Override
        protected boolean accept(Component aComponent) {
          if (UIUtil.isFocusProxy(aComponent)) return false;
          return super.accept(aComponent);
        }
      });

      myFocusedCallback = focused;
      myTypeAheadDone = typeAheadDone;
      myTypeAheadCallback = typeAheadCallback;

      final long typeAhead = getDialogWrapper().getTypeAheadTimeoutMs();
      if (typeAhead <= 0) {
        myTypeAheadDone.setDone();
      }

      setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
      myWindowListener = new MyWindowListener();
      addWindowListener(myWindowListener);
    }

    @Override
    public JDialog getWindow() {
      return this;
    }

    @Override
    public void putInfo(@NotNull Map<String, String> info) {
      info.put("dialog", getTitle());
    }

    @Override
    public FocusTrackback getFocusTrackback() {
      return myFocusTrackback;
    }

    @Override
    public DialogWrapper getDialogWrapper() {
      return myDialogWrapper.get();
    }

    @Override
    public void centerInParent() {
      setLocationRelativeTo(getOwner());
    }

    @Override
    public Object getData(String dataId) {
      // delegate data requests to the wrapper if it participates in the data flow
      final DialogWrapper wrapper = myDialogWrapper.get();
      if (wrapper instanceof DataProvider) {
        return ((DataProvider)wrapper).getData(dataId);
      }
      if (wrapper instanceof TypeSafeDataProvider) {
        TypeSafeDataProviderAdapter adapter = new TypeSafeDataProviderAdapter((TypeSafeDataProvider)wrapper);
        return adapter.getData(dataId);
      }
      return null;
    }

    @Override
    public void setSize(int width, int height) {
      _setSizeForLocation(width, height, null);
    }

    // Resize keeping the dialog on screen; an explicit initial location forces
    // the location to be (re)applied as well.
    private void _setSizeForLocation(int width, int height, @Nullable Point initial) {
      Point location = initial != null ? initial : getLocation();
      Rectangle rect = new Rectangle(location.x, location.y, width, height);
      ScreenUtil.fitToScreen(rect);
      if (initial != null || location.x != rect.x || location.y != rect.y) {
        setLocation(rect.x, rect.y);
      }

      super.setSize(rect.width, rect.height);
    }

    @Override
    public void setBounds(int x, int y, int width, int height) {
      Rectangle rect = new Rectangle(x, y, width, height);
      ScreenUtil.fitToScreen(rect);
      super.setBounds(rect.x, rect.y, rect.width, rect.height);
    }

    @Override
    public void setBounds(Rectangle r) {
      ScreenUtil.fitToScreen(r);
      super.setBounds(r);
    }

    @NotNull
    @Override
    protected JRootPane createRootPane() {
      return new DialogRootPane();
    }

    @Override
    @SuppressWarnings("deprecation")
    public void show() {
      myFocusTrackback = new FocusTrackback(getDialogWrapper(), getParent(), true);

      final DialogWrapper dialogWrapper = getDialogWrapper();
      boolean isAutoAdjustable = dialogWrapper.isAutoAdjustable();
      Point location = null;
      if (isAutoAdjustable) {
        pack();

        // never shrink below the minimum size, then apply the wrapper's stretch factors
        Dimension packedSize = getSize();
        Dimension minSize = getMinimumSize();
        setSize(Math.max(packedSize.width, minSize.width), Math.max(packedSize.height, minSize.height));

        setSize((int)(getWidth() * dialogWrapper.getHorizontalStretch()),
                (int)(getHeight() * dialogWrapper.getVerticalStretch()));

        // Restore dialog's size and location
        myDimensionServiceKey = dialogWrapper.getDimensionKey();

        if (myDimensionServiceKey != null) {
          final Project projectGuess = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(this));
          location = DimensionService.getInstance().getLocation(myDimensionServiceKey, projectGuess);
          Dimension size = DimensionService.getInstance().getSize(myDimensionServiceKey, projectGuess);
          if (size != null) {
            myInitialSize = new Dimension(size);
            _setSizeForLocation(myInitialSize.width, myInitialSize.height, location);
          }
        }

        if (myInitialSize == null) {
          myInitialSize = getSize();
        }
      }

      if (location == null) {
        location = dialogWrapper.getInitialLocation();
      }

      if (location != null) {
        setLocation(location);
      }
      else {
        setLocationRelativeTo(getOwner());
      }

      if (isAutoAdjustable) {
        final Rectangle bounds = getBounds();
        ScreenUtil.fitToScreen(bounds);
        setBounds(bounds);
      }

      if (Registry.is("actionSystem.fixLostTyping")) {
        final IdeEventQueue queue = IdeEventQueue.getInstance();
        if (queue != null) {
          queue.getKeyEventDispatcher().resetState();
        }
      }

      // Workaround for switching workspaces on dialog show
      if (SystemInfo.isMac && myProject != null && Registry.is("ide.mac.fix.dialog.showing") && !dialogWrapper.isModalProgress()) {
        // NOTE(review): myProject is a WeakReference; get() may return null here
        final IdeFrame frame = WindowManager.getInstance().getIdeFrame(myProject.get());
        AppIcon.getInstance().requestFocus(frame);
      }

      setBackground(UIUtil.getPanelBackground());

      final ApplicationEx app = ApplicationManagerEx.getApplicationEx();
      if (app != null && !app.isLoaded() && Splash.BOUNDS != null) {
        // during startup, position the dialog just below the splash screen
        final Point loc = getLocation();
        loc.y = Splash.BOUNDS.y + Splash.BOUNDS.height;
        setLocation(loc);
      }

      super.show();
    }

    @Nullable
    private Project getProject() {
      return SoftReference.dereference(myProject);
    }

    @NotNull
    @Override
    public IdeFocusManager getFocusManager() {
      Project project = getProject();
      if (project != null && !project.isDisposed()) {
        return IdeFocusManager.getInstance(project);
      }
      else {
        return IdeFocusManager.findInstance();
      }
    }

    // Dispose the focus trackback unless focus still lives inside the wrapper's
    // content pane (i.e. a child window of this dialog has it).
    private void disposeFocusTrackbackIfNoChildWindowFocused(@Nullable IdeFocusManager focusManager) {
      if (myFocusTrackback == null) return;

      final DialogWrapper wrapper = myDialogWrapper.get();
      if (wrapper == null || !wrapper.isShowing()) {
        myFocusTrackback.dispose();
        return;
      }

      if (focusManager != null) {
        final Component c = focusManager.getFocusedDescendantFor(wrapper.getContentPane());
        if (c == null) {
          myFocusTrackback.dispose();
        }
      }
      else {
        final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
        if (owner == null || !SwingUtilities.isDescendingFrom(owner, wrapper.getContentPane())) {
          myFocusTrackback.dispose();
        }
      }
    }

    @Override
    @SuppressWarnings("deprecation")
    public void hide() {
      super.hide();
      // schedule focus restoration exactly once
      if (myFocusTrackback != null && !(myFocusTrackback.isScheduledForRestore() || myFocusTrackback.isWillBeScheduledForRestore())) {
        myFocusTrackback.setWillBeScheduledForRestore();
        IdeFocusManager mgr = getFocusManager();
        Runnable r = () -> {
          if (myFocusTrackback != null) myFocusTrackback.restoreFocus();
          myFocusTrackback = null;
        };
        mgr.doWhenFocusSettlesDown(r);
      }
    }

    @Override
    public void dispose() {
      if (isShowing()) {
        hide();
      }

      if (myWindowListener != null) {
        myWindowListener.saveSize();
        removeWindowListener(myWindowListener);
        myWindowListener = null;
      }

      DialogWrapper.cleanupWindowListeners(this);

      if (myFocusTrackback != null && !(myFocusTrackback.isScheduledForRestore() || myFocusTrackback.isWillBeScheduledForRestore())) {
        myFocusTrackback.dispose();
        myFocusTrackback = null;
      }

      final BufferStrategy strategy = getBufferStrategy();
      if (strategy != null) {
        strategy.dispose();
      }
      super.dispose();

      removeAll();
      DialogWrapper.cleanupRootPane(rootPane);

      rootPane = null;

      // JDK leaks modal dialogs through a static list; remove ourselves reflectively
      // http://bugs.sun.com/view_bug.do?bug_id=6614056
      try {
        synchronized (getTreeLock()) {
          List<?> list = ReflectionUtil.getStaticFieldValue(Dialog.class, List.class, "modalDialogs");
          list.remove(this);
        }
      }
      catch (final Exception ignored) {
        // best effort only — field may not exist on this JDK
      }
    }

    @Override
    public Component getMostRecentFocusOwner() {
      if (!myOpened) {
        final DialogWrapper wrapper = getDialogWrapper();
        if (wrapper != null) {
          JComponent toFocus = wrapper.getPreferredFocusedComponent();
          if (toFocus != null) {
            return toFocus;
          }
        }
      }
      return super.getMostRecentFocusOwner();
    }

    @Override
    public void paint(Graphics g) {
      if (!SystemInfo.isMac || UIUtil.isUnderAquaLookAndFeel()) {
        // avoid rendering problems with non-aqua (alloy) LaFs under mac
        // actually, it's a bad idea to globally enable this for dialog graphics since renderers, for example, may not
        // inherit graphics so rendering hints won't be applied and trees or lists may render ugly.
        UISettings.setupAntialiasing(g);
      }

      super.paint(g);
    }

    @SuppressWarnings("SSBasedInspection")
    private class MyWindowListener extends WindowAdapter {
      @Override
      public void windowClosing(WindowEvent e) {
        DialogWrapper dialogWrapper = getDialogWrapper();
        if (dialogWrapper.shouldCloseOnCross()) {
          dialogWrapper.doCancelAction(e);
        }
      }

      @Override
      public void windowClosed(WindowEvent e) {
        saveSize();
      }

      public void saveSize() {
        if (myDimensionServiceKey != null &&
            myInitialSize != null &&
            myOpened) { // myInitialSize can be null only if dialog is disposed before first showing
          final Project projectGuess = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(MyDialog.this));

          // Save location
          Point location = getLocation();
          DimensionService.getInstance().setLocation(myDimensionServiceKey, location, projectGuess);
          // Save size
          Dimension size = getSize();
          if (!myInitialSize.equals(size)) {
            DimensionService.getInstance().setSize(myDimensionServiceKey, size, projectGuess);
          }
          myOpened = false;
        }
      }

      @Override
      public void windowOpened(final WindowEvent e) {
        if (SystemInfo.isMacOSLion) {
          Window window = e.getWindow();
          if (window instanceof Dialog) {
            ID _native = MacUtil.findWindowForTitle(((Dialog)window).getTitle());
            if (_native != null && _native.intValue() > 0) {
              // see MacMainFrameDecorator
              // NSCollectionBehaviorFullScreenAuxiliary = 1 << 8
              Foundation.invoke(_native, "setCollectionBehavior:", 1 << 8);
            }
          }
        }
        SwingUtilities.invokeLater(() -> {
          myOpened = true;
          final DialogWrapper activeWrapper = getActiveWrapper();
          for (JComponent c : UIUtil.uiTraverser(e.getWindow()).filter(JComponent.class)) {
            GraphicsUtil.setAntialiasingType(c, AntialiasingType.getAAHintForSwingComponent());
          }
          if (activeWrapper == null) {
            myFocusedCallback.setRejected();
            myTypeAheadDone.setRejected();
          }
        });
      }

      @Override
      public void windowActivated(final WindowEvent e) {
        if (myFocusTrackback != null) {
          DialogWrapper wrapper = getDialogWrapper();
          if (wrapper != null) {
            myFocusTrackback.cleanParentWindow();
            myFocusTrackback.registerFocusComponent(new FocusTrackback.ComponentQuery() {
              @Override
              public Component getComponent() {
                return wrapper.getPreferredFocusedComponent();
              }
            });
          }
        }
        SwingUtilities.invokeLater(() -> {
          final DialogWrapper wrapper = getActiveWrapper();
          if (wrapper == null && !myFocusedCallback.isProcessed()) {
            myFocusedCallback.setRejected();
            myTypeAheadDone.setRejected();
            return;
          }

          // run the focus setup only on the very first activation
          if (myActivated) {
            return;
          }
          myActivated = true;
          JComponent toFocus = wrapper == null ? null : wrapper.getPreferredFocusedComponent();
          if (getRootPane() != null && toFocus == null) {
            toFocus = getRootPane().getDefaultButton();
          }

          if (getRootPane() != null) {
            IJSwingUtilities.moveMousePointerOn(getRootPane().getDefaultButton());
          }
          setupSelectionOnPreferredComponent(toFocus);

          if (toFocus != null) {
            if (isShowing() && isActive()) {
              getFocusManager().requestFocus(toFocus, true);
              notifyFocused(wrapper);
            }
          }
          else {
            if (isShowing()) {
              notifyFocused(wrapper);
            }
          }
          if (myTypeAheadCallback != null) {
            myTypeAheadCallback.setDone();
          }
        });
      }

      @Override
      public void windowDeactivated(WindowEvent e) {
        if (!isModal()) {
          Ref<IdeFocusManager> focusManager = new Ref<>(null);
          Project project = getProject();
          if (project != null && !project.isDisposed()) {
            focusManager.set(getFocusManager());
            focusManager.get().doWhenFocusSettlesDown(() -> disposeFocusTrackbackIfNoChildWindowFocused(focusManager.get()));
          }
          else {
            disposeFocusTrackbackIfNoChildWindowFocused(focusManager.get());
          }
        }
      }

      private void notifyFocused(DialogWrapper wrapper) {
        myFocusedCallback.setDone();
        final long timeout = wrapper.getTypeAheadTimeoutMs();
        if (timeout > 0) {
          SimpleTimer.getInstance().setUp(new EdtRunnable() {
            @Override
            public void runEdt() {
              myTypeAheadDone.setDone();
            }
          }, timeout);
        }
      }

      private DialogWrapper getActiveWrapper() {
        DialogWrapper activeWrapper = getDialogWrapper();
        if (activeWrapper == null || !activeWrapper.isShowing()) {
          return null;
        }

        return activeWrapper;
      }
    }

    private class DialogRootPane extends JRootPane implements DataProvider {

      private final boolean myGlassPaneIsSet;

      private Dimension myLastMinimumSize;

      private DialogRootPane() {
        setGlassPane(new IdeGlassPaneImpl(this));
        // any later setGlassPane call is a programming error — see below
        myGlassPaneIsSet = true;
        putClientProperty("DIALOG_ROOT_PANE", true);
      }

      @NotNull
      @Override
      protected JLayeredPane createLayeredPane() {
        JLayeredPane p = new JBLayeredPane();
        p.setName(this.getName() + ".layeredPane");
        return p;
      }

      @Override
      public void validate() {
        super.validate();
        DialogWrapper wrapper = myDialogWrapper.get();
        if (wrapper != null && wrapper.isAutoAdjustable()) {
          Window window = wrapper.getWindow();
          if (window != null) {
            Dimension size = getMinimumSize();
            if (!(size == null ? myLastMinimumSize == null : size.equals(myLastMinimumSize))) {
              // update window minimum size only if root pane minimum size is changed
              if (size == null) {
                myLastMinimumSize = null;
              }
              else {
                myLastMinimumSize = new Dimension(size);
                JBInsets.addTo(size, window.getInsets());
                Rectangle screen = ScreenUtil.getScreenRectangle(window);
                if (size.width > screen.width || size.height > screen.height) {
                  Application application = ApplicationManager.getApplication();
                  if (application != null && application.isInternal()) {
                    LOG.warn("minimum size " + size.width + "x" + size.height +
                             " is bigger than screen " + screen.width + "x" + screen.height);
                  }
                  if (size.width > screen.width) size.width = screen.width;
                  if (size.height > screen.height) size.height = screen.height;
                }
              }
              window.setMinimumSize(size);
            }
          }
        }
      }

      @Override
      public void setGlassPane(final Component glass) {
        if (myGlassPaneIsSet) {
          LOG.warn("Setting of glass pane for DialogWrapper is prohibited", new Exception());
          return;
        }

        super.setGlassPane(glass);
      }

      @Override
      public void setContentPane(Container contentPane) {
        super.setContentPane(contentPane);
        if (contentPane != null) {
          contentPane.addMouseMotionListener(new MouseMotionAdapter() {}); // listen to mouse motion events for a11y
        }
      }

      @Override
      public Object getData(@NonNls String dataId) {
        final DialogWrapper wrapper = myDialogWrapper.get();
        return wrapper != null && PlatformDataKeys.UI_DISPOSABLE.is(dataId) ? wrapper.getDisposable() : null;
      }
    }
  }

  // Pre-select text in a text field (unless the wrapper already set an initial
  // selection) or the editor content of a combo box.
  private static void setupSelectionOnPreferredComponent(final JComponent component) {
    if (component instanceof JTextField) {
      JTextField field = (JTextField)component;
      String text = field.getText();
      if (text != null && field.getClientProperty(HAVE_INITIAL_SELECTION) == null) {
        field.setSelectionStart(0);
        field.setSelectionEnd(text.length());
      }
    }
    else if (component instanceof JComboBox) {
      JComboBox combobox = (JComboBox)component;
      combobox.getEditor().selectAll();
    }
  }

  @Override
  public void setContentPane(JComponent content) {
    myDialog.setContentPane(content);
  }

  @Override
  public void centerInParent() {
    myDialog.centerInParent();
  }

  public void setAutoRequestFocus(boolean b) {
    UIUtil.setAutoRequestFocus((JDialog)myDialog, b);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools.rumen; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.StringTokenizer; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent; import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.JobFinished; import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent; import org.apache.hadoop.mapreduce.jobhistory.JobInfoChangeEvent; import org.apache.hadoop.mapreduce.jobhistory.JobInitedEvent; import org.apache.hadoop.mapreduce.jobhistory.JobPriorityChangeEvent; import org.apache.hadoop.mapreduce.jobhistory.JobStatusChangedEvent; import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent; import org.apache.hadoop.mapreduce.jobhistory.JobQueueChangeEvent; import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent; 
import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinished;
import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinishedEvent;
import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent;
import org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinished;
import org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinishedEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptFinished;
import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptFinishedEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptStartedEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion;
import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletionEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskFailed;
import org.apache.hadoop.mapreduce.jobhistory.TaskFailedEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskFinished;
import org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskUpdatedEvent;
import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values;
import org.apache.hadoop.util.StringUtils;

/**
 * {@link JobBuilder} builds one job. It processes a sequence of
 * {@link HistoryEvent}s.
 *
 * Usage: feed events via {@link #process(HistoryEvent)} and (optionally, any
 * number of times, before/during/after events) job-conf properties via
 * {@link #process(Properties)}, then call {@link #build()} exactly once.
 * After {@code build()}, both {@code process} overloads throw
 * {@link IllegalStateException}. Not thread-safe: no synchronization around
 * the mutable maps and the partially-built {@code result}.
 */
public class JobBuilder {
  private static final long BYTES_IN_MEG =
      StringUtils.TraditionalBinaryPrefix.string2long("1m");

  static final private Log LOG = LogFactory.getLog(JobBuilder.class);

  private String jobID;

  // Set to true by build(); guards both process(...) overloads.
  private boolean finalized = false;

  // The job being accumulated; handed out (not copied) by build().
  private ParsedJob result = new ParsedJob();

  // Task-ID string -> task, split by task kind. getTask() searches all three.
  private Map<String, ParsedTask> mapTasks = new HashMap<String, ParsedTask>();
  private Map<String, ParsedTask> reduceTasks =
      new HashMap<String, ParsedTask>();
  private Map<String, ParsedTask> otherTasks =
      new HashMap<String, ParsedTask>();

  // Attempt-ID string -> attempt, across all tasks.
  private Map<String, ParsedTaskAttempt> attempts =
      new HashMap<String, ParsedTaskAttempt>();

  // Interning map so equal hosts share one canonical ParsedHost instance.
  private Map<ParsedHost, ParsedHost> allHosts =
      new HashMap<ParsedHost, ParsedHost>();

  // Substituted for a null counters field when incorporating counters.
  private org.apache.hadoop.mapreduce.jobhistory.JhCounters EMPTY_COUNTERS =
      new org.apache.hadoop.mapreduce.jobhistory.JhCounters();

  /**
   * The number of splits a task can have, before we ignore them all.
   */
  private final static int MAXIMUM_PREFERRED_LOCATIONS = 25;

  // Percentile points (5, 10, ..., 95) used for every attempt-time CDF;
  // filled in lazily by the instance initializer below.
  private int[] attemptTimesPercentiles = null;

  // Use this to search within the java options to get heap sizes.
  // The heap size number plus its order-of-magnitude suffix (k/m/g/t, either
  // case) is in Capturing Group 1; TraditionalBinaryPrefix.string2long parses
  // both together.
  private static final Pattern heapPattern =
      Pattern.compile("-Xmx([0-9]+[kKmMgGtT])");

  // Last conf handed to process(Properties); attached to result by build().
  private Properties jobConfigurationParameters = null;

  public JobBuilder(String jobID) {
    this.jobID = jobID;
  }

  public String getJobID() {
    return jobID;
  }

  // Instance initializer: set up the 19 percentile points 5..95 in steps of 5.
  {
    if (attemptTimesPercentiles == null) {
      attemptTimesPercentiles = new int[19];

      for (int i = 0; i < 19; ++i) {
        attemptTimesPercentiles[i] = (i + 1) * 5;
      }
    }
  }

  /**
   * Process one {@link HistoryEvent}
   *
   * @param event
   *          The {@link HistoryEvent} to be processed.
   * @throws IllegalStateException if called after {@link #build()}.
   * @throws IllegalArgumentException for an unrecognized event type.
   */
  public void process(HistoryEvent event) {
    if (finalized) {
      throw new IllegalStateException(
          "JobBuilder.process(HistoryEvent event) called after ParsedJob built");
    }

    // these are in lexicographical order by class name.
    if (event instanceof AMStartedEvent) {
      // ignore this event as Rumen currently doesnt need this event
      //TODO Enhance Rumen to process this event and capture restarts
      return;
    } else if (event instanceof NormalizedResourceEvent) {
      // Log a warn message as NormalizedResourceEvent shouldn't be written.
      LOG.warn("NormalizedResourceEvent should be ignored in history server.");
    } else if (event instanceof JobFinishedEvent) {
      processJobFinishedEvent((JobFinishedEvent) event);
    } else if (event instanceof JobInfoChangeEvent) {
      processJobInfoChangeEvent((JobInfoChangeEvent) event);
    } else if (event instanceof JobInitedEvent) {
      processJobInitedEvent((JobInitedEvent) event);
    } else if (event instanceof JobPriorityChangeEvent) {
      processJobPriorityChangeEvent((JobPriorityChangeEvent) event);
    } else if (event instanceof JobQueueChangeEvent) {
      processJobQueueChangeEvent((JobQueueChangeEvent) event);
    } else if (event instanceof JobStatusChangedEvent) {
      processJobStatusChangedEvent((JobStatusChangedEvent) event);
    } else if (event instanceof JobSubmittedEvent) {
      processJobSubmittedEvent((JobSubmittedEvent) event);
    } else if (event instanceof JobUnsuccessfulCompletionEvent) {
      processJobUnsuccessfulCompletionEvent((JobUnsuccessfulCompletionEvent) event);
    } else if (event instanceof MapAttemptFinishedEvent) {
      processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
    } else if (event instanceof ReduceAttemptFinishedEvent) {
      processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
    } else if (event instanceof TaskAttemptFinishedEvent) {
      processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
    } else if (event instanceof TaskAttemptStartedEvent) {
      processTaskAttemptStartedEvent((TaskAttemptStartedEvent) event);
    } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
      processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
    } else if (event instanceof TaskFailedEvent) {
      processTaskFailedEvent((TaskFailedEvent) event);
    } else if (event instanceof TaskFinishedEvent) {
      processTaskFinishedEvent((TaskFinishedEvent) event);
    } else if (event instanceof TaskStartedEvent) {
      processTaskStartedEvent((TaskStartedEvent) event);
    } else if (event instanceof TaskUpdatedEvent) {
      processTaskUpdatedEvent((TaskUpdatedEvent) event);
    } else
      throw new IllegalArgumentException(
          "JobBuilder.process(HistoryEvent): unknown event type:"
              + event.getEventType() + " for event:" + event);
  }

  /**
   * Returns the first property among {@code names} present in {@code conf},
   * or {@code defaultValue} when none is set.
   */
  static String extract(Properties conf, String[] names, String defaultValue) {
    for (String name : names) {
      String result = conf.getProperty(name);

      if (result != null) {
        return result;
      }
    }

    return defaultValue;
  }

  /**
   * Extracts an -Xmx heap size, in megabytes, from the java-options property
   * named by {@code names}. Returns null if no candidate property is set or
   * no -Xmx flag appears. If several -Xmx flags appear, the last one wins
   * (the while loop keeps overwriting heapMegabytes).
   */
  private Integer extractMegabytes(Properties conf, String[] names) {
    String javaOptions = extract(conf, names, null);

    if (javaOptions == null) {
      return null;
    }

    Matcher matcher = heapPattern.matcher(javaOptions);

    Integer heapMegabytes = null;

    while (matcher.find()) {
      String heapSize = matcher.group(1);
      heapMegabytes =
          ((int) (StringUtils.TraditionalBinaryPrefix.string2long(heapSize) / BYTES_IN_MEG));
    }

    return heapMegabytes;
  }

  // The three maybeSet* helpers only write through when a value was found,
  // so an absent conf property never clobbers a previously-set value.
  private void maybeSetHeapMegabytes(Integer megabytes) {
    if (megabytes != null) {
      result.setHeapMegabytes(megabytes);
    }
  }

  private void maybeSetJobMapMB(Integer megabytes) {
    if (megabytes != null) {
      result.setJobMapMB(megabytes);
    }
  }

  private void maybeSetJobReduceMB(Integer megabytes) {
    if (megabytes != null) {
      result.setJobReduceMB(megabytes);
    }
  }

  /**
   * Process a collection of JobConf {@link Properties}. We do not restrict it
   * to be called once. It is okay to process a conf before, during or after the
   * events.
   *
   * @param conf
   *          The job conf properties to be added.
   * @throws IllegalStateException if called after {@link #build()}.
   */
  public void process(Properties conf) {
    if (finalized) {
      throw new IllegalStateException(
          "JobBuilder.process(Properties conf) called after ParsedJob built");
    }

    //TODO remove this once the deprecate APIs in LoggedJob are removed
    String queue =
        extract(conf, JobConfPropertyNames.QUEUE_NAMES.getCandidates(), null);
    // set the queue name if existing
    if (queue != null) {
      result.setQueue(queue);
    }
    result.setJobName(extract(conf, JobConfPropertyNames.JOB_NAMES
        .getCandidates(), null));

    maybeSetHeapMegabytes(extractMegabytes(conf,
        JobConfPropertyNames.TASK_JAVA_OPTS_S.getCandidates()));
    maybeSetJobMapMB(extractMegabytes(conf,
        JobConfPropertyNames.MAP_JAVA_OPTS_S.getCandidates()));
    maybeSetJobReduceMB(extractMegabytes(conf,
        JobConfPropertyNames.REDUCE_JAVA_OPTS_S.getCandidates()));

    // NOTE: the last conf processed wins; earlier confs are discarded here.
    this.jobConfigurationParameters = conf;
  }

  /**
   * Request the builder to build the final object. Once called, the
   * {@link JobBuilder} would accept no more events or job-conf properties.
   *
   * @return Parsed {@link ParsedJob} object.
   */
  public ParsedJob build() {
    // The main job here is to build CDFs and manage the conf
    finalized = true;

    // set the conf
    if (jobConfigurationParameters != null) {
      result.setJobProperties(jobConfigurationParameters);
    }

    // initialize all the per-job statistics gathering places.
    // Map-attempt histograms are indexed by host distance; the extra final
    // slot is the default for attempts whose distance could not be computed.
    Histogram[] successfulMapAttemptTimes =
        new Histogram[ParsedHost.numberOfDistances() + 1];
    for (int i = 0; i < successfulMapAttemptTimes.length; ++i) {
      successfulMapAttemptTimes[i] = new Histogram();
    }

    Histogram successfulReduceAttemptTimes = new Histogram();
    Histogram[] failedMapAttemptTimes =
        new Histogram[ParsedHost.numberOfDistances() + 1];
    for (int i = 0; i < failedMapAttemptTimes.length; ++i) {
      failedMapAttemptTimes[i] = new Histogram();
    }

    Histogram failedReduceAttemptTimes = new Histogram();

    Histogram successfulNthMapperAttempts = new Histogram();
    // Histogram successfulNthReducerAttempts = new Histogram();
    // Histogram mapperLocality = new Histogram();

    for (LoggedTask task : result.getMapTasks()) {
      for (LoggedTaskAttempt attempt : task.getAttempts()) {
        // Default distance = the extra "unknown" slot; possibly tightened
        // below by comparing the attempt's host to each preferred location.
        int distance = successfulMapAttemptTimes.length - 1;
        Long runtime = null;

        // Only attempts with valid (positive) start and finish times count.
        if (attempt.getFinishTime() > 0 && attempt.getStartTime() > 0) {
          runtime = attempt.getFinishTime() - attempt.getStartTime();

          if (attempt.getResult() == Values.SUCCESS) {
            LoggedLocation host = attempt.getLocation();

            List<LoggedLocation> locs = task.getPreferredLocations();

            if (host != null && locs != null) {
              for (LoggedLocation loc : locs) {
                ParsedHost preferedLoc = new ParsedHost(loc);

                distance = Math.min(distance, preferedLoc
                    .distance(new ParsedHost(host)));
              }
              // mapperLocality.enter(distance);
            }

            if (attempt.getStartTime() > 0 && attempt.getFinishTime() > 0) {
              if (runtime != null) {
                successfulMapAttemptTimes[distance].enter(runtime);
              }
            }

            TaskAttemptID attemptID = attempt.getAttemptID();

            if (attemptID != null) {
              successfulNthMapperAttempts.enter(attemptID.getId());
            }
          } else {
            if (attempt.getResult() == Pre21JobHistoryConstants.Values.FAILED) {
              if (runtime != null) {
                failedMapAttemptTimes[distance].enter(runtime);
              }
            }
          }
        }
      }
    }

    for (LoggedTask task : result.getReduceTasks()) {
      for (LoggedTaskAttempt attempt : task.getAttempts()) {
        // NOTE(review): runtime is first computed unconditionally, so when
        // the timestamps are invalid (<= 0) a meaningless difference can
        // still reach failedReduceAttemptTimes below — unlike the map loop,
        // which skips such attempts entirely. Looks unintentional; confirm
        // before changing, as it alters the failed-reduce CDF.
        Long runtime = attempt.getFinishTime() - attempt.getStartTime();
        if (attempt.getFinishTime() > 0 && attempt.getStartTime() > 0) {
          runtime = attempt.getFinishTime() - attempt.getStartTime();
        }
        if (attempt.getResult() == Values.SUCCESS) {
          if (runtime != null) {
            successfulReduceAttemptTimes.enter(runtime);
          }
        } else if (attempt.getResult() == Pre21JobHistoryConstants.Values.FAILED) {
          failedReduceAttemptTimes.enter(runtime);
        }
      }
    }

    // Convert the histograms into CDFs on the fixed percentile points.
    result.setFailedMapAttemptCDFs(mapCDFArrayList(failedMapAttemptTimes));

    LoggedDiscreteCDF failedReduce = new LoggedDiscreteCDF();
    failedReduce.setCDF(failedReduceAttemptTimes, attemptTimesPercentiles, 100);
    result.setFailedReduceAttemptCDF(failedReduce);

    result
        .setSuccessfulMapAttemptCDFs(mapCDFArrayList(successfulMapAttemptTimes));

    LoggedDiscreteCDF succReduce = new LoggedDiscreteCDF();
    succReduce.setCDF(successfulReduceAttemptTimes, attemptTimesPercentiles,
        100);
    result.setSuccessfulReduceAttemptCDF(succReduce);

    // Distribution of "which attempt number finally succeeded" for mappers:
    // successAfterI[i] = fraction of successes that happened on attempt i.
    long totalSuccessfulAttempts = 0L;
    long maxTriesToSucceed = 0L;

    for (Map.Entry<Long, Long> ent : successfulNthMapperAttempts) {
      totalSuccessfulAttempts += ent.getValue();
      maxTriesToSucceed = Math.max(maxTriesToSucceed, ent.getKey());
    }

    if (totalSuccessfulAttempts > 0L) {
      double[] successAfterI = new double[(int) maxTriesToSucceed + 1];
      for (int i = 0; i < successAfterI.length; ++i) {
        successAfterI[i] = 0.0D;
      }

      for (Map.Entry<Long, Long> ent : successfulNthMapperAttempts) {
        successAfterI[ent.getKey().intValue()] =
            ((double) ent.getValue()) / totalSuccessfulAttempts;
      }
      result.setMapperTriesToSucceed(successAfterI);
    } else {
      result.setMapperTriesToSucceed(null);
    }

    return result;
  }

  // Builds one CDF per histogram in data, all on the same percentile points.
  private ArrayList<LoggedDiscreteCDF> mapCDFArrayList(Histogram[] data) {
    ArrayList<LoggedDiscreteCDF> result = new ArrayList<LoggedDiscreteCDF>();

    for (Histogram hist : data) {
      LoggedDiscreteCDF discCDF = new LoggedDiscreteCDF();
      discCDF.setCDF(hist, attemptTimesPercentiles, 100);
      result.add(discCDF);
    }

    return result;
  }

  // Maps a raw status/type string from the history log onto the pre-0.21
  // Values enum, smoothing over names that were renamed across versions.
  private static Values getPre21Value(String name) {
    if (name.equalsIgnoreCase("JOB_CLEANUP")) {
      return Values.CLEANUP;
    }

    if (name.equalsIgnoreCase("JOB_SETUP")) {
      return Values.SETUP;
    }

    // Note that pre-21, the task state of a successful task was logged as
    // SUCCESS while from 21 onwards, its logged as SUCCEEDED.
    if (name.equalsIgnoreCase(TaskStatus.State.SUCCEEDED.toString())) {
      return Values.SUCCESS;
    }

    return Values.valueOf(StringUtils.toUpperCase(name));
  }

  // Silently ignores updates for tasks we have never seen (task == null).
  private void processTaskUpdatedEvent(TaskUpdatedEvent event) {
    ParsedTask task = getTask(event.getTaskId().toString());
    if (task == null) {
      return;
    }
    task.setFinishTime(event.getFinishTime());
  }

  // allowCreate = true: a start event may introduce a brand-new task.
  private void processTaskStartedEvent(TaskStartedEvent event) {
    ParsedTask task =
        getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
    task.setStartTime(event.getStartTime());
    task.setPreferredLocations(preferredLocationForSplits(event
        .getSplitLocations()));
  }

  private void processTaskFinishedEvent(TaskFinishedEvent event) {
    ParsedTask task =
        getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
    if (task == null) {
      return;
    }
    task.setFinishTime(event.getFinishTime());
    task.setTaskStatus(getPre21Value(event.getTaskStatus()));
    task.incorporateCounters(((TaskFinished) event.getDatum()).counters);
  }

  private void processTaskFailedEvent(TaskFailedEvent event) {
    ParsedTask task =
        getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
    if (task == null) {
      return;
    }
    task.setFinishTime(event.getFinishTime());
    task.setTaskStatus(getPre21Value(event.getTaskStatus()));
    TaskFailed t = (TaskFailed)(event.getDatum());
    task.putDiagnosticInfo(t.error.toString());
    task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
    // counters may legitimately be absent in the Avro datum; substitute the
    // shared empty instance rather than passing null downstream.
    org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
        ((TaskFailed) event.getDatum()).counters;
    task.incorporateCounters(counters == null ? EMPTY_COUNTERS : counters);
  }

  private void processTaskAttemptUnsuccessfulCompletionEvent(
      TaskAttemptUnsuccessfulCompletionEvent event) {
    ParsedTaskAttempt attempt =
        getOrMakeTaskAttempt(event.getTaskType(), event.getTaskId().toString(),
            event.getTaskAttemptId().toString());

    if (attempt == null) {
      return;
    }

    attempt.setResult(getPre21Value(event.getTaskStatus()));

    attempt.setHostName(event.getHostname(), event.getRackName());
    ParsedHost pHost =
        getAndRecordParsedHost(event.getRackName(), event.getHostname());
    if (pHost != null) {
      attempt.setLocation(pHost.makeLoggedLocation());
    }

    attempt.setFinishTime(event.getFinishTime());
    org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
        ((TaskAttemptUnsuccessfulCompletion) event.getDatum()).counters;
    attempt.incorporateCounters(counters == null ? EMPTY_COUNTERS : counters);

    // Progress-split vectors recorded over the attempt's lifetime.
    attempt.arraySetClockSplits(event.getClockSplits());
    attempt.arraySetCpuUsages(event.getCpuUsages());
    attempt.arraySetVMemKbytes(event.getVMemKbytes());
    attempt.arraySetPhysMemKbytes(event.getPhysMemKbytes());
    TaskAttemptUnsuccessfulCompletion t =
        (TaskAttemptUnsuccessfulCompletion) (event.getDatum());
    attempt.putDiagnosticInfo(t.error.toString());
  }

  private void processTaskAttemptStartedEvent(TaskAttemptStartedEvent event) {
    ParsedTaskAttempt attempt =
        getOrMakeTaskAttempt(event.getTaskType(), event.getTaskId().toString(),
            event.getTaskAttemptId().toString());
    if (attempt == null) {
      return;
    }
    attempt.setStartTime(event.getStartTime());
    attempt.putTrackerName(event.getTrackerName());
    attempt.putHttpPort(event.getHttpPort());
    attempt.putShufflePort(event.getShufflePort());
  }

  private void processTaskAttemptFinishedEvent(TaskAttemptFinishedEvent event) {
    ParsedTaskAttempt attempt =
        getOrMakeTaskAttempt(event.getTaskType(), event.getTaskId().toString(),
            event.getAttemptId().toString());

    if (attempt == null) {
      return;
    }

    attempt.setResult(getPre21Value(event.getTaskStatus()));
    ParsedHost pHost =
        getAndRecordParsedHost(event.getRackName(), event.getHostname());
    if (pHost != null) {
      attempt.setLocation(pHost.makeLoggedLocation());
    }

    attempt.setFinishTime(event.getFinishTime());
    attempt
        .incorporateCounters(((TaskAttemptFinished) event.getDatum()).counters);
  }

  private void processReduceAttemptFinishedEvent(
      ReduceAttemptFinishedEvent event) {
    ParsedTaskAttempt attempt =
        getOrMakeTaskAttempt(event.getTaskType(), event.getTaskId().toString(),
            event.getAttemptId().toString());

    if (attempt == null) {
      return;
    }

    attempt.setResult(getPre21Value(event.getTaskStatus()));
    attempt.setHostName(event.getHostname(), event.getRackName());
    ParsedHost pHost =
        getAndRecordParsedHost(event.getRackName(), event.getHostname());
    if (pHost != null) {
      attempt.setLocation(pHost.makeLoggedLocation());
    }

    // XXX There may be redundant location info available in the event.
    // We might consider extracting it from this event. Currently this
    // is redundant, but extracting it here would add future-proofing.
    attempt.setFinishTime(event.getFinishTime());
    attempt.setShuffleFinished(event.getShuffleFinishTime());
    attempt.setSortFinished(event.getSortFinishTime());
    attempt
        .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).counters);
    attempt.arraySetClockSplits(event.getClockSplits());
    attempt.arraySetCpuUsages(event.getCpuUsages());
    attempt.arraySetVMemKbytes(event.getVMemKbytes());
    attempt.arraySetPhysMemKbytes(event.getPhysMemKbytes());
  }

  private void processMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
    ParsedTaskAttempt attempt =
        getOrMakeTaskAttempt(event.getTaskType(), event.getTaskId().toString(),
            event.getAttemptId().toString());

    if (attempt == null) {
      return;
    }

    attempt.setResult(getPre21Value(event.getTaskStatus()));
    attempt.setHostName(event.getHostname(), event.getRackName());
    ParsedHost pHost =
        getAndRecordParsedHost(event.getRackName(), event.getHostname());
    if (pHost != null) {
      attempt.setLocation(pHost.makeLoggedLocation());
    }

    // XXX There may be redundant location info available in the event.
    // We might consider extracting it from this event. Currently this
    // is redundant, but extracting it here would add future-proofing.
    attempt.setFinishTime(event.getFinishTime());
    attempt
        .incorporateCounters(((MapAttemptFinished) event.getDatum()).counters);
    attempt.arraySetClockSplits(event.getClockSplits());
    attempt.arraySetCpuUsages(event.getCpuUsages());
    attempt.arraySetVMemKbytes(event.getVMemKbytes());
    attempt.arraySetPhysMemKbytes(event.getPhysMemKbytes());
  }

  private void processJobUnsuccessfulCompletionEvent(
      JobUnsuccessfulCompletionEvent event) {
    result.setOutcome(Pre21JobHistoryConstants.Values
        .valueOf(event.getStatus()));
    result.setFinishTime(event.getFinishTime());
    // No counters in JobUnsuccessfulCompletionEvent
  }

  private void processJobSubmittedEvent(JobSubmittedEvent event) {
    result.setJobID(event.getJobId().toString());
    result.setJobName(event.getJobName());
    result.setUser(event.getUserName());
    result.setSubmitTime(event.getSubmitTime());
    result.putJobConfPath(event.getJobConfPath());
    result.putJobAcls(event.getJobAcls());

    // set the queue name if existing
    String queue = event.getJobQueueName();
    if (queue != null) {
      result.setQueue(queue);
    }
  }

  private void processJobQueueChangeEvent(JobQueueChangeEvent event) {
    // set the queue name if existing
    String queue = event.getJobQueueName();
    if (queue != null) {
      result.setQueue(queue);
    }
  }

  private void processJobStatusChangedEvent(JobStatusChangedEvent event) {
    result.setOutcome(Pre21JobHistoryConstants.Values
        .valueOf(event.getStatus()));
  }

  private void processJobPriorityChangeEvent(JobPriorityChangeEvent event) {
    result.setPriority(LoggedJob.JobPriority.valueOf(event.getPriority()
        .toString()));
  }

  private void processJobInitedEvent(JobInitedEvent event) {
    result.setLaunchTime(event.getLaunchTime());
    result.setTotalMaps(event.getTotalMaps());
    result.setTotalReduces(event.getTotalReduces());
  }

  private void processJobInfoChangeEvent(JobInfoChangeEvent event) {
    result.setLaunchTime(event.getLaunchTime());
  }

  private void processJobFinishedEvent(JobFinishedEvent event) {
    result.setFinishTime(event.getFinishTime());
    result.setJobID(jobID);
    result.setOutcome(Values.SUCCESS);

    // Flatten the three Avro counter groups into plain name->value maps.
    JobFinished job = (JobFinished)event.getDatum();
    Map<String, Long> countersMap =
        JobHistoryUtils.extractCounters(job.totalCounters);
    result.putTotalCounters(countersMap);
    countersMap = JobHistoryUtils.extractCounters(job.mapCounters);
    result.putMapCounters(countersMap);
    countersMap = JobHistoryUtils.extractCounters(job.reduceCounters);
    result.putReduceCounters(countersMap);
  }

  // Looks the task up in the map, then reduce, then other-task maps.
  private ParsedTask getTask(String taskIDname) {
    ParsedTask result = mapTasks.get(taskIDname);

    if (result != null) {
      return result;
    }

    result = reduceTasks.get(taskIDname);

    if (result != null) {
      return result;
    }

    return otherTasks.get(taskIDname);
  }

  /**
   * @param type
   *          the task type
   * @param taskIDname
   *          the task ID name, as a string
   * @param allowCreate
   *          if true, we can create a task.
   * @return the existing task for this ID in the map selected by {@code type},
   *         a freshly-registered one if absent and {@code allowCreate} is
   *         true, or null otherwise.
   */
  private ParsedTask getOrMakeTask(TaskType type, String taskIDname,
      boolean allowCreate) {
    Map<String, ParsedTask> taskMap = otherTasks;
    List<LoggedTask> tasks = this.result.getOtherTasks();

    switch (type) {
    case MAP:
      taskMap = mapTasks;
      tasks = this.result.getMapTasks();

      break;

    case REDUCE:
      taskMap = reduceTasks;
      tasks = this.result.getReduceTasks();

      break;

    default:
      // no code
    }

    ParsedTask result = taskMap.get(taskIDname);

    if (result == null && allowCreate) {
      result = new ParsedTask();
      result.setTaskType(getPre21Value(type.toString()));
      result.setTaskID(taskIDname);
      // Register the new task in both the lookup map and the ParsedJob list.
      taskMap.put(taskIDname, result);
      tasks.add(result);
    }

    return result;
  }

  // Returns null when the owning task is unknown (never creates the task;
  // allowCreate is passed as false), so attempt events for unseen tasks are
  // dropped by the callers' null checks.
  private ParsedTaskAttempt getOrMakeTaskAttempt(TaskType type,
      String taskIDName, String taskAttemptName) {
    ParsedTask task = getOrMakeTask(type, taskIDName, false);
    ParsedTaskAttempt result = attempts.get(taskAttemptName);

    if (result == null && task != null) {
      result = new ParsedTaskAttempt();
      result.setAttemptID(taskAttemptName);
      attempts.put(taskAttemptName, result);
      task.getAttempts().add(result);
    }

    return result;
  }

  private ParsedHost getAndRecordParsedHost(String hostName) {
    return getAndRecordParsedHost(null, hostName);
  }

  // Parses (or builds) a ParsedHost and interns it via allHosts so equal
  // hosts share one canonical instance. Returns null if parsing fails.
  private ParsedHost getAndRecordParsedHost(String rackName, String hostName) {
    ParsedHost result = null;
    if (rackName == null) {
      // for old (pre-23) job history files where hostname was represented as
      // /rackname/hostname
      result = ParsedHost.parse(hostName);
    } else {
      // for new (post-23) job history files
      result = new ParsedHost(rackName, hostName);
    }

    if (result != null) {
      ParsedHost canonicalResult = allHosts.get(result);

      if (canonicalResult != null) {
        return canonicalResult;
      }

      allHosts.put(result, result);

      return result;
    }

    return null;
  }

  // Parses a comma-separated split-location list into LoggedLocations.
  // Returns null (not an empty list) when splits is null or there are more
  // than MAXIMUM_PREFERRED_LOCATIONS entries.
  private ArrayList<LoggedLocation> preferredLocationForSplits(String splits) {
    if (splits != null) {
      ArrayList<LoggedLocation> locations = null;

      StringTokenizer tok = new StringTokenizer(splits, ",", false);

      if (tok.countTokens() <= MAXIMUM_PREFERRED_LOCATIONS) {
        locations = new ArrayList<LoggedLocation>();

        while (tok.hasMoreTokens()) {
          String nextSplit = tok.nextToken();

          ParsedHost node = getAndRecordParsedHost(nextSplit);

          if (locations != null && node != null) {
            locations.add(node.makeLoggedLocation());
          }
        }

        return locations;
      }
    }

    return null;
  }
}
/** * Copyright 2010-2014 Axel Fontaine and the many contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flywaydb.core.util; import java.util.Collection; import java.util.Iterator; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Various string-related utilities. */ public class StringUtils { /** * Prevents instantiation. */ private StringUtils() { // Do nothing. } /** * Trims or pads (with spaces) this string, so it has this exact length. * * @param str The string to adjust. {@code null} is treated as an empty string. * @param length The exact length to reach. * @return The adjusted string. */ public static String trimOrPad(String str, int length) { return trimOrPad(str, length, ' '); } /** * Trims or pads this string, so it has this exact length. * * @param str The string to adjust. {@code null} is treated as an empty string. * @param length The exact length to reach. * @param padChar The padding character. * @return The adjusted string. */ public static String trimOrPad(String str, int length, char padChar) { String result; if (str == null) { result = ""; } else { result = str; } if (result.length() > length) { return result.substring(0, length); } while (result.length() < length) { result += padChar; } return result; } /** * <p>Checks if the String contains only unicode digits. A decimal point is not a unicode digit and returns * false.</p> <p/> <p>{@code null} will return {@code false}. 
An empty String ("") will return {@code true}.</p> * <p/> * <pre> * StringUtils.isNumeric(null) = false * StringUtils.isNumeric("") = true * StringUtils.isNumeric(" ") = false * StringUtils.isNumeric("123") = true * StringUtils.isNumeric("12 3") = false * StringUtils.isNumeric("ab2c") = false * StringUtils.isNumeric("12-3") = false * StringUtils.isNumeric("12.3") = false * </pre> * * @param str the String to check, may be null * @return {@code true} if only contains digits, and is non-null */ public static boolean isNumeric(String str) { return str != null && str.matches("\\d*"); } /** * Replaces all sequences of whitespace by a single blank. Ex.: "&nbsp;&nbsp;&nbsp;&nbsp;" -> " " * * @param str The string to analyse. * @return The input string, with all whitespace collapsed. */ public static String collapseWhitespace(String str) { return str.replaceAll("\\s+", " "); } /** * Returns the first n characters from this string, where n = count. If the string is shorter, the entire string * will be returned. If the string is longer, it will be truncated. * * @param str The string to parse. * @param count The amount of characters to return. * @return The first n characters from this string, where n = count. */ public static String left(String str, int count) { if (str == null) { return null; } if (str.length() < count) { return str; } return str.substring(0, count); } /** * Replaces all occurrances of this originalToken in this string with this replacementToken. * * @param str The string to process. * @param originalToken The token to replace. * @param replacementToken The replacement. * @return The transformed str. */ public static String replaceAll(String str, String originalToken, String replacementToken) { return str.replaceAll(Pattern.quote(originalToken), Matcher.quoteReplacement(replacementToken)); } /** * Checks whether this string is not {@code null} and not <i>blank</i>. * * @param str The string to check. 
* @return {@code true} if it has content, {@code false} if it is {@code null} or blank. */ public static boolean hasLength(String str) { return str != null && str.trim().length() > 0; } /** * Turns this string array in one comma-delimited string. * * @param strings The array to process. * @return The new comma-delimited string. An empty string if {@code strings} is {@code null}. */ public static String arrayToCommaDelimitedString(Object[] strings) { if (strings == null) { return ""; } StringBuilder builder = new StringBuilder(); for (int i = 0; i < strings.length; i++) { if (i > 0) { builder.append(","); } builder.append(String.valueOf(strings[i])); } return builder.toString(); } /** * Checks whether this string isn't {@code null} and contains at least one non-blank character. * * @param s The string to check. * @return {@code true} if it has text, {@code false} if not. */ public static boolean hasText(String s) { return (s != null) && (s.trim().length() > 0); } /** * Splits this string into an array using these delimiters. * * @param str The string to split. * @param delimiters The delimiters to use. * @return The resulting array. */ public static String[] tokenizeToStringArray(String str, String delimiters) { if (str == null) { return null; } String[] tokens = str.split("[" + delimiters + "]"); for (int i = 0; i < tokens.length; i++) { tokens[i] = tokens[i].trim(); } return tokens; } /** * Counts the number of occurrences of this token in this string. * * @param str The string to analyse. * @param token The token to look for. * @return The number of occurrences. */ public static int countOccurrencesOf(String str, String token) { if (str == null || token == null || str.length() == 0 || token.length() == 0) { return 0; } int count = 0; int pos = 0; int idx; while ((idx = str.indexOf(token, pos)) != -1) { ++count; pos = idx + token.length(); } return count; } /** * Replace all occurences of a substring within a string with * another string. 
* * @param inString String to examine * @param oldPattern String to replace * @param newPattern String to insert * @return a String with the replacements */ public static String replace(String inString, String oldPattern, String newPattern) { if (!hasLength(inString) || !hasLength(oldPattern) || newPattern == null) { return inString; } StringBuilder sb = new StringBuilder(); int pos = 0; // our position in the old string int index = inString.indexOf(oldPattern); // the index of an occurrence we've found, or -1 int patLen = oldPattern.length(); while (index >= 0) { sb.append(inString.substring(pos, index)); sb.append(newPattern); pos = index + patLen; index = inString.indexOf(oldPattern, pos); } sb.append(inString.substring(pos)); // remember to append any characters to the right of a match return sb.toString(); } /** * Convenience method to return a Collection as a comma-delimited * String. E.g. useful for {@code toString()} implementations. * * @param collection the Collection to analyse * @return The comma-delimited String. */ public static String collectionToCommaDelimitedString(Collection<?> collection) { return collectionToDelimitedString(collection, ", "); } /** * Convenience method to return a Collection as a delimited * String. E.g. useful for {@code toString()} implementations. * * @param collection the Collection to analyse * @param delimiter The delimiter. * @return The delimited String. */ public static String collectionToDelimitedString(Collection<?> collection, String delimiter) { if (collection == null) { return ""; } StringBuilder sb = new StringBuilder(); Iterator it = collection.iterator(); while (it.hasNext()) { sb.append(it.next()); if (it.hasNext()) { sb.append(delimiter); } } return sb.toString(); } /** * Trim leading whitespace from the given String. 
* * @param str the String to check * @return the trimmed String * @see java.lang.Character#isWhitespace */ public static String trimLeadingWhitespace(String str) { if (!hasLength(str)) { return str; } StringBuilder buf = new StringBuilder(str); while (buf.length() > 0 && Character.isWhitespace(buf.charAt(0))) { buf.deleteCharAt(0); } return buf.toString(); } /** * Trim trailing whitespace from the given String. * * @param str the String to check * @return the trimmed String * @see java.lang.Character#isWhitespace */ public static String trimTrailingWhitespace(String str) { if (!hasLength(str)) { return str; } StringBuilder buf = new StringBuilder(str); while (buf.length() > 0 && Character.isWhitespace(buf.charAt(buf.length() - 1))) { buf.deleteCharAt(buf.length() - 1); } return buf.toString(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.connectors.kafka; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.OperatorStateStore; import org.apache.flink.api.common.typeinfo.TypeHint; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.ClosureCleaner; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; import org.apache.flink.configuration.Configuration; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.runtime.state.CheckpointListener; import org.apache.flink.runtime.state.DefaultOperatorStateBackend; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks; import 
org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks; import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction; import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.connectors.kafka.config.OffsetCommitMode; import org.apache.flink.streaming.connectors.kafka.config.OffsetCommitModes; import org.apache.flink.streaming.connectors.kafka.config.StartupMode; import org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher; import org.apache.flink.streaming.connectors.kafka.internals.AbstractPartitionDiscoverer; import org.apache.flink.streaming.connectors.kafka.internals.KafkaCommitCallback; import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition; import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartitionAssigner; import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartitionStateSentinel; import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.SerializedValue; import org.apache.commons.collections.map.LinkedMap; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; import static org.apache.flink.streaming.connectors.kafka.internals.metrics.KafkaConsumerMetricConstants.COMMITS_FAILED_METRICS_COUNTER; import static org.apache.flink.streaming.connectors.kafka.internals.metrics.KafkaConsumerMetricConstants.COMMITS_SUCCEEDED_METRICS_COUNTER; import static org.apache.flink.streaming.connectors.kafka.internals.metrics.KafkaConsumerMetricConstants.KAFKA_CONSUMER_METRICS_GROUP; import 
static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * Base class of all Flink Kafka Consumer data sources.
 * This implements the common behavior across all Kafka versions.
 *
 * <p>The Kafka version specific behavior is defined mainly in the specific subclasses of the
 * {@link AbstractFetcher}.
 *
 * @param <T> The type of records produced by this data source
 */
@Internal
public abstract class FlinkKafkaConsumerBase<T> extends RichParallelSourceFunction<T> implements
        CheckpointListener,
        ResultTypeQueryable<T>,
        CheckpointedFunction {

    private static final long serialVersionUID = -6272159445203409112L;

    protected static final Logger LOG = LoggerFactory.getLogger(FlinkKafkaConsumerBase.class);

    /** The maximum number of pending non-committed checkpoints to track, to avoid memory leaks. */
    public static final int MAX_NUM_PENDING_CHECKPOINTS = 100;

    /**
     * The default interval to execute partition discovery,
     * in milliseconds ({@code Long.MIN_VALUE}, i.e. disabled by default).
     */
    public static final long PARTITION_DISCOVERY_DISABLED = Long.MIN_VALUE;

    /** Boolean configuration key to disable metrics tracking. **/
    public static final String KEY_DISABLE_METRICS = "flink.disable-metrics";

    /** Configuration key to define the consumer's partition discovery interval, in milliseconds. */
    public static final String KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS = "flink.partition-discovery.interval-millis";

    /** State name of the consumer's partition offset states. */
    private static final String OFFSETS_STATE_NAME = "topic-partition-offset-states";

    // ------------------------------------------------------------------------
    //  configuration state, set on the client relevant for all subtasks
    // ------------------------------------------------------------------------

    /** Describes whether we are discovering partitions for fixed topics or a topic pattern.
*/
    private final KafkaTopicsDescriptor topicsDescriptor;

    /** The schema to convert between Kafka's byte messages, and Flink's objects. */
    protected final KafkaDeserializationSchema<T> deserializer;

    /** The set of topic partitions that the source will read, with their initial offsets to start reading from. */
    private Map<KafkaTopicPartition, Long> subscribedPartitionsToStartOffsets;

    /** Optional timestamp extractor / watermark generator that will be run per Kafka partition,
     * to exploit per-partition timestamp characteristics.
     * The assigner is kept in serialized form, to deserialize it into multiple copies. */
    private SerializedValue<AssignerWithPeriodicWatermarks<T>> periodicWatermarkAssigner;

    /** Optional timestamp extractor / watermark generator that will be run per Kafka partition,
     * to exploit per-partition timestamp characteristics.
     * The assigner is kept in serialized form, to deserialize it into multiple copies. */
    private SerializedValue<AssignerWithPunctuatedWatermarks<T>> punctuatedWatermarkAssigner;

    /**
     * User-set flag determining whether or not to commit on checkpoints.
     * Note: this flag does not represent the final offset commit mode.
     */
    private boolean enableCommitOnCheckpoints = true;

    /**
     * User-set flag to disable filtering restored partitions with current topics descriptor.
     */
    private boolean filterRestoredPartitionsWithCurrentTopicsDescriptor = true;

    /**
     * The offset commit mode for the consumer.
     * The value of this can only be determined in {@link FlinkKafkaConsumerBase#open(Configuration)} since it depends
     * on whether or not checkpointing is enabled for the job.
     */
    private OffsetCommitMode offsetCommitMode;

    /** User configured value for discovery interval, in milliseconds. */
    private final long discoveryIntervalMillis;

    /** The startup mode for the consumer (default is {@link StartupMode#GROUP_OFFSETS}). */
    private StartupMode startupMode = StartupMode.GROUP_OFFSETS;

    /** Specific startup offsets; only relevant when startup mode is {@link StartupMode#SPECIFIC_OFFSETS}. */
    private Map<KafkaTopicPartition, Long> specificStartupOffsets;

    /** Timestamp to determine startup offsets; only relevant when startup mode is {@link StartupMode#TIMESTAMP}. */
    private Long startupOffsetsTimestamp;

    // ------------------------------------------------------------------------
    //  runtime state (used individually by each parallel subtask)
    // ------------------------------------------------------------------------

    /** Data for pending but uncommitted offsets. */
    private final LinkedMap pendingOffsetsToCommit = new LinkedMap();

    /** The fetcher implements the connections to the Kafka brokers. */
    // NOTE(review): transient volatile — written by the task thread, read by cancel()
    // from another thread.
    private transient volatile AbstractFetcher<T, ?> kafkaFetcher;

    /** The partition discoverer, used to find new partitions. */
    private transient volatile AbstractPartitionDiscoverer partitionDiscoverer;

    /**
     * The offsets to restore to, if the consumer restores state from a checkpoint.
     *
     * <p>This map will be populated by the {@link #initializeState(FunctionInitializationContext)} method.
     *
     * <p>Using a sorted map as the ordering is important when using restored state
     * to seed the partition discoverer.
     */
    private transient volatile TreeMap<KafkaTopicPartition, Long> restoredState;

    /** Accessor for state in the operator state backend. */
    private transient ListState<Tuple2<KafkaTopicPartition, Long>> unionOffsetStates;

    /**
     * Flag indicating whether the consumer is restored from older state written with Flink 1.1 or 1.2.
     * When the current run is restored from older state, partition discovery is disabled.
     */
    private boolean restoredFromOldState;

    /** Discovery loop, executed in a separate thread. */
    private transient volatile Thread discoveryLoopThread;

    /** Flag indicating whether the consumer is still running.
     * Cleared by cancel(); checked by the fetch and discovery loops. */
    private volatile boolean running = true;

    // ------------------------------------------------------------------------
    //  internal metrics
    // ------------------------------------------------------------------------

    /**
     * Flag indicating whether or not metrics should be exposed.
     * If {@code true}, offset metrics (e.g. current offset, committed offset) and
     * Kafka-shipped metrics will be registered.
     */
    private final boolean useMetrics;

    /** Counter for successful Kafka offset commits. */
    private transient Counter successfulCommits;

    /** Counter for failed Kafka offset commits. */
    private transient Counter failedCommits;

    /** Callback interface that will be invoked upon async Kafka commit completion.
     * Please be aware that default callback implementation in base class does not
     * provide any guarantees on thread-safety. This is sufficient for now because current
     * supported Kafka connectors guarantee no more than 1 concurrent async pending offset
     * commit.
     */
    private transient KafkaCommitCallback offsetCommitCallback;

    // ------------------------------------------------------------------------

    /**
     * Base constructor.
     *
     * @param topics fixed list of topics to subscribe to (null, if using topic pattern)
     * @param topicPattern the topic pattern to subscribe to (null, if using fixed topics)
     * @param deserializer The deserializer to turn raw byte messages into Java/Scala objects.
     * @param discoveryIntervalMillis the topic / partition discovery interval, in
     *                                milliseconds (0 if discovery is disabled).
*/ public FlinkKafkaConsumerBase( List<String> topics, Pattern topicPattern, KafkaDeserializationSchema<T> deserializer, long discoveryIntervalMillis, boolean useMetrics) { this.topicsDescriptor = new KafkaTopicsDescriptor(topics, topicPattern); this.deserializer = checkNotNull(deserializer, "valueDeserializer"); checkArgument( discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED || discoveryIntervalMillis >= 0, "Cannot define a negative value for the topic / partition discovery interval."); this.discoveryIntervalMillis = discoveryIntervalMillis; this.useMetrics = useMetrics; } /** * Make sure that auto commit is disabled when our offset commit mode is ON_CHECKPOINTS. * This overwrites whatever setting the user configured in the properties. * @param properties - Kafka configuration properties to be adjusted * @param offsetCommitMode offset commit mode */ static void adjustAutoCommitConfig(Properties properties, OffsetCommitMode offsetCommitMode) { if (offsetCommitMode == OffsetCommitMode.ON_CHECKPOINTS || offsetCommitMode == OffsetCommitMode.DISABLED) { properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); } } // ------------------------------------------------------------------------ // Configuration // ------------------------------------------------------------------------ /** * Specifies an {@link AssignerWithPunctuatedWatermarks} to emit watermarks in a punctuated manner. * The watermark extractor will run per Kafka partition, watermarks will be merged across partitions * in the same way as in the Flink runtime, when streams are merged. * * <p>When a subtask of a FlinkKafkaConsumer source reads multiple Kafka partitions, * the streams from the partitions are unioned in a "first come first serve" fashion. Per-partition * characteristics are usually lost that way. 
For example, if the timestamps are strictly ascending
     * per Kafka partition, they will not be strictly ascending in the resulting Flink DataStream, if the
     * parallel source subtask reads more than one partition.
     *
     * <p>Running timestamp extractors / watermark generators directly inside the Kafka source, per Kafka
     * partition, allows users to let them exploit the per-partition characteristics.
     *
     * <p>Note: One can use either an {@link AssignerWithPunctuatedWatermarks} or an
     * {@link AssignerWithPeriodicWatermarks}, not both at the same time.
     *
     * @param assigner The timestamp assigner / watermark generator to use.
     * @return The consumer object, to allow function chaining.
     */
    public FlinkKafkaConsumerBase<T> assignTimestampsAndWatermarks(AssignerWithPunctuatedWatermarks<T> assigner) {
        checkNotNull(assigner);

        // The punctuated and periodic assigners are mutually exclusive.
        if (this.periodicWatermarkAssigner != null) {
            throw new IllegalStateException("A periodic watermark emitter has already been set.");
        }
        try {
            ClosureCleaner.clean(assigner, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
            // Kept in serialized form so each partition can deserialize its own copy.
            this.punctuatedWatermarkAssigner = new SerializedValue<>(assigner);
            return this;
        } catch (Exception e) {
            throw new IllegalArgumentException("The given assigner is not serializable", e);
        }
    }

    /**
     * Specifies an {@link AssignerWithPeriodicWatermarks} to emit watermarks in a periodic manner.
     * The watermark extractor will run per Kafka partition, watermarks will be merged across partitions
     * in the same way as in the Flink runtime, when streams are merged.
     *
     * <p>When a subtask of a FlinkKafkaConsumer source reads multiple Kafka partitions,
     * the streams from the partitions are unioned in a "first come first serve" fashion. Per-partition
     * characteristics are usually lost that way. For example, if the timestamps are strictly ascending
     * per Kafka partition, they will not be strictly ascending in the resulting Flink DataStream, if the
     * parallel source subtask reads more than one partition.
* * <p>Running timestamp extractors / watermark generators directly inside the Kafka source, per Kafka * partition, allows users to let them exploit the per-partition characteristics. * * <p>Note: One can use either an {@link AssignerWithPunctuatedWatermarks} or an * {@link AssignerWithPeriodicWatermarks}, not both at the same time. * * @param assigner The timestamp assigner / watermark generator to use. * @return The consumer object, to allow function chaining. */ public FlinkKafkaConsumerBase<T> assignTimestampsAndWatermarks(AssignerWithPeriodicWatermarks<T> assigner) { checkNotNull(assigner); if (this.punctuatedWatermarkAssigner != null) { throw new IllegalStateException("A punctuated watermark emitter has already been set."); } try { ClosureCleaner.clean(assigner, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true); this.periodicWatermarkAssigner = new SerializedValue<>(assigner); return this; } catch (Exception e) { throw new IllegalArgumentException("The given assigner is not serializable", e); } } /** * Specifies whether or not the consumer should commit offsets back to Kafka on checkpoints. * * <p>This setting will only have effect if checkpointing is enabled for the job. * If checkpointing isn't enabled, only the "auto.commit.enable" (for 0.8) / "enable.auto.commit" (for 0.9+) * property settings will be used. * * @return The consumer object, to allow function chaining. */ public FlinkKafkaConsumerBase<T> setCommitOffsetsOnCheckpoints(boolean commitOnCheckpoints) { this.enableCommitOnCheckpoints = commitOnCheckpoints; return this; } /** * Specifies the consumer to start reading from the earliest offset for all partitions. * This lets the consumer ignore any committed group offsets in Zookeeper / Kafka brokers. * * <p>This method does not affect where partitions are read from when the consumer is restored * from a checkpoint or savepoint. When the consumer is restored from a checkpoint or * savepoint, only the offsets in the restored state will be used. 
* * @return The consumer object, to allow function chaining. */ public FlinkKafkaConsumerBase<T> setStartFromEarliest() { this.startupMode = StartupMode.EARLIEST; this.startupOffsetsTimestamp = null; this.specificStartupOffsets = null; return this; } /** * Specifies the consumer to start reading from the latest offset for all partitions. * This lets the consumer ignore any committed group offsets in Zookeeper / Kafka brokers. * * <p>This method does not affect where partitions are read from when the consumer is restored * from a checkpoint or savepoint. When the consumer is restored from a checkpoint or * savepoint, only the offsets in the restored state will be used. * * @return The consumer object, to allow function chaining. */ public FlinkKafkaConsumerBase<T> setStartFromLatest() { this.startupMode = StartupMode.LATEST; this.startupOffsetsTimestamp = null; this.specificStartupOffsets = null; return this; } /** * Specifies the consumer to start reading partitions from a specified timestamp. * The specified timestamp must be before the current timestamp. * This lets the consumer ignore any committed group offsets in Zookeeper / Kafka brokers. * * <p>The consumer will look up the earliest offset whose timestamp is greater than or equal * to the specific timestamp from Kafka. If there's no such offset, the consumer will use the * latest offset to read data from kafka. * * <p>This method does not affect where partitions are read from when the consumer is restored * from a checkpoint or savepoint. When the consumer is restored from a checkpoint or * savepoint, only the offsets in the restored state will be used. * * @param startupOffsetsTimestamp timestamp for the startup offsets, as milliseconds from epoch. * * @return The consumer object, to allow function chaining. */ // NOTE - // This method is implemented in the base class because this is where the startup logging and verifications live. 
// However, it is not publicly exposed since only newer Kafka versions support the functionality. // Version-specific subclasses which can expose the functionality should override and allow public access. protected FlinkKafkaConsumerBase<T> setStartFromTimestamp(long startupOffsetsTimestamp) { checkArgument(startupOffsetsTimestamp >= 0, "The provided value for the startup offsets timestamp is invalid."); long currentTimestamp = System.currentTimeMillis(); checkArgument(startupOffsetsTimestamp <= currentTimestamp, "Startup time[%s] must be before current time[%s].", startupOffsetsTimestamp, currentTimestamp); this.startupMode = StartupMode.TIMESTAMP; this.startupOffsetsTimestamp = startupOffsetsTimestamp; this.specificStartupOffsets = null; return this; } /** * Specifies the consumer to start reading from any committed group offsets found * in Zookeeper / Kafka brokers. The "group.id" property must be set in the configuration * properties. If no offset can be found for a partition, the behaviour in "auto.offset.reset" * set in the configuration properties will be used for the partition. * * <p>This method does not affect where partitions are read from when the consumer is restored * from a checkpoint or savepoint. When the consumer is restored from a checkpoint or * savepoint, only the offsets in the restored state will be used. * * @return The consumer object, to allow function chaining. */ public FlinkKafkaConsumerBase<T> setStartFromGroupOffsets() { this.startupMode = StartupMode.GROUP_OFFSETS; this.startupOffsetsTimestamp = null; this.specificStartupOffsets = null; return this; } /** * Specifies the consumer to start reading partitions from specific offsets, set independently for each partition. * The specified offset should be the offset of the next record that will be read from partitions. * This lets the consumer ignore any committed group offsets in Zookeeper / Kafka brokers. 
* * <p>If the provided map of offsets contains entries whose {@link KafkaTopicPartition} is not subscribed by the * consumer, the entry will be ignored. If the consumer subscribes to a partition that does not exist in the provided * map of offsets, the consumer will fallback to the default group offset behaviour (see * {@link FlinkKafkaConsumerBase#setStartFromGroupOffsets()}) for that particular partition. * * <p>If the specified offset for a partition is invalid, or the behaviour for that partition is defaulted to group * offsets but still no group offset could be found for it, then the "auto.offset.reset" behaviour set in the * configuration properties will be used for the partition * * <p>This method does not affect where partitions are read from when the consumer is restored * from a checkpoint or savepoint. When the consumer is restored from a checkpoint or * savepoint, only the offsets in the restored state will be used. * * @return The consumer object, to allow function chaining. */ public FlinkKafkaConsumerBase<T> setStartFromSpecificOffsets(Map<KafkaTopicPartition, Long> specificStartupOffsets) { this.startupMode = StartupMode.SPECIFIC_OFFSETS; this.startupOffsetsTimestamp = null; this.specificStartupOffsets = checkNotNull(specificStartupOffsets); return this; } /** * By default, when restoring from a checkpoint / savepoint, the consumer always * ignores restored partitions that are no longer associated with the current specified topics or * topic pattern to subscribe to. * * <p>This method configures the consumer to not filter the restored partitions, * therefore always attempting to consume whatever partition was present in the * previous execution regardless of the specified topics to subscribe to in the * current execution. * * @return The consumer object, to allow function chaining. 
*/
    public FlinkKafkaConsumerBase<T> disableFilterRestoredPartitionsWithSubscribedTopics() {
        this.filterRestoredPartitionsWithCurrentTopicsDescriptor = false;
        return this;
    }

    // ------------------------------------------------------------------------
    //  Work methods
    // ------------------------------------------------------------------------

    /**
     * Determines the offset commit mode, creates and opens the partition discoverer, and
     * computes the set of partitions (with their start offsets) this subtask will read.
     * Restored state, if present, takes precedence; otherwise the start offsets are
     * derived from the configured {@link StartupMode}.
     */
    @Override
    public void open(Configuration configuration) throws Exception {
        // determine the offset commit mode
        this.offsetCommitMode = OffsetCommitModes.fromConfiguration(
                getIsAutoCommitEnabled(),
                enableCommitOnCheckpoints,
                ((StreamingRuntimeContext) getRuntimeContext()).isCheckpointingEnabled());

        // create the partition discoverer
        this.partitionDiscoverer = createPartitionDiscoverer(
                topicsDescriptor,
                getRuntimeContext().getIndexOfThisSubtask(),
                getRuntimeContext().getNumberOfParallelSubtasks());
        this.partitionDiscoverer.open();

        subscribedPartitionsToStartOffsets = new HashMap<>();
        final List<KafkaTopicPartition> allPartitions = partitionDiscoverer.discoverPartitions();
        if (restoredState != null) {
            // newly discovered partitions that are absent from the restored state start from earliest
            for (KafkaTopicPartition partition : allPartitions) {
                if (!restoredState.containsKey(partition)) {
                    restoredState.put(partition, KafkaTopicPartitionStateSentinel.EARLIEST_OFFSET);
                }
            }

            for (Map.Entry<KafkaTopicPartition, Long> restoredStateEntry : restoredState.entrySet()) {
                if (!restoredFromOldState) {
                    // seed the partition discoverer with the union state while filtering out
                    // restored partitions that should not be subscribed by this subtask
                    if (KafkaTopicPartitionAssigner.assign(
                        restoredStateEntry.getKey(), getRuntimeContext().getNumberOfParallelSubtasks())
                            == getRuntimeContext().getIndexOfThisSubtask()){
                        subscribedPartitionsToStartOffsets.put(restoredStateEntry.getKey(), restoredStateEntry.getValue());
                    }
                } else {
                    // when restoring from older 1.1 / 1.2 state, the restored state would not be the union state;
                    // in this case, just use the restored state as the subscribed partitions
                    subscribedPartitionsToStartOffsets.put(restoredStateEntry.getKey(), restoredStateEntry.getValue());
                }
            }

            if (filterRestoredPartitionsWithCurrentTopicsDescriptor) {
                subscribedPartitionsToStartOffsets.entrySet().removeIf(entry -> {
                    if (!topicsDescriptor.isMatchingTopic(entry.getKey().getTopic())) {
                        LOG.warn(
                            "{} is removed from subscribed partitions since it is no longer associated with topics descriptor of current execution.",
                            entry.getKey());
                        return true;
                    }
                    return false;
                });
            }

            LOG.info("Consumer subtask {} will start reading {} partitions with offsets in restored state: {}",
                getRuntimeContext().getIndexOfThisSubtask(), subscribedPartitionsToStartOffsets.size(), subscribedPartitionsToStartOffsets);
        } else {
            // use the partition discoverer to fetch the initial seed partitions,
            // and set their initial offsets depending on the startup mode.
            // for SPECIFIC_OFFSETS and TIMESTAMP modes, we set the specific offsets now;
            // for other modes (EARLIEST, LATEST, and GROUP_OFFSETS), the offset is lazily determined
            // when the partition is actually read.
            switch (startupMode) {
                case SPECIFIC_OFFSETS:
                    if (specificStartupOffsets == null) {
                        throw new IllegalStateException(
                            "Startup mode for the consumer set to " + StartupMode.SPECIFIC_OFFSETS +
                                ", but no specific offsets were specified.");
                    }

                    for (KafkaTopicPartition seedPartition : allPartitions) {
                        Long specificOffset = specificStartupOffsets.get(seedPartition);
                        if (specificOffset != null) {
                            // since the specified offsets represent the next record to read, we subtract
                            // it by one so that the initial state of the consumer will be correct
                            subscribedPartitionsToStartOffsets.put(seedPartition, specificOffset - 1);
                        } else {
                            // default to group offset behaviour if the user-provided specific offsets
                            // do not contain a value for this partition
                            subscribedPartitionsToStartOffsets.put(seedPartition, KafkaTopicPartitionStateSentinel.GROUP_OFFSET);
                        }
                    }
                    break;
                case TIMESTAMP:
                    if (startupOffsetsTimestamp == null) {
                        throw new IllegalStateException(
                            "Startup mode for the consumer set to " + StartupMode.TIMESTAMP +
                                ", but no startup timestamp was specified.");
                    }

                    for (Map.Entry<KafkaTopicPartition, Long> partitionToOffset
                            : fetchOffsetsWithTimestamp(allPartitions, startupOffsetsTimestamp).entrySet()) {
                        subscribedPartitionsToStartOffsets.put(
                            partitionToOffset.getKey(),
                            (partitionToOffset.getValue() == null)
                                // if an offset cannot be retrieved for a partition with the given timestamp,
                                // we default to using the latest offset for the partition
                                ? KafkaTopicPartitionStateSentinel.LATEST_OFFSET
                                // since the specified offsets represent the next record to read, we subtract
                                // it by one so that the initial state of the consumer will be correct
                                : partitionToOffset.getValue() - 1);
                    }
                    break;
                default:
                    for (KafkaTopicPartition seedPartition : allPartitions) {
                        subscribedPartitionsToStartOffsets.put(seedPartition, startupMode.getStateSentinel());
                    }
            }

            if (!subscribedPartitionsToStartOffsets.isEmpty()) {
                switch (startupMode) {
                    case EARLIEST:
                        LOG.info("Consumer subtask {} will start reading the following {} partitions from the earliest offsets: {}",
                            getRuntimeContext().getIndexOfThisSubtask(),
                            subscribedPartitionsToStartOffsets.size(),
                            subscribedPartitionsToStartOffsets.keySet());
                        break;
                    case LATEST:
                        LOG.info("Consumer subtask {} will start reading the following {} partitions from the latest offsets: {}",
                            getRuntimeContext().getIndexOfThisSubtask(),
                            subscribedPartitionsToStartOffsets.size(),
                            subscribedPartitionsToStartOffsets.keySet());
                        break;
                    case TIMESTAMP:
                        LOG.info("Consumer subtask {} will start reading the following {} partitions from timestamp {}: {}",
                            getRuntimeContext().getIndexOfThisSubtask(),
                            subscribedPartitionsToStartOffsets.size(),
                            startupOffsetsTimestamp,
                            subscribedPartitionsToStartOffsets.keySet());
                        break;
                    case SPECIFIC_OFFSETS:
                        LOG.info("Consumer subtask {} will start reading the following {} partitions from the specified startup offsets {}: {}",
                            getRuntimeContext().getIndexOfThisSubtask(),
                            subscribedPartitionsToStartOffsets.size(),
                            specificStartupOffsets,
                            subscribedPartitionsToStartOffsets.keySet());

                        List<KafkaTopicPartition> partitionsDefaultedToGroupOffsets = new ArrayList<>(subscribedPartitionsToStartOffsets.size());
                        for (Map.Entry<KafkaTopicPartition, Long> subscribedPartition : subscribedPartitionsToStartOffsets.entrySet()) {
                            if (subscribedPartition.getValue() == KafkaTopicPartitionStateSentinel.GROUP_OFFSET) {
                                partitionsDefaultedToGroupOffsets.add(subscribedPartition.getKey());
                            }
                        }

                        if (partitionsDefaultedToGroupOffsets.size() > 0) {
                            LOG.warn("Consumer subtask {} cannot find offsets for the following {} partitions in the specified startup offsets: {}" +
                                    "; their startup offsets will be defaulted to their committed group offsets in Kafka.",
                                getRuntimeContext().getIndexOfThisSubtask(),
                                partitionsDefaultedToGroupOffsets.size(),
                                partitionsDefaultedToGroupOffsets);
                        }
                        break;
                    case GROUP_OFFSETS:
                        LOG.info("Consumer subtask {} will start reading the following {} partitions from the committed group offsets in Kafka: {}",
                            getRuntimeContext().getIndexOfThisSubtask(),
                            subscribedPartitionsToStartOffsets.size(),
                            subscribedPartitionsToStartOffsets.keySet());
                }
            } else {
                LOG.info("Consumer subtask {} initially has no partitions to read from.",
                    getRuntimeContext().getIndexOfThisSubtask());
            }
        }
    }

    /**
     * Main work method: sets up the commit metrics and async-commit callback, creates the
     * fetcher, then runs the fetch loop (optionally alongside the partition discovery loop).
     */
    @Override
    public void run(SourceContext<T> sourceContext) throws Exception {
        if (subscribedPartitionsToStartOffsets == null) {
            throw new Exception("The partitions were not set for the consumer");
        }

        // initialize commit metrics and default offset callback method
        this.successfulCommits = this.getRuntimeContext().getMetricGroup().counter(COMMITS_SUCCEEDED_METRICS_COUNTER);
        this.failedCommits = this.getRuntimeContext().getMetricGroup().counter(COMMITS_FAILED_METRICS_COUNTER);
        final int subtaskIndex = this.getRuntimeContext().getIndexOfThisSubtask();

        this.offsetCommitCallback = new KafkaCommitCallback() {
            @Override
            public void onSuccess() {
                successfulCommits.inc();
            }

            @Override
            public void onException(Throwable cause) {
                LOG.warn(String.format("Consumer subtask %d failed async Kafka commit.", subtaskIndex), cause);
                failedCommits.inc();
            }
        };

        // mark the subtask as temporarily idle if there are no initial seed partitions;
        // once this subtask discovers some partitions and starts collecting records, the subtask's
        // status will automatically be triggered back to be active.
        if (subscribedPartitionsToStartOffsets.isEmpty()) {
            sourceContext.markAsTemporarilyIdle();
        }

        LOG.info("Consumer subtask {} creating fetcher with offsets {}.",
            getRuntimeContext().getIndexOfThisSubtask(), subscribedPartitionsToStartOffsets);

        // from this point forward:
        //   - 'snapshotState' will draw offsets from the fetcher,
        //     instead of being built from `subscribedPartitionsToStartOffsets`
        //   - 'notifyCheckpointComplete' will start to do work (i.e. commit offsets to
        //     Kafka through the fetcher, if configured to do so)
        this.kafkaFetcher = createFetcher(
                sourceContext,
                subscribedPartitionsToStartOffsets,
                periodicWatermarkAssigner,
                punctuatedWatermarkAssigner,
                (StreamingRuntimeContext) getRuntimeContext(),
                offsetCommitMode,
                getRuntimeContext().getMetricGroup().addGroup(KAFKA_CONSUMER_METRICS_GROUP),
                useMetrics);

        if (!running) {
            return;
        }

        // depending on whether we were restored with the current state version (1.3),
        // remaining logic branches off into 2 paths:
        //  1) New state - partition discovery loop executed as separate thread, with this
        //                 thread running the main fetcher loop
        //  2) Old state - partition discovery is disabled and only the main fetcher loop is executed
        if (discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED) {
            kafkaFetcher.runFetchLoop();
        } else {
            runWithPartitionDiscovery();
        }
    }

    /** Runs the fetch loop on this thread with the discovery loop on a separate thread. */
    private void runWithPartitionDiscovery() throws Exception {
        final AtomicReference<Exception> discoveryLoopErrorRef = new AtomicReference<>();
        createAndStartDiscoveryLoop(discoveryLoopErrorRef);

        kafkaFetcher.runFetchLoop();

        // make sure that the partition discoverer is waked up so that
        // the discoveryLoopThread exits
        partitionDiscoverer.wakeup();
        joinDiscoveryLoopThread();

        // rethrow any errors raised by the discovery loop thread
        final Exception discoveryLoopError = discoveryLoopErrorRef.get();
        if (discoveryLoopError != null) {
            throw new RuntimeException(discoveryLoopError);
        }
    }

    /** Waits for the discovery loop thread, if one was started, to terminate. */
    @VisibleForTesting
    void joinDiscoveryLoopThread() throws InterruptedException {
        if (discoveryLoopThread != null) {
            discoveryLoopThread.join();
        }
    }

    /**
     * Starts the partition discovery loop on a dedicated thread; any exception the loop
     * raises is handed back through {@code discoveryLoopErrorRef}.
     */
    private void createAndStartDiscoveryLoop(AtomicReference<Exception> discoveryLoopErrorRef) {
        discoveryLoopThread = new Thread(() -> {
            try {
                // --------------------- partition discovery loop ---------------------

                // throughout the loop, we always eagerly check if we are still running before
                // performing the next operation, so that we can escape the loop as soon as possible
                while (running) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Consumer subtask {} is trying to discover new partitions ...", getRuntimeContext().getIndexOfThisSubtask());
                    }

                    final List<KafkaTopicPartition> discoveredPartitions;
                    try {
                        discoveredPartitions = partitionDiscoverer.discoverPartitions();
                    } catch (AbstractPartitionDiscoverer.WakeupException | AbstractPartitionDiscoverer.ClosedException e) {
                        // the partition discoverer may have been closed or woken up before or during the discovery;
                        // this would only happen if the consumer was canceled; simply escape the loop
                        break;
                    }

                    // no need to add the discovered partitions if we were closed during the meantime
                    if (running && !discoveredPartitions.isEmpty()) {
                        kafkaFetcher.addDiscoveredPartitions(discoveredPartitions);
                    }

                    // do not waste any time sleeping if we're not running anymore
                    if (running && discoveryIntervalMillis != 0) {
                        try {
                            Thread.sleep(discoveryIntervalMillis);
                        } catch (InterruptedException iex) {
                            // may be interrupted if the consumer was canceled midway; simply escape the loop
                            break;
                        }
                    }
                }
            } catch (Exception e) {
                discoveryLoopErrorRef.set(e);
            } finally {
                // calling cancel will also let the fetcher loop escape
                // (if not running, cancel() was already called)
                if (running) {
                    cancel();
                }
            }
        }, "Kafka Partition Discovery for " + getRuntimeContext().getTaskNameWithSubtasks());

        discoveryLoopThread.start();
    }

    /** Signals the discovery loop and the fetcher to stop; may be called from another thread. */
    @Override
    public void cancel() {
        // set ourselves as not running;
        // this would let the main discovery loop escape as soon as possible
        running = false;

        if (discoveryLoopThread != null) {

            if (partitionDiscoverer != null) {
                // we cannot close the discoverer here, as it is error-prone to concurrent access;
                // only wakeup the discoverer, the discovery loop will clean itself up after it escapes
                partitionDiscoverer.wakeup();
            }

            // the discovery loop may currently be sleeping in-between
            // consecutive discoveries; interrupt to shutdown faster
            discoveryLoopThread.interrupt();
        }

        // abort the fetcher, if there is one
        if (kafkaFetcher != null) {
            kafkaFetcher.cancel();
        }
    }

    /** Cancels the source, waits for the discovery thread, and closes the partition discoverer. */
    @Override
    public void close() throws Exception {
        cancel();

        joinDiscoveryLoopThread();

        Exception exception = null;
        if (partitionDiscoverer != null) {
            try {
                partitionDiscoverer.close();
            } catch (Exception e) {
                exception = e;
            }
        }

        try {
            super.close();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }

        if (exception != null) {
            throw exception;
        }
    }

    // ------------------------------------------------------------------------
    //  Checkpoint and restore
    // ------------------------------------------------------------------------

    /** Restores partition offsets from checkpointed state (including legacy 1.1 / 1.2 state migration). */
    @Override
    public final void initializeState(FunctionInitializationContext context) throws Exception {

        OperatorStateStore stateStore = context.getOperatorStateStore();

        ListState<Tuple2<KafkaTopicPartition, Long>> oldRoundRobinListState =
            stateStore.getSerializableListState(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME);

        this.unionOffsetStates = stateStore.getUnionListState(new ListStateDescriptor<>(
                OFFSETS_STATE_NAME,
                TypeInformation.of(new TypeHint<Tuple2<KafkaTopicPartition, Long>>() {})));

        if (context.isRestored() && !restoredFromOldState) {
            restoredState = new TreeMap<>(new KafkaTopicPartition.Comparator());

            // migrate from 1.2 state, if there is any
            for (Tuple2<KafkaTopicPartition, Long> kafkaOffset : oldRoundRobinListState.get()) {
                restoredFromOldState = true;
                unionOffsetStates.add(kafkaOffset);
            }
            oldRoundRobinListState.clear();

            if (restoredFromOldState && discoveryIntervalMillis != PARTITION_DISCOVERY_DISABLED) {
                throw new IllegalArgumentException(
                    "Topic / partition discovery cannot be enabled if the job is restored from a savepoint from Flink 1.2.x.");
            }

            // populate actual holder for restored state
            for (Tuple2<KafkaTopicPartition, Long> kafkaOffset : unionOffsetStates.get()) {
                restoredState.put(kafkaOffset.f0, kafkaOffset.f1);
            }

            LOG.info("Consumer subtask {} restored state: {}.", getRuntimeContext().getIndexOfThisSubtask(), restoredState);
        } else {
            LOG.info("Consumer subtask {} has no restore state.", getRuntimeContext().getIndexOfThisSubtask());
        }
    }

    /**
     * Snapshots the current partition offsets into the union list state and, when committing
     * on checkpoints, records them in the pending-commit map keyed by checkpoint id.
     */
    @Override
    public final void snapshotState(FunctionSnapshotContext context) throws Exception {
        if (!running) {
            LOG.debug("snapshotState() called on closed source");
        } else {
            unionOffsetStates.clear();

            final AbstractFetcher<?, ?> fetcher = this.kafkaFetcher;
            if (fetcher == null) {
                // the fetcher has not yet been initialized, which means we need to return the
                // originally restored offsets or the assigned partitions
                for (Map.Entry<KafkaTopicPartition, Long> subscribedPartition : subscribedPartitionsToStartOffsets.entrySet()) {
                    unionOffsetStates.add(Tuple2.of(subscribedPartition.getKey(), subscribedPartition.getValue()));
                }

                if (offsetCommitMode == OffsetCommitMode.ON_CHECKPOINTS) {
                    // the map cannot be asynchronously updated, because only one checkpoint call can happen
                    // on this function at a time: either snapshotState() or notifyCheckpointComplete()
                    pendingOffsetsToCommit.put(context.getCheckpointId(), restoredState);
                }
            } else {
                HashMap<KafkaTopicPartition, Long> currentOffsets = fetcher.snapshotCurrentState();

                if (offsetCommitMode == OffsetCommitMode.ON_CHECKPOINTS) {
                    // the map cannot be asynchronously updated, because only one checkpoint call can happen
                    // on this function at a time: either snapshotState() or notifyCheckpointComplete()
                    pendingOffsetsToCommit.put(context.getCheckpointId(), currentOffsets);
                }

                for (Map.Entry<KafkaTopicPartition, Long> kafkaTopicPartitionLongEntry : currentOffsets.entrySet()) {
                    unionOffsetStates.add(
                        Tuple2.of(kafkaTopicPartitionLongEntry.getKey(), kafkaTopicPartitionLongEntry.getValue()));
                }
            }

            if (offsetCommitMode == OffsetCommitMode.ON_CHECKPOINTS) {
                // truncate the map of pending offsets to commit, to prevent infinite growth
                while (pendingOffsetsToCommit.size() > MAX_NUM_PENDING_CHECKPOINTS) {
                    pendingOffsetsToCommit.remove(0);
                }
            }
        }
    }

    /**
     * On checkpoint completion, commits the offsets recorded for that checkpoint back to
     * Kafka / ZooKeeper (only in ON_CHECKPOINTS mode); older pending checkpoints are dropped.
     */
    @Override
    public final void notifyCheckpointComplete(long checkpointId) throws Exception {
        if (!running) {
            LOG.debug("notifyCheckpointComplete() called on closed source");
            return;
        }

        final AbstractFetcher<?, ?> fetcher = this.kafkaFetcher;
        if (fetcher == null) {
            LOG.debug("notifyCheckpointComplete() called on uninitialized source");
            return;
        }

        if (offsetCommitMode == OffsetCommitMode.ON_CHECKPOINTS) {
            // only one commit operation must be in progress
            if (LOG.isDebugEnabled()) {
                LOG.debug("Consumer subtask {} committing offsets to Kafka/ZooKeeper for checkpoint {}.",
                    getRuntimeContext().getIndexOfThisSubtask(), checkpointId);
            }

            try {
                final int posInMap = pendingOffsetsToCommit.indexOf(checkpointId);
                if (posInMap == -1) {
                    LOG.warn("Consumer subtask {} received confirmation for unknown checkpoint id {}",
                        getRuntimeContext().getIndexOfThisSubtask(), checkpointId);
                    return;
                }

                @SuppressWarnings("unchecked")
                Map<KafkaTopicPartition, Long> offsets =
                    (Map<KafkaTopicPartition, Long>) pendingOffsetsToCommit.remove(posInMap);

                // remove older checkpoints in map
                for (int i = 0; i < posInMap; i++) {
                    pendingOffsetsToCommit.remove(0);
                }

                if (offsets == null || offsets.size() == 0) {
                    LOG.debug("Consumer subtask {} has empty checkpoint state.", getRuntimeContext().getIndexOfThisSubtask());
                    return;
                }

                fetcher.commitInternalOffsetsToKafka(offsets, offsetCommitCallback);
            } catch (Exception e) {
                if (running) {
                    throw e;
                }
                // else ignore exception if we are no longer running
            }
        }
    }

    // ------------------------------------------------------------------------
    //  Kafka Consumer specific methods
    //
------------------------------------------------------------------------ /** * Creates the fetcher that connect to the Kafka brokers, pulls data, deserialized the * data, and emits it into the data streams. * * @param sourceContext The source context to emit data to. * @param subscribedPartitionsToStartOffsets The set of partitions that this subtask should handle, with their start offsets. * @param watermarksPeriodic Optional, a serialized timestamp extractor / periodic watermark generator. * @param watermarksPunctuated Optional, a serialized timestamp extractor / punctuated watermark generator. * @param runtimeContext The task's runtime context. * * @return The instantiated fetcher * * @throws Exception The method should forward exceptions */ protected abstract AbstractFetcher<T, ?> createFetcher( SourceContext<T> sourceContext, Map<KafkaTopicPartition, Long> subscribedPartitionsToStartOffsets, SerializedValue<AssignerWithPeriodicWatermarks<T>> watermarksPeriodic, SerializedValue<AssignerWithPunctuatedWatermarks<T>> watermarksPunctuated, StreamingRuntimeContext runtimeContext, OffsetCommitMode offsetCommitMode, MetricGroup kafkaMetricGroup, boolean useMetrics) throws Exception; /** * Creates the partition discoverer that is used to find new partitions for this subtask. * * @param topicsDescriptor Descriptor that describes whether we are discovering partitions for fixed topics or a topic pattern. * @param indexOfThisSubtask The index of this consumer subtask. * @param numParallelSubtasks The total number of parallel consumer subtasks. 
* * @return The instantiated partition discoverer */ protected abstract AbstractPartitionDiscoverer createPartitionDiscoverer( KafkaTopicsDescriptor topicsDescriptor, int indexOfThisSubtask, int numParallelSubtasks); protected abstract boolean getIsAutoCommitEnabled(); protected abstract Map<KafkaTopicPartition, Long> fetchOffsetsWithTimestamp( Collection<KafkaTopicPartition> partitions, long timestamp); // ------------------------------------------------------------------------ // ResultTypeQueryable methods // ------------------------------------------------------------------------ @Override public TypeInformation<T> getProducedType() { return deserializer.getProducedType(); } // ------------------------------------------------------------------------ // Test utilities // ------------------------------------------------------------------------ @VisibleForTesting Map<KafkaTopicPartition, Long> getSubscribedPartitionsToStartOffsets() { return subscribedPartitionsToStartOffsets; } @VisibleForTesting TreeMap<KafkaTopicPartition, Long> getRestoredState() { return restoredState; } @VisibleForTesting OffsetCommitMode getOffsetCommitMode() { return offsetCommitMode; } @VisibleForTesting LinkedMap getPendingOffsetsToCommit() { return pendingOffsetsToCommit; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.io; import com.google.common.base.Charsets; import org.elasticsearch.common.util.Callback; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.util.List; import java.util.Objects; /** * Simple utility methods for file and stream copying. * All copy methods use a block size of 4096 bytes, * and close all affected streams when done. * <p/> * <p>Mainly for use within the framework, * but also useful for application code. */ public abstract class Streams { public static final int BUFFER_SIZE = 1024 * 8; //--------------------------------------------------------------------- // Copy methods for java.io.InputStream / java.io.OutputStream //--------------------------------------------------------------------- public static long copy(InputStream in, OutputStream out) throws IOException { return copy(in, out, new byte[BUFFER_SIZE]); } /** * Copy the contents of the given InputStream to the given OutputStream. * Closes both streams when done. 
* * @param in the stream to copy from * @param out the stream to copy to * @return the number of bytes copied * @throws IOException in case of I/O errors */ public static long copy(InputStream in, OutputStream out, byte[] buffer) throws IOException { Objects.requireNonNull(in, "No InputStream specified"); Objects.requireNonNull(out, "No OutputStream specified"); try { long byteCount = 0; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); byteCount += bytesRead; } out.flush(); return byteCount; } finally { try { in.close(); } catch (IOException ex) { // do nothing } try { out.close(); } catch (IOException ex) { // do nothing } } } /** * Copy the contents of the given byte array to the given OutputStream. * Closes the stream when done. * * @param in the byte array to copy from * @param out the OutputStream to copy to * @throws IOException in case of I/O errors */ public static void copy(byte[] in, OutputStream out) throws IOException { Objects.requireNonNull(in, "No input byte array specified"); Objects.requireNonNull(out, "No OutputStream specified"); try { out.write(in); } finally { try { out.close(); } catch (IOException ex) { // do nothing } } } //--------------------------------------------------------------------- // Copy methods for java.io.Reader / java.io.Writer //--------------------------------------------------------------------- /** * Copy the contents of the given Reader to the given Writer. * Closes both when done. 
* * @param in the Reader to copy from * @param out the Writer to copy to * @return the number of characters copied * @throws IOException in case of I/O errors */ public static int copy(Reader in, Writer out) throws IOException { Objects.requireNonNull(in, "No Reader specified"); Objects.requireNonNull(out, "No Writer specified"); try { int byteCount = 0; char[] buffer = new char[BUFFER_SIZE]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); byteCount += bytesRead; } out.flush(); return byteCount; } finally { try { in.close(); } catch (IOException ex) { // do nothing } try { out.close(); } catch (IOException ex) { // do nothing } } } /** * Copy the contents of the given String to the given output Writer. * Closes the write when done. * * @param in the String to copy from * @param out the Writer to copy to * @throws IOException in case of I/O errors */ public static void copy(String in, Writer out) throws IOException { Objects.requireNonNull(in, "No input String specified"); Objects.requireNonNull(out, "No Writer specified"); try { out.write(in); } finally { try { out.close(); } catch (IOException ex) { // do nothing } } } /** * Copy the contents of the given Reader into a String. * Closes the reader when done. 
* * @param in the reader to copy from * @return the String that has been copied to * @throws IOException in case of I/O errors */ public static String copyToString(Reader in) throws IOException { StringWriter out = new StringWriter(); copy(in, out); return out.toString(); } public static int readFully(Reader reader, char[] dest) throws IOException { return readFully(reader, dest, 0, dest.length); } public static int readFully(Reader reader, char[] dest, int offset, int len) throws IOException { int read = 0; while (read < len) { final int r = reader.read(dest, offset + read, len - read); if (r == -1) { break; } read += r; } return read; } public static int readFully(InputStream reader, byte[] dest) throws IOException { return readFully(reader, dest, 0, dest.length); } public static int readFully(InputStream reader, byte[] dest, int offset, int len) throws IOException { int read = 0; while (read < len) { final int r = reader.read(dest, offset + read, len - read); if (r == -1) { break; } read += r; } return read; } public static List<String> readAllLines(InputStream input) throws IOException { final List<String> lines = new ArrayList<>(); readAllLines(input, new Callback<String>() { @Override public void handle(String line) { lines.add(line); } }); return lines; } public static void readAllLines(InputStream input, Callback<String> callback) throws IOException { try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, Charsets.UTF_8))) { String line; while ((line = reader.readLine()) != null) { callback.handle(line); } } } }
// ======================================================================== // Copyright (c) 2004-2009 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // You may elect to redistribute this code under either of these licenses. // ======================================================================== package org.eclipse.jetty.server; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import javax.servlet.ServletInputStream; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletResponse; import org.eclipse.jetty.continuation.ContinuationThrowable; import org.eclipse.jetty.http.AbstractGenerator; import org.eclipse.jetty.http.EncodedHttpURI; import org.eclipse.jetty.http.Generator; import org.eclipse.jetty.http.HttpBuffers; import org.eclipse.jetty.http.HttpContent; import org.eclipse.jetty.http.HttpException; import org.eclipse.jetty.http.HttpFields; import org.eclipse.jetty.http.HttpGenerator; import org.eclipse.jetty.http.HttpHeaderValues; import org.eclipse.jetty.http.HttpHeaders; import org.eclipse.jetty.http.HttpMethods; import org.eclipse.jetty.http.HttpParser; import org.eclipse.jetty.http.HttpStatus; import org.eclipse.jetty.http.HttpURI; import org.eclipse.jetty.http.HttpVersions; import org.eclipse.jetty.http.MimeTypes; import org.eclipse.jetty.http.Parser; import org.eclipse.jetty.io.AbstractConnection; import org.eclipse.jetty.io.AsyncEndPoint; import org.eclipse.jetty.io.Buffer; import org.eclipse.jetty.io.BufferCache.CachedBuffer; import 
org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.io.EofException;
import org.eclipse.jetty.io.RuntimeIOException;
import org.eclipse.jetty.io.UncheckedPrintWriter;
import org.eclipse.jetty.server.nio.NIOConnector;
import org.eclipse.jetty.server.ssl.SslConnector;
import org.eclipse.jetty.util.QuotedStringTokenizer;
import org.eclipse.jetty.util.StringUtil;
import org.eclipse.jetty.util.URIUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.util.thread.Timeout;

/**
 * <p>A HttpConnection represents the connection of a HTTP client to the server
 * and is created by an instance of a {@link Connector}. Its prime function is
 * to associate {@link Request} and {@link Response} instances with a {@link EndPoint}.
 * </p>
 * <p>
 * A connection is also the prime mechanism used by jetty to recycle objects without
 * pooling. The {@link Request}, {@link Response}, {@link HttpParser}, {@link HttpGenerator}
 * and {@link HttpFields} instances are all recycled for the duration of
 * a connection. Where appropriate, allocated buffers are also kept associated
 * with the connection via the parser and/or generator.
 * </p>
 * <p>
 * The connection state is held by 3 separate state machines: The request state, the
 * response state and the continuation state. All three state machines must be driven
 * to completion for every request, and all three can complete in any order.
 * </p>
 * <p>
 * The HttpConnection supports protocol upgrade. If on completion of a request, the
 * response code is 101 (switch protocols), then the org.eclipse.jetty.io.Connection
 * request attribute is checked to see if there is a new Connection instance. If so,
 * the new connection is returned from {@link #handle()} and is used for future
 * handling of the underlying connection.
Note that for switching protocols that
 * don't use 101 responses (eg CONNECT), the response should be sent and then the
 * status code changed to 101 before returning from the handler. Implementors
 * of new Connection types should be careful to extract any buffered data from
 * (HttpParser)http.getParser()).getHeaderBuffer() and
 * (HttpParser)http.getParser()).getBodyBuffer() to initialise their new connection.
 * </p>
 *
 */
public class HttpConnection extends AbstractConnection implements Connection
{
    private static final int UNKNOWN = -2;
    // Connection currently being handled by the executing thread, if any.
    private static final ThreadLocal<HttpConnection> __currentConnection = new ThreadLocal<HttpConnection>();

    private int _requests;              // number of requests handled on this connection
    private volatile boolean _handling; // true while handle() is executing

    protected final Connector _connector;
    protected final Server _server;
    protected final HttpURI _uri;

    // Recycled per-request machinery (see class javadoc): parser/generator,
    // header field holders and request/response objects live for the connection.
    protected final Parser _parser;
    protected final HttpFields _requestFields;
    protected final Request _request;
    protected ServletInputStream _in;

    protected final Generator _generator;
    protected final HttpFields _responseFields;
    protected final Response _response;
    protected Output _out;
    protected OutputWriter _writer;
    protected PrintWriter _printWriter;

    int _include; // nesting depth of includes (see include()/included())

    private Object _associatedObject; // associated object

    private int _version = UNKNOWN;

    // Per-request flags, reset in RequestHandler.startRequest().
    private boolean _expect = false;
    private boolean _expect100Continue = false;
    private boolean _expect102Processing = false;
    private boolean _head = false;
    private boolean _host = false;
    private boolean _delayedHandling=false;

    /* ------------------------------------------------------------ */
    public static HttpConnection getCurrentConnection()
    {
        return __currentConnection.get();
    }

    /* ------------------------------------------------------------ */
    protected static void setCurrentConnection(HttpConnection connection)
    {
        __currentConnection.set(connection);
    }

    /* ------------------------------------------------------------ */
    /** Constructor
     *
     */
    public HttpConnection(Connector connector, EndPoint endpoint, Server server)
    {
        super(endpoint);
        // Use the plain HttpURI when the configured URI charset is UTF-8,
        // otherwise decode explicitly with an EncodedHttpURI.
        _uri = StringUtil.__UTF8.equals(URIUtil.__CHARSET)?new HttpURI():new EncodedHttpURI(URIUtil.__CHARSET);
        _connector = connector;
        HttpBuffers ab = (HttpBuffers)_connector;
        _parser = new HttpParser(ab.getRequestBuffers(), endpoint, new RequestHandler());
        _requestFields = new HttpFields();
        _responseFields = new HttpFields(server.getMaxCookieVersion());
        _request = new Request(this);
        _response = new Response(this);
        _generator = new HttpGenerator(ab.getResponseBuffers(), _endp);
        _generator.setSendServerVersion(server.getSendServerVersion());
        _server = server;
    }

    /* ------------------------------------------------------------ */
    /** Constructor for subclasses supplying their own parser/generator/request. */
    protected HttpConnection(Connector connector, EndPoint endpoint, Server server,
            Parser parser, Generator generator, Request request)
    {
        super(endpoint);
        _uri = URIUtil.__CHARSET.equals(StringUtil.__UTF8)?new HttpURI():new EncodedHttpURI(URIUtil.__CHARSET);
        _connector = connector;
        _parser = parser;
        _requestFields = new HttpFields();
        _responseFields = new HttpFields(server.getMaxCookieVersion());
        _request = request;
        _response = new Response(this);
        _generator = generator;
        _generator.setSendServerVersion(server.getSendServerVersion());
        _server = server;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the parser used by this connection
     */
    public Parser getParser()
    {
        return _parser;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the number of requests handled by this connection
     */
    public int getRequests()
    {
        return _requests;
    }

    /* ------------------------------------------------------------ */
    public Server getServer()
    {
        return _server;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the associatedObject.
     */
    public Object getAssociatedObject()
    {
        return _associatedObject;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param associatedObject The associatedObject to set.
     */
    public void setAssociatedObject(Object associatedObject)
    {
        _associatedObject = associatedObject;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the connector.
     */
    public Connector getConnector()
    {
        return _connector;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the requestFields.
     */
    public HttpFields getRequestFields()
    {
        return _requestFields;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the responseFields.
     */
    public HttpFields getResponseFields()
    {
        return _responseFields;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return The result of calling {@link #getConnector}.{@link Connector#isConfidential(Request) isConfidential}(request), or false
     *         if there is no connector.
     */
    public boolean isConfidential(Request request)
    {
        if (_connector!=null)
            return _connector.isConfidential(request);
        return false;
    }

    /* ------------------------------------------------------------ */
    /**
     * Find out if the request is INTEGRAL security.
     * @param request the request to check
     * @return <code>true</code> if there is a {@link #getConnector() connector} and it considers <code>request</code>
     *         to be {@link Connector#isIntegral(Request) integral}
     */
    public boolean isIntegral(Request request)
    {
        if (_connector!=null)
            return _connector.isIntegral(request);
        return false;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the connector's resolve-names setting
     */
    public boolean getResolveNames()
    {
        return _connector.getResolveNames();
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the request.
     */
    public Request getRequest()
    {
        return _request;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return Returns the response.
     */
    public Response getResponse()
    {
        return _response;
    }

    /* ------------------------------------------------------------ */
    /**
     * Get the inputStream from the connection.
     * <p>
     * If the associated response has the Expect header set to 100 Continue,
     * then accessing the input stream indicates that the handler/servlet
     * is ready for the request body and thus a 100 Continue response is sent.
     *
     * @return The input stream for this connection.
     *         The stream will be created if it does not already exist.
     */
    public ServletInputStream getInputStream() throws IOException
    {
        // If the client is expecting 100 CONTINUE, then send it now.
        if (_expect100Continue)
        {
            // is content missing?
            if (((HttpParser)_parser).getHeaderBuffer()==null || ((HttpParser)_parser).getHeaderBuffer().length()<2)
            {
                if (_generator.isCommitted())
                    throw new IllegalStateException("Committed before 100 Continues");

                ((HttpGenerator)_generator).send1xx(HttpStatus.CONTINUE_100);
            }
            _expect100Continue=false;
        }

        if (_in == null)
            _in = new HttpInput(((HttpParser)_parser),_connector.getMaxIdleTime());
        return _in;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return The output stream for this connection. The stream will be created if it does not already exist.
     */
    public ServletOutputStream getOutputStream()
    {
        if (_out == null)
            _out = new Output();
        return _out;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return A {@link PrintWriter} wrapping the {@link #getOutputStream output stream}. The writer is created if it
     *         does not already exist.
     */
    public PrintWriter getPrintWriter(String encoding)
    {
        getOutputStream();
        if (_writer==null)
        {
            _writer=new OutputWriter();
            _printWriter=new UncheckedPrintWriter(_writer);
        }
        // (re)apply the requested character encoding; the writer instance is recycled
        _writer.setCharacterEncoding(encoding);
        return _printWriter;
    }

    /* ------------------------------------------------------------ */
    public boolean isResponseCommitted()
    {
        return _generator.isCommitted();
    }

    /* ------------------------------------------------------------ */
    /**
     * Drives the parser and generator until no more progress can be made.
     * May return a different Connection instance if the response switched
     * protocols (101) and installed a replacement connection.
     */
    public Connection handle() throws IOException
    {
        Connection connection = this;

        // Loop while more in buffer
        boolean more_in_buffer =true; // assume true until proven otherwise
        boolean progress=true;

        try
        {
            assert getCurrentConnection()==null;
            assert _handling==false;
            _handling=true;
            setCurrentConnection(this);

            while (more_in_buffer && _endp.isOpen())
            {
                try
                {
                    if (_request._async.isAsync())
                    {
                        // TODO - handle the case of input being read for a
                        // suspended request.

                        Log.debug("async request",_request);
                        if (!_request._async.isComplete())
                            handleRequest();
                        else if (!_parser.isComplete())
                        {
                            int parsed=_parser.parseAvailable();
                            if (parsed>0)
                                progress=true;
                        }

                        if (_generator.isCommitted() && !_generator.isComplete())
                            progress|=_generator.flushBuffer()>0;
                        if (_endp.isBufferingOutput())
                            _endp.flush();
                    }
                    else
                    {
                        // If we are not ended then parse available
                        if (!_parser.isComplete())
                        {
                            int parsed=_parser.parseAvailable();
                            if (parsed>0)
                                progress=true;
                        }

                        // Do we have more generating to do?
                        // Loop here because some writes may take multiple steps and
                        // we need to flush them all before potentially blocking in the
                        // next loop.
                        while (_generator.isCommitted() && !_generator.isComplete())
                        {
                            long written=_generator.flushBuffer();
                            if (written<=0)
                                break;
                            progress=true;
                            if (_endp.isBufferingOutput())
                                _endp.flush();
                        }

                        // Flush buffers
                        if (_endp.isBufferingOutput())
                        {
                            _endp.flush();
                            if (!_endp.isBufferingOutput())
                                progress=true;
                        }

                        if (!progress)
                            return this;
                    }
                    progress=false;
                }
                catch (HttpException e)
                {
                    if (Log.isDebugEnabled())
                    {
                        Log.debug("uri="+_uri);
                        Log.debug("fields="+_requestFields);
                        Log.debug(e);
                    }
                    _generator.sendError(e.getStatus(), e.getReason(), null, true);

                    _parser.reset(true);
                    _endp.close();
                }
                finally
                {
                    more_in_buffer = _parser.isMoreInBuffer() || _endp.isBufferingInput();

                    // Is this request/response round complete?
                    if (_parser.isComplete() && _generator.isComplete() && !_endp.isBufferingOutput())
                    {
                        // look for a switched connection instance?
                        Connection switched=(_response.getStatus()==HttpStatus.SWITCHING_PROTOCOLS_101)
                        ?(Connection)_request.getAttribute("org.eclipse.jetty.io.Connection"):null;

                        // have we switched?
                        if (switched!=null)
                        {
                            _parser.reset(true);
                            _generator.reset(true);
                            connection=switched;
                        }
                        else
                        {
                            // No switch, so cleanup and reset
                            if (!_generator.isPersistent() || _endp.isInputShutdown())
                            {
                                _parser.reset(true);
                                more_in_buffer=false;
                                _endp.close();
                            }

                            if (more_in_buffer)
                            {
                                reset(false);
                                more_in_buffer = _parser.isMoreInBuffer() || _endp.isBufferingInput();
                            }
                            else
                                reset(true);
                            progress=true;
                        }
                    }
                    else if (_parser.isIdle() && _endp.isInputShutdown())
                    {
                        more_in_buffer=false;
                        _endp.close();
                    }

                    if (_request.isAsyncStarted())
                    {
                        Log.debug("return with suspended request");
                        more_in_buffer=false;
                    }
                    else if (_generator.isCommitted() && !_generator.isComplete() && _endp instanceof AsyncEndPoint)
                        ((AsyncEndPoint)_endp).scheduleWrite();
                }
            }
        }
        finally
        {
            setCurrentConnection(null);
            _handling=false;
        }
        return connection;
    }

    /* ------------------------------------------------------------ */
    public void scheduleTimeout(Timeout.Task task, long timeoutMs)
    {
        throw new UnsupportedOperationException();
    }

    /* ------------------------------------------------------------ */
    public void cancelTimeout(Timeout.Task task)
    {
        throw new UnsupportedOperationException();
    }

    /* ------------------------------------------------------------ */
    /** Recycle the per-request objects so the connection can serve the next request. */
    public void reset(boolean returnBuffers)
    {
        _parser.reset(returnBuffers); // TODO maybe only release when low on resources
        _requestFields.clear();
        _request.recycle();

        _generator.reset(returnBuffers); // TODO maybe only release when low on resources
        _responseFields.clear();
        _response.recycle();

        _uri.clear();
    }

    /* ------------------------------------------------------------ */
    protected void handleRequest() throws IOException
    {
        boolean error = false;

        String threadName=null;
        try
        {
            if (Log.isDebugEnabled())
            {
                threadName=Thread.currentThread().getName();
                Thread.currentThread().setName(threadName+" - "+_uri);
            }

            // Loop here to handle async request redispatches.
            // The loop is controlled by the call to async.unhandle in the
            // finally block below.
            // If call is from a non-blocking connector,
            // then the unhandle will return false only if an async dispatch has
            // already happened when unhandle is called. For a blocking connector,
            // the wait for the asynchronous dispatch or timeout actually happens
            // within the call to unhandle().

            final Server server=_server;
            boolean handling=_request._async.handling() && server!=null && server.isRunning();
            while (handling)
            {
                _request.setHandled(false);

                String info=null;
                try
                {
                    _uri.getPort();
                    info=URIUtil.canonicalPath(_uri.getDecodedPath());
                    if (info==null && !_request.getMethod().equals(HttpMethods.CONNECT))
                        throw new HttpException(400);
                    _request.setPathInfo(info);

                    if (_out!=null)
                        _out.reopen();

                    if (_request._async.isInitial())
                    {
                        _request.setDispatcherType(DispatcherType.REQUEST);
                        _connector.customize(_endp, _request);
                        server.handle(this);
                    }
                    else
                    {
                        _request.setDispatcherType(DispatcherType.ASYNC);
                        server.handleAsync(this);
                    }
                }
                catch (ContinuationThrowable e)
                {
                    // used by the continuation mechanism for control flow; not an error
                    Log.ignore(e);
                }
                catch (EofException e)
                {
                    Log.debug(e);
                    _request.setHandled(true);
                    error=true;
                }
                catch (RuntimeIOException e)
                {
                    Log.debug(e);
                    _request.setHandled(true);
                    error=true;
                }
                catch (HttpException e)
                {
                    Log.debug(e);
                    _request.setHandled(true);
                    _response.sendError(e.getStatus(), e.getReason());
                    error=true;
                }
                catch (Throwable e)
                {
                    if (e instanceof ThreadDeath)
                        throw (ThreadDeath)e;
                    error=true;
                    Log.warn(String.valueOf(_uri),e);
                    _request.setHandled(true);
                    // 400 if the URI never produced a valid path, else 500
                    _generator.sendError(info==null?400:500, null, null, true);
                }
                finally
                {
                    handling = !_request._async.unhandle() && server.isRunning() && _server!=null;
                }
            }
        }
        finally
        {
            if (threadName!=null)
                Thread.currentThread().setName(threadName);

            if (_request._async.isUncompleted())
            {
                _request._async.doComplete();

                if (_expect100Continue)
                {
                    Log.debug("100 continues not sent");
                    // We didn't send 100 continues, but the latest interpretation
                    // of the spec (see httpbis) is that the client will either
                    // send the body anyway, or close. So we no longer need to
                    // do anything special here.
                    _expect100Continue = false;
                    if (!_response.isCommitted())
                        _generator.setPersistent(false);
                }

                if(_endp.isOpen())
                {
                    if (error)
                        _endp.close();
                    else
                    {
                        if (!_response.isCommitted() && !_request.isHandled())
                            _response.sendError(HttpServletResponse.SC_NOT_FOUND);
                        _response.complete();
                        if (_generator.isPersistent())
                            _connector.persist(_endp);
                    }
                }
                else
                    _response.complete();

                _request.setHandled(true);
            }
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Commits the response headers (and optionally completes the response).
     * On header overflow, resets and sends a 500 instead.
     */
    public void commitResponse(boolean last) throws IOException
    {
        if (!_generator.isCommitted())
        {
            _generator.setResponse(_response.getStatus(), _response.getReason());
            try
            {
                // If the client was expecting 100 continues, but we sent something
                // else, then we need to close the connection
                if (_expect100Continue && _response.getStatus()!=100)
                    _generator.setPersistent(false);

                _generator.completeHeader(_responseFields, last);
            }
            catch(IOException io)
            {
                throw io;
            }
            catch(RuntimeException e)
            {
                Log.warn("header full: "+e);

                _response.reset();
                _generator.reset(true);
                _generator.setResponse(HttpStatus.INTERNAL_SERVER_ERROR_500,null);
                _generator.completeHeader(_responseFields,Generator.LAST);
                _generator.complete();
                throw new HttpException(HttpStatus.INTERNAL_SERVER_ERROR_500);
            }
        }
        if (last)
            _generator.complete();
    }

    /* ------------------------------------------------------------ */
    /** Commits (if needed) and completes the response. */
    public void completeResponse() throws IOException
    {
        if (!_generator.isCommitted())
        {
            _generator.setResponse(_response.getStatus(), _response.getReason());
            try
            {
                _generator.completeHeader(_responseFields, Generator.LAST);
            }
            catch(IOException io)
            {
                throw io;
            }
            catch(RuntimeException e)
            {
                Log.warn("header full: "+e);
                Log.debug(e);

                _response.reset();
                _generator.reset(true);
                _generator.setResponse(HttpStatus.INTERNAL_SERVER_ERROR_500,null);
                _generator.completeHeader(_responseFields,Generator.LAST);
                _generator.complete();
                throw new HttpException(HttpStatus.INTERNAL_SERVER_ERROR_500);
            }
        }
_generator.complete(); } /* ------------------------------------------------------------ */ public void flushResponse() throws IOException { try { commitResponse(Generator.MORE); _generator.flushBuffer(); } catch(IOException e) { throw (e instanceof EofException) ? e:new EofException(e); } } /* ------------------------------------------------------------ */ public Generator getGenerator() { return _generator; } /* ------------------------------------------------------------ */ public boolean isIncluding() { return _include>0; } /* ------------------------------------------------------------ */ public void include() { _include++; } /* ------------------------------------------------------------ */ public void included() { _include--; if (_out!=null) _out.reopen(); } /* ------------------------------------------------------------ */ public boolean isIdle() { return _generator.isIdle() && (_parser.isIdle() || _delayedHandling); } /* ------------------------------------------------------------ */ /** * @see org.eclipse.jetty.io.Connection#isSuspended() */ public boolean isSuspended() { return _request.getAsyncContinuation().isSuspended(); } /* ------------------------------------------------------------ */ public void closed() { Log.debug("closed {}",this); } /* ------------------------------------------------------------ */ public boolean isExpecting100Continues() { return _expect100Continue; } /* ------------------------------------------------------------ */ public boolean isExpecting102Processing() { return _expect102Processing; } /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ private class RequestHandler extends HttpParser.EventHandler { private String _charset; /* * * @see org.eclipse.jetty.server.server.HttpParser.EventHandler#startRequest(org.eclipse.io.Buffer, * org.eclipse.io.Buffer, org.eclipse.io.Buffer) 
*/ @Override public void startRequest(Buffer method, Buffer uri, Buffer version) throws IOException { _host = false; _expect = false; _expect100Continue=false; _expect102Processing=false; _delayedHandling=false; _charset=null; if(_request.getTimeStamp()==0) _request.setTimeStamp(System.currentTimeMillis()); _request.setMethod(method.toString()); try { _head=false; switch (HttpMethods.CACHE.getOrdinal(method)) { case HttpMethods.CONNECT_ORDINAL: _uri.parseConnect(uri.array(), uri.getIndex(), uri.length()); break; case HttpMethods.HEAD_ORDINAL: _head=true; // fall through default: _uri.parse(uri.array(), uri.getIndex(), uri.length()); } _request.setUri(_uri); if (version==null) { _request.setProtocol(HttpVersions.HTTP_0_9); _version=HttpVersions.HTTP_0_9_ORDINAL; } else { version= HttpVersions.CACHE.get(version); if (version==null) throw new HttpException(HttpStatus.BAD_REQUEST_400,null); _version = HttpVersions.CACHE.getOrdinal(version); if (_version <= 0) _version = HttpVersions.HTTP_1_0_ORDINAL; _request.setProtocol(version.toString()); } } catch (Exception e) { Log.debug(e); if (e instanceof HttpException) throw (HttpException)e; throw new HttpException(HttpStatus.BAD_REQUEST_400,null,e); } } /* * @see org.eclipse.jetty.server.server.HttpParser.EventHandler#parsedHeaderValue(org.eclipse.io.Buffer) */ @Override public void parsedHeader(Buffer name, Buffer value) { int ho = HttpHeaders.CACHE.getOrdinal(name); switch (ho) { case HttpHeaders.HOST_ORDINAL: // TODO check if host matched a host in the URI. 
_host = true; break; case HttpHeaders.EXPECT_ORDINAL: value = HttpHeaderValues.CACHE.lookup(value); switch(HttpHeaderValues.CACHE.getOrdinal(value)) { case HttpHeaderValues.CONTINUE_ORDINAL: _expect100Continue=_generator instanceof HttpGenerator; break; case HttpHeaderValues.PROCESSING_ORDINAL: _expect102Processing=_generator instanceof HttpGenerator; break; default: String[] values = value.toString().split(","); for (int i=0;values!=null && i<values.length;i++) { CachedBuffer cb=HttpHeaderValues.CACHE.get(values[i].trim()); if (cb==null) _expect=true; else { switch(cb.getOrdinal()) { case HttpHeaderValues.CONTINUE_ORDINAL: _expect100Continue=_generator instanceof HttpGenerator; break; case HttpHeaderValues.PROCESSING_ORDINAL: _expect102Processing=_generator instanceof HttpGenerator; break; default: _expect=true; } } } } break; case HttpHeaders.ACCEPT_ENCODING_ORDINAL: case HttpHeaders.USER_AGENT_ORDINAL: value = HttpHeaderValues.CACHE.lookup(value); break; case HttpHeaders.CONTENT_TYPE_ORDINAL: value = MimeTypes.CACHE.lookup(value); _charset=MimeTypes.getCharsetFromContentType(value); break; case HttpHeaders.CONNECTION_ORDINAL: //looks rather clumsy, but the idea is to optimize for a single valued header switch(HttpHeaderValues.CACHE.getOrdinal(value)) { case -1: { String[] values = value.toString().split(","); for (int i=0;values!=null && i<values.length;i++) { CachedBuffer cb = HttpHeaderValues.CACHE.get(values[i].trim()); if (cb!=null) { switch(cb.getOrdinal()) { case HttpHeaderValues.CLOSE_ORDINAL: _responseFields.add(HttpHeaders.CONNECTION_BUFFER,HttpHeaderValues.CLOSE_BUFFER); _generator.setPersistent(false); break; case HttpHeaderValues.KEEP_ALIVE_ORDINAL: if (_version==HttpVersions.HTTP_1_0_ORDINAL) _responseFields.add(HttpHeaders.CONNECTION_BUFFER,HttpHeaderValues.KEEP_ALIVE_BUFFER); break; } } } break; } case HttpHeaderValues.CLOSE_ORDINAL: _responseFields.put(HttpHeaders.CONNECTION_BUFFER,HttpHeaderValues.CLOSE_BUFFER); _generator.setPersistent(false); 
break; case HttpHeaderValues.KEEP_ALIVE_ORDINAL: if (_version==HttpVersions.HTTP_1_0_ORDINAL) _responseFields.put(HttpHeaders.CONNECTION_BUFFER,HttpHeaderValues.KEEP_ALIVE_BUFFER); break; } } _requestFields.add(name, value); } /* * @see org.eclipse.jetty.server.server.HttpParser.EventHandler#headerComplete() */ @Override public void headerComplete() throws IOException { if (_endp instanceof AsyncEndPoint) ((AsyncEndPoint)_endp).scheduleIdle(); _requests++; _generator.setVersion(_version); switch (_version) { case HttpVersions.HTTP_0_9_ORDINAL: break; case HttpVersions.HTTP_1_0_ORDINAL: _generator.setHead(_head); if (_server.getSendDateHeader()) _generator.setDate(_request.getTimeStampBuffer()); break; case HttpVersions.HTTP_1_1_ORDINAL: _generator.setHead(_head); if (_server.getSendDateHeader()) _generator.setDate(_request.getTimeStampBuffer()); if (!_host) { _generator.setResponse(HttpStatus.BAD_REQUEST_400, null); _responseFields.put(HttpHeaders.CONNECTION_BUFFER, HttpHeaderValues.CLOSE_BUFFER); _generator.completeHeader(_responseFields, true); _generator.complete(); return; } if (_expect) { _generator.setResponse(HttpStatus.EXPECTATION_FAILED_417, null); _responseFields.put(HttpHeaders.CONNECTION_BUFFER, HttpHeaderValues.CLOSE_BUFFER); _generator.completeHeader(_responseFields, true); _generator.complete(); return; } break; default: } if(_charset!=null) _request.setCharacterEncodingUnchecked(_charset); // Either handle now or wait for first content if ((((HttpParser)_parser).getContentLength()<=0 && !((HttpParser)_parser).isChunking())||_expect100Continue) handleRequest(); else _delayedHandling=true; } /* ------------------------------------------------------------ */ /* * @see org.eclipse.jetty.server.server.HttpParser.EventHandler#content(int, org.eclipse.io.Buffer) */ @Override public void content(Buffer ref) throws IOException { if (_endp instanceof AsyncEndPoint) ((AsyncEndPoint)_endp).scheduleIdle(); if (_delayedHandling) { _delayedHandling=false; 
handleRequest(); } } /* ------------------------------------------------------------ */ /* * (non-Javadoc) * * @see org.eclipse.jetty.server.server.HttpParser.EventHandler#messageComplete(int) */ @Override public void messageComplete(long contentLength) throws IOException { if (_delayedHandling) { _delayedHandling=false; handleRequest(); } } /* ------------------------------------------------------------ */ /* * (non-Javadoc) * * @see org.eclipse.jetty.server.server.HttpParser.EventHandler#startResponse(org.eclipse.io.Buffer, int, * org.eclipse.io.Buffer) */ @Override public void startResponse(Buffer version, int status, Buffer reason) { Log.debug("Bad request!: "+version+" "+status+" "+reason); } } /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ public class Output extends HttpOutput { Output() { super((AbstractGenerator)HttpConnection.this._generator, _connector.isLowResources()?_connector.getLowResourceMaxIdleTime():_connector.getMaxIdleTime()); } /* ------------------------------------------------------------ */ /* * @see java.io.OutputStream#close() */ @Override public void close() throws IOException { if (isClosed()) return; if (!isIncluding() && !super._generator.isCommitted()) commitResponse(Generator.LAST); else flushResponse(); super.close(); } /* ------------------------------------------------------------ */ /* * @see java.io.OutputStream#flush() */ @Override public void flush() throws IOException { if (!super._generator.isCommitted()) commitResponse(Generator.MORE); super.flush(); } /* ------------------------------------------------------------ */ /* * @see javax.servlet.ServletOutputStream#print(java.lang.String) */ @Override public void print(String s) throws IOException { if (isClosed()) throw new IOException("Closed"); PrintWriter writer=getPrintWriter(null); writer.print(s); } /* 
------------------------------------------------------------ */ public void sendResponse(Buffer response) throws IOException { ((HttpGenerator)super._generator).sendResponse(response); } /* ------------------------------------------------------------ */ public void sendContent(Object content) throws IOException { Resource resource=null; if (isClosed()) throw new IOException("Closed"); if (super._generator.isWritten()) throw new IllegalStateException("!empty"); // Convert HTTP content to contentl if (content instanceof HttpContent) { HttpContent httpContent = (HttpContent) content; Buffer contentType = httpContent.getContentType(); if (contentType != null && !_responseFields.containsKey(HttpHeaders.CONTENT_TYPE_BUFFER)) { String enc = _response.getSetCharacterEncoding(); if(enc==null) _responseFields.add(HttpHeaders.CONTENT_TYPE_BUFFER, contentType); else { if(contentType instanceof CachedBuffer) { CachedBuffer content_type = ((CachedBuffer)contentType).getAssociate(enc); if(content_type!=null) _responseFields.put(HttpHeaders.CONTENT_TYPE_BUFFER, content_type); else { _responseFields.put(HttpHeaders.CONTENT_TYPE_BUFFER, contentType+";charset="+QuotedStringTokenizer.quoteIfNeeded(enc,";= ")); } } else { _responseFields.put(HttpHeaders.CONTENT_TYPE_BUFFER, contentType+";charset="+QuotedStringTokenizer.quoteIfNeeded(enc,";= ")); } } } if (httpContent.getContentLength() > 0) _responseFields.putLongField(HttpHeaders.CONTENT_LENGTH_BUFFER, httpContent.getContentLength()); Buffer lm = httpContent.getLastModified(); long lml=httpContent.getResource().lastModified(); if (lm != null) _responseFields.put(HttpHeaders.LAST_MODIFIED_BUFFER, lm,lml); else if (httpContent.getResource()!=null) { if (lml!=-1) _responseFields.putDateField(HttpHeaders.LAST_MODIFIED_BUFFER, lml); } boolean direct=_connector instanceof NIOConnector && ((NIOConnector)_connector).getUseDirectBuffers() && !(_connector instanceof SslConnector); content = 
direct?httpContent.getDirectBuffer():httpContent.getIndirectBuffer(); if (content==null) content=httpContent.getInputStream(); } else if (content instanceof Resource) { resource=(Resource)content; _responseFields.putDateField(HttpHeaders.LAST_MODIFIED_BUFFER, resource.lastModified()); content=resource.getInputStream(); } // Process content. if (content instanceof Buffer) { super._generator.addContent((Buffer) content, Generator.LAST); commitResponse(Generator.LAST); } else if (content instanceof InputStream) { InputStream in = (InputStream)content; try { int max = super._generator.prepareUncheckedAddContent(); Buffer buffer = super._generator.getUncheckedBuffer(); int len=buffer.readFrom(in,max); while (len>=0) { super._generator.completeUncheckedAddContent(); _out.flush(); max = super._generator.prepareUncheckedAddContent(); buffer = super._generator.getUncheckedBuffer(); len=buffer.readFrom(in,max); } super._generator.completeUncheckedAddContent(); _out.flush(); } finally { if (resource!=null) resource.release(); else in.close(); } } else throw new IllegalArgumentException("unknown content type?"); } } /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ public class OutputWriter extends HttpWriter { OutputWriter() { super(HttpConnection.this._out); } } }
package org.hisp.dhis.user;

/*
 * Copyright (c) 2004-2015, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DimensionType;
import org.hisp.dhis.common.DimensionalObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectUtils;
import org.hisp.dhis.common.MergeStrategy;
import org.hisp.dhis.common.annotation.Scanned;
import org.hisp.dhis.common.view.DetailedView;
import org.hisp.dhis.common.view.ExportView;
import org.hisp.dhis.dataelement.CategoryOptionGroupSet;
import org.hisp.dhis.dataelement.DataElementCategory;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.schema.PropertyType;
import org.hisp.dhis.schema.annotation.Property;
import org.hisp.dhis.schema.annotation.PropertyRange;
import org.springframework.util.StringUtils;

import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

/**
 * Account credentials for a user: username/password, optional OpenID identity,
 * the set of authority groups (roles) granted to the account, dimension
 * constraints applied to data analytics, and state for the account-restore
 * (password reset / invitation) flow. Identity for equality purposes is the
 * username only.
 *
 * @author Nguyen Hong Duc
 */
@JacksonXmlRootElement( localName = "userCredentials", namespace = DxfNamespaces.DXF_2_0 )
public class UserCredentials
    extends BaseIdentifiableObject
{
    /**
     * Determines if a de-serialized file is compatible with this class.
     */
    private static final long serialVersionUID = -8919501679702302098L;

    /**
     * Required and unique.
     * TODO: This must be renamed before we start using idObjectStore for UserCredentials
     */
    //private User user;

    /**
     * Required and unique.
     */
    private String username;

    /**
     * Unique OpenID.
     */
    private String openId;

    /**
     * Required. Will be stored as a hash.
     */
    private String password;

    /**
     * Date when password was changed.
     */
    private Date passwordLastUpdated;

    /**
     * Set of user roles.
     */
    @Scanned
    private Set<UserAuthorityGroup> userAuthorityGroups = new HashSet<>();

    /**
     * Category option group set dimensions to constrain data analytics aggregation.
     */
    @Scanned
    private Set<CategoryOptionGroupSet> cogsDimensionConstraints = new HashSet<>();

    /**
     * Category dimensions to constrain data analytics aggregation.
     */
    @Scanned
    private Set<DataElementCategory> catDimensionConstraints = new HashSet<>();

    /**
     * Date of the user's last login.
     */
    private Date lastLogin;

    /**
     * The token used for a user account restore. Will be stored as a hash.
     */
    private String restoreToken;

    /**
     * The code used for a user account restore. Will be stored as a hash.
     */
    private String restoreCode;

    /**
     * The timestamp representing when the restore window expires.
     */
    private Date restoreExpiry;

    /**
     * Indicates whether this user was originally self registered.
     */
    private boolean selfRegistered;

    /**
     * Indicates whether this credentials is currently an invitation.
     */
    private boolean invitation;

    /**
     * Indicates whether this is user is disabled, which means the user cannot
     * be authenticated.
     */
    private boolean disabled;

    // -------------------------------------------------------------------------
    // Constructor
    // -------------------------------------------------------------------------

    public UserCredentials()
    {
        setAutoFields();
        this.name = this.getClass().getSimpleName(); // Used to avoid JSR-303 issues
        this.lastLogin = new Date();
        this.passwordLastUpdated = new Date();
    }

    // -------------------------------------------------------------------------
    // Logic
    // -------------------------------------------------------------------------

    /**
     * Returns a concatenated String of the display names of all user authority
     * groups for this user credentials.
     */
    public String getUserAuthorityGroupsName()
    {
        return IdentifiableObjectUtils.join( userAuthorityGroups );
    }

    /**
     * Returns a set of the aggregated authorities for all user authority groups
     * of this user credentials.
     */
    public Set<String> getAllAuthorities()
    {
        Set<String> authorities = new HashSet<>();

        for ( UserAuthorityGroup group : userAuthorityGroups )
        {
            authorities.addAll( group.getAuthorities() );
        }

        return authorities;
    }

    /**
     * Indicates whether this user credentials has at least one authority through
     * its user authority groups.
     */
    public boolean hasAuthorities()
    {
        for ( UserAuthorityGroup group : userAuthorityGroups )
        {
            if ( group != null && group.getAuthorities() != null && !group.getAuthorities().isEmpty() )
            {
                return true;
            }
        }

        return false;
    }

    /**
     * Tests whether this user credentials has any of the authorities in the
     * given set.
     *
     * @param auths the authorities to compare with.
     * @return true or false.
     */
    public boolean hasAnyAuthority( Collection<String> auths )
    {
        // removeAll returns true iff the two sets intersect; the copy protects
        // the aggregated set from mutation.
        Set<String> all = new HashSet<>( getAllAuthorities() );
        return all.removeAll( auths );
    }

    /**
     * Tests whether the user has the given authority. Returns true in any case
     * if the user has the ALL authority.
     */
    public boolean isAuthorized( String auth )
    {
        if ( auth == null )
        {
            return false;
        }

        final Set<String> auths = getAllAuthorities();

        return auths.contains( UserAuthorityGroup.AUTHORITY_ALL ) || auths.contains( auth );
    }

    /**
     * Indicates whether this user credentials is a super user, implying that the
     * ALL authority is present in at least one of the user authority groups of
     * this user credentials.
     */
    public boolean isSuper()
    {
        for ( UserAuthorityGroup group : userAuthorityGroups )
        {
            // Defend against null elements for consistency with hasAuthorities()
            if ( group != null && group.isSuper() )
            {
                return true;
            }
        }

        return false;
    }

    /**
     * Returns a set of the aggregated data sets for all user authority groups
     * of this user credentials.
     */
    public Set<DataSet> getAllDataSets()
    {
        Set<DataSet> dataSets = new HashSet<>();

        for ( UserAuthorityGroup group : userAuthorityGroups )
        {
            dataSets.addAll( group.getDataSets() );
        }

        return dataSets;
    }

    /**
     * Indicates whether this user credentials can issue the given user authority
     * group. First the given authority group must not be null. Second this
     * user credentials must not contain the given authority group. Third
     * the authority group must be a subset of the aggregated user authorities
     * of this user credentials, or this user credentials must have the ALL
     * authority.
     *
     * @param group the user authority group.
     * @param canGrantOwnUserAuthorityGroups indicates whether this users can grant
     *        its own authority groups to others.
     */
    public boolean canIssueUserRole( UserAuthorityGroup group, boolean canGrantOwnUserAuthorityGroups )
    {
        if ( group == null )
        {
            return false;
        }

        final Set<String> authorities = getAllAuthorities();

        if ( authorities.contains( UserAuthorityGroup.AUTHORITY_ALL ) )
        {
            return true;
        }

        if ( !canGrantOwnUserAuthorityGroups && userAuthorityGroups.contains( group ) )
        {
            return false;
        }

        return authorities.containsAll( group.getAuthorities() );
    }

    /**
     * Indicates whether this user credentials can issue all of the user authority
     * groups in the given collection.
     *
     * @param groups the collection of user authority groups.
     * @param canGrantOwnUserAuthorityGroups indicates whether this users can grant
     *        its own authority groups to others.
     */
    public boolean canIssueUserRoles( Collection<UserAuthorityGroup> groups, boolean canGrantOwnUserAuthorityGroups )
    {
        for ( UserAuthorityGroup group : groups )
        {
            if ( !canIssueUserRole( group, canGrantOwnUserAuthorityGroups ) )
            {
                return false;
            }
        }

        return true;
    }

    /**
     * Indicates whether this user credentials can modify the given user
     * credentials. This user credentials must have the ALL authority or possess
     * all user authorities of the other user credentials to do so.
     *
     * @param other the user credentials to modify.
     */
    public boolean canModifyUser( UserCredentials other )
    {
        if ( other == null )
        {
            return false;
        }

        final Set<String> authorities = getAllAuthorities();

        if ( authorities.contains( UserAuthorityGroup.AUTHORITY_ALL ) )
        {
            return true;
        }

        return authorities.containsAll( other.getAllAuthorities() );
    }

    /**
     * Return the name of this user credentials. More specifically, if this
     * credentials has a user it will return the first name and surname of that
     * user, if not it returns the username of this credentials.
     *
     * @return the name.
     */
    @Override
    public String getName()
    {
        return user != null ? user.getName() : username;
    }

    @Override
    public String getCode()
    {
        return username;
    }

    /**
     * Tests whether the credentials contain all needed parameters to
     * perform an account restore.
     * If a parameter is missing a descriptive error string is returned.
     *
     * @return null on success, a descriptive error string on failure.
     */
    public String isRestorable()
    {
        if ( restoreToken == null )
        {
            return "account_restoreToken_is_null";
        }

        if ( restoreCode == null )
        {
            return "account_restoreCode_is_null";
        }

        if ( restoreExpiry == null )
        {
            return "account_restoreExpiry_is_null";
        }

        return null; // Success.
    }

    /**
     * Returns the dimensions to use as constrains (filters) in data analytics
     * aggregation.
     */
    public Set<DimensionalObject> getDimensionConstraints()
    {
        Set<DimensionalObject> constraints = new HashSet<>();

        for ( CategoryOptionGroupSet cogs : cogsDimensionConstraints )
        {
            cogs.setDimensionType( DimensionType.CATEGORYOPTION_GROUPSET );
            constraints.add( cogs );
        }

        for ( DataElementCategory cat : catDimensionConstraints )
        {
            cat.setDimensionType( DimensionType.CATEGORY );
            constraints.add( cat );
        }

        return constraints;
    }

    /**
     * Indicates whether this user credentials has user authority groups.
     */
    public boolean hasUserAuthorityGroups()
    {
        return userAuthorityGroups != null && !userAuthorityGroups.isEmpty();
    }

    /**
     * Indicates whether this user credentials has dimension constraints.
     */
    public boolean hasDimensionConstraints()
    {
        Set<DimensionalObject> constraints = getDimensionConstraints();
        return constraints != null && !constraints.isEmpty();
    }

    // -------------------------------------------------------------------------
    // hashCode and equals
    // -------------------------------------------------------------------------

    @Override
    public int hashCode()
    {
        // Null-safe: username is required but may not be set yet on a transient
        // instance. Objects.hashCode returns the same value as
        // username.hashCode() for non-null usernames.
        return Objects.hashCode( username );
    }

    @Override
    public boolean equals( Object o )
    {
        if ( this == o )
        {
            return true;
        }

        if ( !(o instanceof UserCredentials) )
        {
            return false;
        }

        final UserCredentials other = (UserCredentials) o;

        // Null-safe comparison; identity is the username only.
        return Objects.equals( username, other.getUsername() );
    }

    @Override
    public boolean haveUniqueNames()
    {
        return false;
    }

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    @Override
    @JsonProperty
    @JsonView( DetailedView.class )
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public User getUser()
    {
        return super.getUser();
    }

    public String getPassword()
    {
        return password;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @Property( PropertyType.PASSWORD )
    @PropertyRange( min = 8, max = 35 )
    public void setPassword( String password )
    {
        this.password = password;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Date getPasswordLastUpdated()
    {
        return passwordLastUpdated;
    }

    public void setPasswordLastUpdated( Date passwordLastUpdated )
    {
        this.passwordLastUpdated = passwordLastUpdated;
    }

    @JsonProperty( "userRoles" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "userRoles", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "userRole", namespace = DxfNamespaces.DXF_2_0 )
    public Set<UserAuthorityGroup> getUserAuthorityGroups()
    {
        return userAuthorityGroups;
    }

    public void setUserAuthorityGroups( Set<UserAuthorityGroup> userAuthorityGroups )
    {
        this.userAuthorityGroups = userAuthorityGroups;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "catDimensionConstraints", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "catDimensionConstraint", namespace = DxfNamespaces.DXF_2_0 )
    public Set<DataElementCategory> getCatDimensionConstraints()
    {
        return catDimensionConstraints;
    }

    public void setCatDimensionConstraints( Set<DataElementCategory> catDimensionConstraints )
    {
        this.catDimensionConstraints = catDimensionConstraints;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "cogsDimensionConstraints", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "cogsDimensionConstraint", namespace = DxfNamespaces.DXF_2_0 )
    public Set<CategoryOptionGroupSet> getCogsDimensionConstraints()
    {
        return cogsDimensionConstraints;
    }

    public void setCogsDimensionConstraints( Set<CategoryOptionGroupSet> cogsDimensionConstraints )
    {
        this.cogsDimensionConstraints = cogsDimensionConstraints;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public String getUsername()
    {
        return username;
    }

    public void setUsername( String username )
    {
        this.username = username;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public String getOpenId()
    {
        return openId;
    }

    public void setOpenId( String openId )
    {
        this.openId = openId;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Date getLastLogin()
    {
        return lastLogin;
    }

    public void setLastLogin( Date lastLogin )
    {
        this.lastLogin = lastLogin;
    }

    public String getRestoreToken()
    {
        return restoreToken;
    }

    public void setRestoreToken( String restoreToken )
    {
        this.restoreToken = restoreToken;
    }

    public String getRestoreCode()
    {
        return restoreCode;
    }

    public void setRestoreCode( String restoreCode )
    {
        this.restoreCode = restoreCode;
    }

    public Date getRestoreExpiry()
    {
        return restoreExpiry;
    }

    public void setRestoreExpiry( Date restoreExpiry )
    {
        this.restoreExpiry = restoreExpiry;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isSelfRegistered()
    {
        return selfRegistered;
    }

    public void setSelfRegistered( boolean selfRegistered )
    {
        this.selfRegistered = selfRegistered;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isInvitation()
    {
        return invitation;
    }

    public void setInvitation( boolean invitation )
    {
        this.invitation = invitation;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isDisabled()
    {
        return disabled;
    }

    public void setDisabled( boolean disabled )
    {
        this.disabled = disabled;
    }

    @Override
    public void mergeWith( IdentifiableObject other, MergeStrategy strategy )
    {
        super.mergeWith( other, strategy );

        // Fix: the previous guard other.getClass().isInstance( this ) tested the
        // wrong direction and allowed a ClassCastException when "other" was a
        // plain IdentifiableObject; instanceof guards the cast correctly.
        if ( other instanceof UserCredentials )
        {
            UserCredentials userCredentials = (UserCredentials) other;

            username = userCredentials.getUsername();
            password = StringUtils.isEmpty( userCredentials.getPassword() ) ? password : userCredentials.getPassword();
            passwordLastUpdated = userCredentials.getPasswordLastUpdated();
            lastLogin = userCredentials.getLastLogin();
            restoreToken = userCredentials.getRestoreToken();
            restoreCode = userCredentials.getRestoreCode(); // Fix: was omitted, breaking isRestorable() after a merge
            restoreExpiry = userCredentials.getRestoreExpiry();
            selfRegistered = userCredentials.isSelfRegistered();
            disabled = userCredentials.isDisabled();
            // NOTE(review): "invitation" is intentionally left unmerged to match
            // the previous behavior — confirm whether that is deliberate.

            if ( strategy.isReplace() )
            {
                openId = userCredentials.getOpenId();
            }
            else if ( strategy.isMerge() )
            {
                openId = userCredentials.getOpenId() == null ? openId : userCredentials.getOpenId();
            }

            userAuthorityGroups.clear();
            userAuthorityGroups.addAll( userCredentials.getUserAuthorityGroups() );

            catDimensionConstraints.clear();
            catDimensionConstraints.addAll( userCredentials.getCatDimensionConstraints() );

            cogsDimensionConstraints.clear();
            cogsDimensionConstraints.addAll( userCredentials.getCogsDimensionConstraints() );
        }
    }

    @Override
    public String toString()
    {
        // Security fix: the password hash and the restore token/code hashes are
        // deliberately excluded so credential material cannot leak into logs.
        return "UserCredentials{" +
            "username='" + username + '\'' +
            ", openId='" + openId + '\'' +
            ", passwordLastUpdated=" + passwordLastUpdated +
            ", userAuthorityGroups=" + userAuthorityGroups +
            ", lastLogin=" + lastLogin +
            ", restoreExpiry=" + restoreExpiry +
            ", selfRegistered=" + selfRegistered +
            ", disabled=" + disabled +
            '}';
    }
}
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.firebase.analytics;

import android.content.Context;
import android.os.Bundle;
import android.support.annotation.IntRange;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Adapter that formats Google Analytics for Firebase call arguments so that they adhere to
 * formatting rules.
 *
 * <p>All wrapped {@link FirebaseAnalytics} calls are queued on a single-threaded executor and
 * block until a {@link Context} is supplied via {@link #registerContext(Context)}.
 */
public class GoogleAnalyticsAdapter {

  /** The builder for {@link GoogleAnalyticsAdapter}. */
  public static class Builder {
    private Map<String, String> eventNameMap;
    private Map<String, String> paramNameMap;
    private Map<String, String> userPropertyNameMap;
    private List<String> blacklistedEventNames;
    private List<String> blacklistedParamNames;
    private List<String> blacklistedUserPropertyNames;
    private String wrappedSdkName;
    private String sanitizedNamePrefix;
    private String emptyEventName;
    private String emptyParamName;
    private String emptyUserPropertyName;

    /** Maps wrapped-SDK event names (keys) to their GA4F equivalents (values). */
    Builder setEventNameMap(@Nullable Map<String, String> eventNameMap) {
      this.eventNameMap = eventNameMap;
      return this;
    }

    /** Maps wrapped-SDK param names (keys) to their GA4F equivalents (values). */
    Builder setParamNameMap(@Nullable Map<String, String> paramNameMap) {
      this.paramNameMap = paramNameMap;
      return this;
    }

    /** Maps wrapped-SDK user property names (keys) to their GA4F equivalents (values). */
    Builder setUserPropertyNameMap(@Nullable Map<String, String> userPropertyNameMap) {
      this.userPropertyNameMap = userPropertyNameMap;
      return this;
    }

    /** Event names that should be dropped entirely rather than forwarded. */
    Builder setBlacklistedEventNames(@Nullable List<String> blacklistedEventNames) {
      this.blacklistedEventNames = blacklistedEventNames;
      return this;
    }

    /** Param names that should be dropped entirely rather than forwarded. */
    Builder setBlacklistedParamNames(@Nullable List<String> blacklistedParamNames) {
      this.blacklistedParamNames = blacklistedParamNames;
      return this;
    }

    /** User property names that should be dropped entirely rather than forwarded. */
    Builder setBlacklistedUserPropertyNames(@Nullable List<String> blacklistedUserPropertyNames) {
      this.blacklistedUserPropertyNames = blacklistedUserPropertyNames;
      return this;
    }

    /**
     * Sets the wrapped SDK name attached to every event as the {@code api_wrapper} param.
     *
     * @throws IllegalArgumentException if null or longer than the max param string value length
     */
    Builder setWrappedSdkName(@NonNull String wrappedSdkName) {
      if (wrappedSdkName == null
          || isStringTooLong(wrappedSdkName, MAX_PARAM_STRING_VALUE_LENGTH)) {
        throw new IllegalArgumentException(
            String.format(
                "Wrapped SDK Name must be non-null and not exceed %d characters",
                MAX_PARAM_STRING_VALUE_LENGTH));
      }
      this.wrappedSdkName = wrappedSdkName;
      return this;
    }

    /**
     * Sets the prefix prepended to names that are restricted, reserved, or start with an invalid
     * character.
     *
     * @throws IllegalArgumentException if the prefix is not itself a valid public name
     */
    Builder setSanitizedNamePrefix(@NonNull String sanitizedNamePrefix) {
      if (!isValidPublicNameFormat(sanitizedNamePrefix)) {
        throw new IllegalArgumentException(
            "Sanitized Name Prefix must be non-null, may only contain alphanumeric characters"
                + "and underscores (\"_\"), and must start with an alphabetic character");
      }
      this.sanitizedNamePrefix = sanitizedNamePrefix;
      return this;
    }

    /** Sets the fallback event name used when a raw event name is null or empty. */
    Builder setEmptyEventName(@NonNull String emptyEventName) {
      if (!isValidPublicName(emptyEventName, MAX_EVENT_NAME_LENGTH, RESTRICTED_EVENT_NAMES)) {
        throw new IllegalArgumentException(
            "Empty Event Name must conform to formatting rules outlined at "
                + "https://firebase.google.com/docs/reference/android/com/google/firebase/"
                + "analytics/FirebaseAnalytics.Event");
      }
      this.emptyEventName = emptyEventName;
      return this;
    }

    /** Sets the fallback param name used when a raw param name is null or empty. */
    Builder setEmptyParamName(@NonNull String emptyParamName) {
      if (!isValidPublicName(emptyParamName, MAX_PARAM_NAME_LENGTH, RESTRICTED_PARAM_NAMES)) {
        throw new IllegalArgumentException(
            "Empty Param Name must conform to formatting rules outlined at "
                + "https://firebase.google.com/docs/reference/android/com/google/firebase/"
                + "analytics/FirebaseAnalytics.Param");
      }
      this.emptyParamName = emptyParamName;
      return this;
    }

    /** Sets the fallback user property name used when a raw name is null or empty. */
    Builder setEmptyUserPropertyName(@NonNull String emptyUserPropertyName) {
      if (!isValidPublicName(
          emptyUserPropertyName, MAX_USER_PROPERTY_NAME_LENGTH, RESTRICTED_USER_PROPERTY_NAMES)) {
        throw new IllegalArgumentException(
            "Empty User Property name must conform to formatting rules outlined at "
                + "https://firebase.google.com/docs/reference/android/com/google/firebase/"
                + "analytics/FirebaseAnalytics.UserProperty");
      }
      this.emptyUserPropertyName = emptyUserPropertyName;
      return this;
    }

    /**
     * Builds the adapter. {@code wrappedSdkName} and {@code sanitizedNamePrefix} are required;
     * null maps/lists default to empty, and null fallback names default to "unnamed_*".
     */
    public GoogleAnalyticsAdapter build() {
      if (wrappedSdkName == null) {
        throw new IllegalArgumentException("wrappedSdkName must not be empty");
      }
      if (sanitizedNamePrefix == null) {
        throw new IllegalArgumentException("sanitizedNamePrefix must not be empty");
      }
      if (emptyEventName == null) {
        emptyEventName = "unnamed_event";
      }
      if (emptyParamName == null) {
        emptyParamName = "unnamed_param";
      }
      if (emptyUserPropertyName == null) {
        emptyUserPropertyName = "unnamed_user_property";
      }
      return new GoogleAnalyticsAdapter(
          eventNameMap != null ? eventNameMap : Collections.<String, String>emptyMap(),
          paramNameMap != null ? paramNameMap : Collections.<String, String>emptyMap(),
          userPropertyNameMap != null
              ? userPropertyNameMap
              : Collections.<String, String>emptyMap(),
          blacklistedEventNames != null ? blacklistedEventNames : Collections.<String>emptyList(),
          blacklistedParamNames != null ? blacklistedParamNames : Collections.<String>emptyList(),
          blacklistedUserPropertyNames != null
              ? blacklistedUserPropertyNames
              : Collections.<String>emptyList(),
          wrappedSdkName,
          sanitizedNamePrefix,
          emptyEventName,
          emptyParamName,
          emptyUserPropertyName);
    }
  }

  private static final int MAX_PARAM_COUNT = 25;
  private static final int MAX_PARAM_NAME_LENGTH = 40;
  private static final int MAX_PARAM_STRING_VALUE_LENGTH = 100;
  private static final int MAX_EVENT_NAME_LENGTH = 40;
  private static final int MAX_USER_PROPERTY_NAME_LENGTH = 24;
  private static final int MAX_USER_PROPERTY_VALUE_LENGTH = 36;
  private static final int MAX_USER_ID_VALUE_LENGTH = 256;

  /**
   * Maximum number of {@link FirebaseAnalytics} API calls queued until a {@link Context} is
   * registered using {@link GoogleAnalyticsAdapter#registerContext}.
   */
  private static final int MAX_QUEUE_LENGTH = 1000;

  /** Idle number of worker threads. */
  private static final int CORE_QUEUE_POOL_SIZE = 0;

  /** Maximum number of worker threads. */
  private static final int MAX_QUEUE_POOL_SIZE = 1;

  /** Time to keep thread pool worker thread alive when idle */
  private static final long QUEUE_THREAD_KEEP_ALIVE_TIME_SECONDS = 30L;

  private static final String LOG_TAG = "FA-W";
  private static final String WRAPPER_PARAM_NAME = "api_wrapper";

  private static final List<String> RESERVED_NAME_PREFIXES =
      Arrays.asList("firebase_", "ga_", "google_");

  /**
   * Event names disallowed by Google Analytics. A prefix will be added if a developer logs an
   * event with a restricted name to disambiguate wrapped SDK events from the events automatically
   * collected by Google Analytics for Firebase.
   */
  private static final List<String> RESTRICTED_EVENT_NAMES =
      Arrays.asList(
          "first_open",
          "in_app_purchase",
          "error",
          "user_engagement",
          "session_start",
          "app_update",
          "app_remove",
          "os_update",
          "app_clear_data",
          "notification_foreground",
          "notification_receive",
          "notification_open",
          "notification_dismiss",
          "notification_send",
          "app_exception",
          "dynamic_link_first_open",
          "dynamic_link_app_open",
          "dynamic_link_app_update",
          "app_install",
          "ad_exposure",
          "adunit_exposure",
          "ad_query",
          "ad_activeview",
          "ad_impression",
          "ad_click",
          "app_upgrade",
          "screen_view",
          "first_visit");

  /**
   * Event param names disallowed by Google Analytics Adapter. A prefix will be added if a
   * developer includes event params with restricted names.
   */
  private static final List<String> RESTRICTED_PARAM_NAMES = Arrays.asList(WRAPPER_PARAM_NAME);

  /**
   * User property names disallowed by Google Analytics. A prefix will be added if a developer
   * sets a user property with a restricted name.
   */
  private static final List<String> RESTRICTED_USER_PROPERTY_NAMES =
      Arrays.asList(
          "first_open_time",
          "last_deep_link_referrer",
          "user_id",
          "first_open_after_install",
          "first_visit_time",
          "lifetime_user_engagement",
          "session_number",
          "session_id");

  // Holds the FirebaseAnalytics singleton once a Context is registered; also used as the
  // monitor object the executor's gate task waits on.
  private AtomicReference<FirebaseAnalytics> firebaseReference = new AtomicReference<>();

  /**
   * All non-static {@link FirebaseAnalytics} API calls ({@link FirebaseAnalytics#logEvent(String,
   * Bundle)}, {@link FirebaseAnalytics#setUserProperty(String, String)}, etc) should to be made
   * on this executor. Calls will be blocked until a {@link Context} is registered and a {@link
   * FirebaseAnalytics} instance can be obtained.
   */
  protected final ThreadPoolExecutor executor;

  @NonNull private final Map<String, String> eventNameMap;
  @NonNull private final Map<String, String> paramNameMap;
  @NonNull private final Map<String, String> userPropertyNameMap;
  @NonNull private final List<String> blacklistedEventNames;
  @NonNull private final List<String> blacklistedParamNames;
  @NonNull private final List<String> blacklistedUserPropertyNames;
  private final String wrappedSdkName;
  private final String sanitizedNamePrefix;
  private final String emptyEventName;
  private final String emptyParamName;
  private final String emptyUserPropertyName;

  GoogleAnalyticsAdapter(
      @NonNull Map<String, String> eventNameMap,
      @NonNull Map<String, String> paramNameMap,
      @NonNull Map<String, String> userPropertyNameMap,
      @NonNull List<String> blacklistedEventNames,
      @NonNull List<String> blacklistedParamNames,
      @NonNull List<String> blacklistedUserPropertyNames,
      @NonNull String wrappedSdkName,
      @NonNull String sanitizedNamePrefix,
      @NonNull String emptyEventName,
      @NonNull String emptyParamName,
      @NonNull String emptyUserPropertyName) {
    this.eventNameMap = eventNameMap;
    this.paramNameMap = paramNameMap;
    this.userPropertyNameMap = userPropertyNameMap;
    this.blacklistedEventNames = blacklistedEventNames;
    this.blacklistedParamNames = blacklistedParamNames;
    this.blacklistedUserPropertyNames = blacklistedUserPropertyNames;
    this.wrappedSdkName = wrappedSdkName;
    this.sanitizedNamePrefix = sanitizedNamePrefix;
    this.emptyEventName = emptyEventName;
    this.emptyParamName = emptyParamName;
    this.emptyUserPropertyName = emptyUserPropertyName;
    this.executor =
        new ThreadPoolExecutor(
            CORE_QUEUE_POOL_SIZE,
            MAX_QUEUE_POOL_SIZE,
            QUEUE_THREAD_KEEP_ALIVE_TIME_SECONDS,
            TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>(MAX_QUEUE_LENGTH),
            new RejectedExecutionHandler() {
              @Override
              public void rejectedExecution(
                  Runnable runnable, ThreadPoolExecutor threadPoolExecutor) {
                // Tasks are rejected either because the executor was shut down or because
                // the bounded queue overflowed while waiting for a Context.
                if (executor.isShutdown()) {
                  Log.e(LOG_TAG, "Data loss. Executor service is shut down.");
                } else {
                  Log.e(LOG_TAG, "Data loss. Max task queueOnWorker size exceeded.");
                }
              }
            });
    // Initial executor runnable blocks Google Analytics for Firebase API call execution until an
    // instance of FirebaseAnalytics can be obtained.
    this.executor.execute(
        new Runnable() {
          @Override
          public void run() {
            synchronized (firebaseReference) {
              // Block executor until a FirebaseAnalytics instance can be obtained.
              while (firebaseReference.get() == null) {
                try {
                  firebaseReference.wait();
                } catch (InterruptedException ex) {
                  Log.e(LOG_TAG, "Error waiting for a FirebaseAnalytics instance.", ex);
                }
              }
            }
          }
        });
  }

  /**
   * Registers application {@link Context} required to obtain an instance of {@link
   * FirebaseAnalytics}.
   *
   * @param context the {@link Context} used to initialize Firebase Analytics. Call is a no-op if
   *     {@code null} or if an instance of {@link FirebaseAnalytics} has been already obtained.
   */
  public GoogleAnalyticsAdapter registerContext(@Nullable Context context) {
    if (context == null) {
      return this;
    }
    if (firebaseReference.get() == null) {
      synchronized (firebaseReference) {
        // compareAndSet guarantees only the first caller installs the instance and wakes
        // the executor's gate task.
        if (firebaseReference.compareAndSet(null, FirebaseAnalytics.getInstance(context))) {
          firebaseReference.notifyAll();
        }
      }
    }
    return this;
  }

  /**
   * Wraps call to {@link FirebaseAnalytics#logEvent(String, Bundle)}.
   *
   * <p>Note: Call will only be executed once an application {@link Context} is registered using
   * {@link GoogleAnalyticsAdapter#registerContext(Context)}.
   *
   * @param rawName wrapped-SDK event name; blacklisted names are dropped silently
   * @param rawParams event params; null values and blacklisted names are skipped
   */
  public void logEvent(@Nullable String rawName, @Nullable Map<String, Object> rawParams) {
    if (blacklistedEventNames.contains(rawName)) {
      return;
    }
    final String name =
        sanitizeName(
            mapName(eventNameMap, rawName),
            MAX_EVENT_NAME_LENGTH,
            emptyEventName,
            RESTRICTED_EVENT_NAMES);
    if (name == null) {
      Log.w(LOG_TAG, String.format("Event %s sanitized to \'_\'. Dropping event.", rawName));
      return;
    }
    final Bundle params = new Bundle();
    // Tag every event with the wrapped SDK name so wrapper traffic is identifiable.
    params.putString(WRAPPER_PARAM_NAME, wrappedSdkName);
    if (rawParams != null) {
      for (Entry<String, Object> rawParam : rawParams.entrySet()) {
        if (rawParam.getValue() == null) {
          continue;
        }
        String paramName = rawParam.getKey();
        if (blacklistedParamNames.contains(paramName)) {
          continue;
        }
        String sanitizedParamName =
            sanitizeName(
                mapName(paramNameMap, paramName),
                MAX_PARAM_NAME_LENGTH,
                emptyParamName,
                RESTRICTED_PARAM_NAMES);
        if (sanitizedParamName == null) {
          Log.w(
              LOG_TAG,
              String.format("Parameter %s sanitized to \'_\'. Dropping param.", paramName));
          continue;
        }
        addParamToBundle(
            params, sanitizedParamName, rawParam.getValue(), MAX_PARAM_STRING_VALUE_LENGTH);
        // Stop once the bundle (wrapper param included) reaches the GA4F param cap.
        if (params.size() >= MAX_PARAM_COUNT) {
          break;
        }
      }
    }
    executor.execute(
        new Runnable() {
          @Override
          public void run() {
            getFirebaseAnalytics().logEvent(name, params);
          }
        });
  }

  /**
   * Wraps call to {@link FirebaseAnalytics#setUserProperty(String, String)}.
   *
   * <p>Note: Call will only be executed once an application {@link Context} is registered using
   * {@link GoogleAnalyticsAdapter#registerContext(Context)}.
   *
   * @param rawName wrapped-SDK user property name; blacklisted names are dropped silently
   * @param rawValue value, truncated to the maximum user property value length
   */
  public void setUserProperty(@Nullable String rawName, @Nullable String rawValue) {
    // FIX: the original guarded this check with "blacklistedEventNames != null" — a
    // copy/paste slip from logEvent. The field is @NonNull (the Builder substitutes an
    // empty list), so the user-property blacklist check should stand on its own.
    if (blacklistedUserPropertyNames.contains(rawName)) {
      return;
    }
    // Names colliding with Firebase experiment properties get the sanitized prefix so they
    // are not mistaken for internal state.
    if (rawName != null && rawName.startsWith("firebase_exp_")) {
      rawName = sanitizedNamePrefix + rawName;
    }
    final String name =
        sanitizeName(
            mapName(userPropertyNameMap, rawName),
            MAX_USER_PROPERTY_NAME_LENGTH,
            emptyUserPropertyName,
            RESTRICTED_USER_PROPERTY_NAMES);
    if (name == null) {
      Log.w(
          LOG_TAG,
          String.format("User Property %s sanitized to \'_\'. Dropping user property.", rawName));
      return;
    }
    final String value = trimString(rawValue, MAX_USER_PROPERTY_VALUE_LENGTH);
    executor.execute(
        new Runnable() {
          @Override
          public void run() {
            getFirebaseAnalytics().setUserProperty(name, value);
          }
        });
  }

  /**
   * Wraps call to {@link FirebaseAnalytics#setAnalyticsCollectionEnabled(boolean)}.
   *
   * <p>Note: Call will only be executed once an application {@link Context} is registered using
   * {@link GoogleAnalyticsAdapter#registerContext(Context)}.
   */
  public void setAnalyticsCollectionEnabled(final boolean enabled) {
    executor.execute(
        new Runnable() {
          @Override
          public void run() {
            getFirebaseAnalytics().setAnalyticsCollectionEnabled(enabled);
          }
        });
  }

  /**
   * Wraps call to {@link FirebaseAnalytics#setUserId(String)}.
   *
   * <p>Note: Call will only be executed once an application {@link Context} is registered using
   * {@link GoogleAnalyticsAdapter#registerContext(Context)}.
   */
  public void setUserId(@Nullable String rawUserId) {
    final String userId = trimString(rawUserId, MAX_USER_ID_VALUE_LENGTH);
    executor.execute(
        new Runnable() {
          @Override
          public void run() {
            getFirebaseAnalytics().setUserId(userId);
          }
        });
  }

  /**
   * Wraps call to {@link FirebaseAnalytics#setSessionTimeoutDuration(long)}.
   *
   * <p>Note: Call will only be executed once an application {@link Context} is registered using
   * {@link GoogleAnalyticsAdapter#registerContext(Context)}.
   */
  public void setSessionTimeoutDuration(@IntRange(from = 1) final long milliseconds) {
    executor.execute(
        new Runnable() {
          @Override
          public void run() {
            getFirebaseAnalytics().setSessionTimeoutDuration(milliseconds);
          }
        });
  }

  // Reads the installed FirebaseAnalytics instance; executor tasks only run after the gate
  // task has confirmed it is non-null.
  private FirebaseAnalytics getFirebaseAnalytics() {
    synchronized (firebaseReference) {
      return firebaseReference.get();
    }
  }

  /**
   * Returns event, param, or user property name formatted to conform to Google Analytics for
   * Firebase public name rules.
   *
   * <p>A valid public name:
   *
   * <p>- is non-null
   *
   * <p>- does not start with a reserved prefix ({@link
   * GoogleAnalyticsAdapter#RESERVED_NAME_PREFIXES})
   *
   * <p>- starts with an alphabetic character
   *
   * <p>- only contains alphanumeric characters and underscores ("_")
   *
   * <p>- is not longer than the maximum length
   *
   * <p>- is not contained in the restricted internal names list
   *
   * @param name name to be sanitized
   * @param maxLength maximum character length of the output name
   * @param defaultValue name returned if name is null or empty
   * @param restrictedNames list of names reserved by Google Analytics for Firebase
   * @return formatted public name or null
   */
  @Nullable
  String sanitizeName(
      @Nullable String name,
      @IntRange(from = 1) int maxLength,
      @Nullable String defaultValue,
      @NonNull List<String> restrictedNames) {
    if (name == null || name.isEmpty()) {
      return defaultValue;
    }
    StringBuilder builder = new StringBuilder();
    // If name doesn't start with an alphabetic character, prepend prefix to name (additional
    // characters may cause string to exceed the length limit and cause the name to be truncated)
    boolean prependPrefix = !isValidNameCharacter(name.codePointAt(0), true);
    // Remove invalid characters (only letters, digits, and _ are allowed)
    int offset = 0;
    boolean lastAddedValidCharacter = true;
    boolean hasAlphaNumericChar = false;
    while (offset < name.length()) {
      int codepoint = name.codePointAt(offset);
      if (isValidNameCharacter(codepoint, false)) {
        builder.appendCodePoint(codepoint);
        hasAlphaNumericChar |= codepoint != (int) '_';
        lastAddedValidCharacter = true;
      } else if (lastAddedValidCharacter) {
        // Add a single underscore for an invalid subsequence
        builder.append('_');
        lastAddedValidCharacter = false;
      }
      offset += Character.charCount(codepoint);
    }
    if (!hasAlphaNumericChar) {
      // Drop names that collapse to a series of underscores
      return null;
    }
    if (prependPrefix
        || nameStartsWithReservedPrefix(builder.toString())
        || restrictedNames.contains(builder.toString())) {
      // If name starts with "google_", "firebase_", or "ga_", or is a restricted internal name,
      // prepend prefix to name (additional characters may cause string to exceed the length limit
      // and cause the name to be truncated)
      builder.insert(0, sanitizedNamePrefix);
    }
    return trimString(builder.toString(), maxLength);
  }

  /**
   * Returns {@code true} if name is a properly formatted Google Analytics for Firebase public
   * name: non-null, not starting with a reserved prefix, starting with an alphabetic character,
   * and containing only alphanumeric characters and underscores ("_").
   *
   * @param name name to be checked
   * @return if name is properly formatted
   */
  static boolean isValidPublicNameFormat(String name) {
    if (name == null || name.length() == 0 || nameStartsWithReservedPrefix(name)) {
      return false;
    }
    for (int offset = 0; offset < name.length(); offset++) {
      if (!isValidNameCharacter(name.codePointAt(offset), offset == 0)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Returns {@code true} if name is a properly formatted Google Analytics for Firebase public
   * name (see {@link #isValidPublicNameFormat(String)}) that additionally does not exceed {@code
   * maxLength} and is not contained in {@code restrictedNames}.
   *
   * @param name name to be checked
   * @param maxLength maximum character length of the output name
   * @param restrictedNames list of names reserved by Google Analytics for Firebase
   * @return if name is properly formatted
   */
  static boolean isValidPublicName(
      String name, @IntRange(from = 1) int maxLength, @NonNull List<String> restrictedNames) {
    return isValidPublicNameFormat(name)
        && !isStringTooLong(name, maxLength)
        && !restrictedNames.contains(name);
  }

  /** Returns {@code true} if the {@code String} value exceeds the maximum character length. */
  static boolean isStringTooLong(@NonNull String string, @IntRange(from = 1) int maxLength) {
    // Counts code points, not chars, so supplementary characters count once.
    return string.codePointCount(0, string.length()) > maxLength;
  }

  /**
   * If {@code String} value exceeds maximum character length, returns the {@code String} value
   * truncated to the maximum character length, otherwise returns the original value.
   */
  static String trimString(@Nullable String string, @IntRange(from = 1) int maxLength) {
    if (string == null) {
      return null;
    }
    if (maxLength <= 0 || string.isEmpty()) {
      return "";
    } else if (isStringTooLong(string, maxLength)) {
      // Truncate on a code-point boundary to avoid splitting surrogate pairs.
      return string.substring(0, string.offsetByCodePoints(0, maxLength));
    } else {
      return string;
    }
  }

  /**
   * Maps predefined event, param, and user property constants of a wrapped SDK to their Google
   * Analytics for Firebase equivalent.
   *
   * @param map map of other wrapped SDK event, param, or user property name (key) to the GA4F
   *     equivalent (value)
   * @param name wrapped SDK event, param, or user property name
   * @return GA4F equivalent name (if present in map), otherwise the supplied name
   */
  static String mapName(@NonNull Map<String, String> map, @Nullable String name) {
    if (map.containsKey(name)) {
      return map.get(name);
    }
    return name;
  }

  /**
   * Returns {@code true} if codepoint can be used in an event, param, user property name,
   * otherwise returns {@code false}. All event, param, user property names may only contain
   * alphanumeric characters and underscores ("_"), and must start with an alphabetic character.
   */
  static boolean isValidNameCharacter(int codepoint, boolean isFirstCharacter) {
    if (isFirstCharacter) {
      return Character.isLetter(codepoint);
    } else {
      return Character.isLetterOrDigit(codepoint) || codepoint == (int) '_';
    }
  }

  /**
   * Returns {@code true} if name starts with a reserved prefix ({@link
   * GoogleAnalyticsAdapter#RESERVED_NAME_PREFIXES}), otherwise returns {@code false}.
   */
  static boolean nameStartsWithReservedPrefix(@Nullable String name) {
    if (name == null) {
      return false;
    }
    for (String prefix : RESERVED_NAME_PREFIXES) {
      if (name.startsWith(prefix)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Converts param value to supported type ({@code String}, {@code long}, {@code double}) and
   * adds key-value pair to the supplied bundle. If value is converted to a {@code String}, it
   * will be truncated if it exceeds the maximum length.
   */
  static Bundle addParamToBundle(
      @NonNull Bundle bundle,
      @NonNull String key,
      @NonNull Object value,
      @IntRange(from = 1) int maxStringLength) {
    String stringValueToAdd = null;
    if (value instanceof Float) {
      bundle.putDouble(key, ((Float) value).doubleValue());
    } else if (value instanceof Double) {
      bundle.putDouble(key, ((Double) value));
    } else if (value instanceof Byte) {
      bundle.putLong(key, ((Byte) value).longValue());
    } else if (value instanceof Short) {
      bundle.putLong(key, ((Short) value).longValue());
    } else if (value instanceof Integer) {
      bundle.putLong(key, ((Integer) value).longValue());
    } else if (value instanceof Long) {
      bundle.putLong(key, (Long) value);
    } else if (value instanceof Character
        || value instanceof String
        || value instanceof CharSequence) {
      stringValueToAdd = String.valueOf(value);
    } else if (value instanceof float[]) {
      stringValueToAdd = Arrays.toString((float[]) value);
    } else if (value instanceof double[]) {
      stringValueToAdd = Arrays.toString((double[]) value);
    } else if (value instanceof byte[]) {
      stringValueToAdd = Arrays.toString((byte[]) value);
    } else if (value instanceof short[]) {
      stringValueToAdd = Arrays.toString((short[]) value);
    } else if (value instanceof int[]) {
      stringValueToAdd = Arrays.toString((int[]) value);
    } else if (value instanceof long[]) {
      stringValueToAdd = Arrays.toString((long[]) value);
    } else if (value instanceof char[]) {
      stringValueToAdd = Arrays.toString((char[]) value);
    } else if (value instanceof Object[]) {
      stringValueToAdd = Arrays.toString((Object[]) value);
    } else {
      // Unknown types fall back to their toString() representation.
      stringValueToAdd = value.toString();
    }
    if (stringValueToAdd != null) {
      bundle.putString(key, trimString(stringValueToAdd, maxStringLength));
    }
    return bundle;
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.databoxedge.models; import com.azure.core.util.Context; import com.azure.resourcemanager.databoxedge.fluent.models.ShareInner; import java.util.List; /** An immutable client-side representation of Share. */ public interface Share { /** * Gets the id property: Fully qualified resource Id for the resource. * * @return the id value. */ String id(); /** * Gets the name property: The name of the resource. * * @return the name value. */ String name(); /** * Gets the type property: The type of the resource. * * @return the type value. */ String type(); /** * Gets the description property: Description for the share. * * @return the description value. */ String description(); /** * Gets the shareStatus property: Current status of the share. * * @return the shareStatus value. */ ShareStatus shareStatus(); /** * Gets the monitoringStatus property: Current monitoring status of the share. * * @return the monitoringStatus value. */ MonitoringStatus monitoringStatus(); /** * Gets the azureContainerInfo property: Azure container mapping for the share. * * @return the azureContainerInfo value. */ AzureContainerInfo azureContainerInfo(); /** * Gets the accessProtocol property: Access protocol to be used by the share. * * @return the accessProtocol value. */ ShareAccessProtocol accessProtocol(); /** * Gets the userAccessRights property: Mapping of users and corresponding access rights on the share (required for * SMB protocol). * * @return the userAccessRights value. */ List<UserAccessRight> userAccessRights(); /** * Gets the clientAccessRights property: List of IP addresses and corresponding access rights on the share(required * for NFS protocol). * * @return the clientAccessRights value. 
*/ List<ClientAccessRight> clientAccessRights(); /** * Gets the refreshDetails property: Details of the refresh job on this share. * * @return the refreshDetails value. */ RefreshDetails refreshDetails(); /** * Gets the shareMappings property: Share mount point to the role. * * @return the shareMappings value. */ List<MountPointMap> shareMappings(); /** * Gets the dataPolicy property: Data policy of the share. * * @return the dataPolicy value. */ DataPolicy dataPolicy(); /** * Gets the inner com.azure.resourcemanager.databoxedge.fluent.models.ShareInner object. * * @return the inner object. */ ShareInner innerModel(); /** The entirety of the Share definition. */ interface Definition extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithShareStatus, DefinitionStages.WithMonitoringStatus, DefinitionStages.WithAccessProtocol, DefinitionStages.WithCreate { } /** The Share definition stages. */ interface DefinitionStages { /** The first stage of the Share definition. */ interface Blank extends WithParentResource { } /** The stage of the Share definition allowing to specify parent resource. */ interface WithParentResource { /** * Specifies deviceName, resourceGroupName. * * @param deviceName The device name. * @param resourceGroupName The resource group name. * @return the next definition stage. */ WithShareStatus withExistingDataBoxEdgeDevice(String deviceName, String resourceGroupName); } /** The stage of the Share definition allowing to specify shareStatus. */ interface WithShareStatus { /** * Specifies the shareStatus property: Current status of the share.. * * @param shareStatus Current status of the share. * @return the next definition stage. */ WithMonitoringStatus withShareStatus(ShareStatus shareStatus); } /** The stage of the Share definition allowing to specify monitoringStatus. */ interface WithMonitoringStatus { /** * Specifies the monitoringStatus property: Current monitoring status of the share.. 
* * @param monitoringStatus Current monitoring status of the share. * @return the next definition stage. */ WithAccessProtocol withMonitoringStatus(MonitoringStatus monitoringStatus); } /** The stage of the Share definition allowing to specify accessProtocol. */ interface WithAccessProtocol { /** * Specifies the accessProtocol property: Access protocol to be used by the share.. * * @param accessProtocol Access protocol to be used by the share. * @return the next definition stage. */ WithCreate withAccessProtocol(ShareAccessProtocol accessProtocol); } /** * The stage of the Share definition which contains all the minimum required properties for the resource to be * created, but also allows for any other optional properties to be specified. */ interface WithCreate extends DefinitionStages.WithDescription, DefinitionStages.WithAzureContainerInfo, DefinitionStages.WithUserAccessRights, DefinitionStages.WithClientAccessRights, DefinitionStages.WithRefreshDetails, DefinitionStages.WithDataPolicy { /** * Executes the create request. * * @return the created resource. */ Share create(); /** * Executes the create request. * * @param context The context to associate with this operation. * @return the created resource. */ Share create(Context context); } /** The stage of the Share definition allowing to specify description. */ interface WithDescription { /** * Specifies the description property: Description for the share.. * * @param description Description for the share. * @return the next definition stage. */ WithCreate withDescription(String description); } /** The stage of the Share definition allowing to specify azureContainerInfo. */ interface WithAzureContainerInfo { /** * Specifies the azureContainerInfo property: Azure container mapping for the share.. * * @param azureContainerInfo Azure container mapping for the share. * @return the next definition stage. 
*/ WithCreate withAzureContainerInfo(AzureContainerInfo azureContainerInfo); } /** The stage of the Share definition allowing to specify userAccessRights. */ interface WithUserAccessRights { /** * Specifies the userAccessRights property: Mapping of users and corresponding access rights on the share * (required for SMB protocol).. * * @param userAccessRights Mapping of users and corresponding access rights on the share (required for SMB * protocol). * @return the next definition stage. */ WithCreate withUserAccessRights(List<UserAccessRight> userAccessRights); } /** The stage of the Share definition allowing to specify clientAccessRights. */ interface WithClientAccessRights { /** * Specifies the clientAccessRights property: List of IP addresses and corresponding access rights on the * share(required for NFS protocol).. * * @param clientAccessRights List of IP addresses and corresponding access rights on the share(required for * NFS protocol). * @return the next definition stage. */ WithCreate withClientAccessRights(List<ClientAccessRight> clientAccessRights); } /** The stage of the Share definition allowing to specify refreshDetails. */ interface WithRefreshDetails { /** * Specifies the refreshDetails property: Details of the refresh job on this share.. * * @param refreshDetails Details of the refresh job on this share. * @return the next definition stage. */ WithCreate withRefreshDetails(RefreshDetails refreshDetails); } /** The stage of the Share definition allowing to specify dataPolicy. */ interface WithDataPolicy { /** * Specifies the dataPolicy property: Data policy of the share.. * * @param dataPolicy Data policy of the share. * @return the next definition stage. */ WithCreate withDataPolicy(DataPolicy dataPolicy); } } /** * Begins update for the Share resource. * * @return the stage of resource update. */ Share.Update update(); /** The template for Share update. 
*/ interface Update extends UpdateStages.WithDescription, UpdateStages.WithShareStatus, UpdateStages.WithMonitoringStatus, UpdateStages.WithAzureContainerInfo, UpdateStages.WithAccessProtocol, UpdateStages.WithUserAccessRights, UpdateStages.WithClientAccessRights, UpdateStages.WithRefreshDetails, UpdateStages.WithDataPolicy { /** * Executes the update request. * * @return the updated resource. */ Share apply(); /** * Executes the update request. * * @param context The context to associate with this operation. * @return the updated resource. */ Share apply(Context context); } /** The Share update stages. */ interface UpdateStages { /** The stage of the Share update allowing to specify description. */ interface WithDescription { /** * Specifies the description property: Description for the share.. * * @param description Description for the share. * @return the next definition stage. */ Update withDescription(String description); } /** The stage of the Share update allowing to specify shareStatus. */ interface WithShareStatus { /** * Specifies the shareStatus property: Current status of the share.. * * @param shareStatus Current status of the share. * @return the next definition stage. */ Update withShareStatus(ShareStatus shareStatus); } /** The stage of the Share update allowing to specify monitoringStatus. */ interface WithMonitoringStatus { /** * Specifies the monitoringStatus property: Current monitoring status of the share.. * * @param monitoringStatus Current monitoring status of the share. * @return the next definition stage. */ Update withMonitoringStatus(MonitoringStatus monitoringStatus); } /** The stage of the Share update allowing to specify azureContainerInfo. */ interface WithAzureContainerInfo { /** * Specifies the azureContainerInfo property: Azure container mapping for the share.. * * @param azureContainerInfo Azure container mapping for the share. * @return the next definition stage. 
*/ Update withAzureContainerInfo(AzureContainerInfo azureContainerInfo); } /** The stage of the Share update allowing to specify accessProtocol. */ interface WithAccessProtocol { /** * Specifies the accessProtocol property: Access protocol to be used by the share.. * * @param accessProtocol Access protocol to be used by the share. * @return the next definition stage. */ Update withAccessProtocol(ShareAccessProtocol accessProtocol); } /** The stage of the Share update allowing to specify userAccessRights. */ interface WithUserAccessRights { /** * Specifies the userAccessRights property: Mapping of users and corresponding access rights on the share * (required for SMB protocol).. * * @param userAccessRights Mapping of users and corresponding access rights on the share (required for SMB * protocol). * @return the next definition stage. */ Update withUserAccessRights(List<UserAccessRight> userAccessRights); } /** The stage of the Share update allowing to specify clientAccessRights. */ interface WithClientAccessRights { /** * Specifies the clientAccessRights property: List of IP addresses and corresponding access rights on the * share(required for NFS protocol).. * * @param clientAccessRights List of IP addresses and corresponding access rights on the share(required for * NFS protocol). * @return the next definition stage. */ Update withClientAccessRights(List<ClientAccessRight> clientAccessRights); } /** The stage of the Share update allowing to specify refreshDetails. */ interface WithRefreshDetails { /** * Specifies the refreshDetails property: Details of the refresh job on this share.. * * @param refreshDetails Details of the refresh job on this share. * @return the next definition stage. */ Update withRefreshDetails(RefreshDetails refreshDetails); } /** The stage of the Share update allowing to specify dataPolicy. */ interface WithDataPolicy { /** * Specifies the dataPolicy property: Data policy of the share.. * * @param dataPolicy Data policy of the share. 
* @return the next definition stage. */ Update withDataPolicy(DataPolicy dataPolicy); } } /** * Refreshes the resource to sync with Azure. * * @return the refreshed resource. */ Share refresh(); /** * Refreshes the resource to sync with Azure. * * @param context The context to associate with this operation. * @return the refreshed resource. */ Share refresh(Context context); }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.server.quorum; import java.io.IOException; import java.io.PrintWriter; import java.util.Objects; import java.util.stream.Collectors; import org.apache.zookeeper.ZooDefs.OpCode; import org.apache.zookeeper.jmx.MBeanRegistry; import org.apache.zookeeper.server.DataTreeBean; import org.apache.zookeeper.server.FinalRequestProcessor; import org.apache.zookeeper.server.PrepRequestProcessor; import org.apache.zookeeper.server.Request; import org.apache.zookeeper.server.RequestProcessor; import org.apache.zookeeper.server.ServerCnxn; import org.apache.zookeeper.server.ZKDatabase; import org.apache.zookeeper.server.ZooKeeperServer; import org.apache.zookeeper.server.ZooKeeperServerBean; import org.apache.zookeeper.server.persistence.FileTxnSnapLog; /** * A ZooKeeperServer which comes into play when peer is partitioned from the * majority. Handles read-only clients, but drops connections from not-read-only * ones. * <p> * The very first processor in the chain of request processors is a * ReadOnlyRequestProcessor which drops state-changing requests. 
 */
public class ReadOnlyZooKeeperServer extends ZooKeeperServer {

    /** The quorum peer on whose behalf this read-only server runs. */
    protected final QuorumPeer self;

    // Set once shutdown() has run; startup() checks it so a late startup cannot
    // resurrect a server that was already torn down.
    private volatile boolean shutdown = false;

    ReadOnlyZooKeeperServer(FileTxnSnapLog logFactory, QuorumPeer self, ZKDatabase zkDb) {
        super(
            logFactory,
            self.tickTime,
            self.minSessionTimeout,
            self.maxSessionTimeout,
            self.clientPortListenBacklog,
            zkDb,
            self.getInitialConfig(),
            self.isReconfigEnabled());
        this.self = self;
    }

    /**
     * Builds the request pipeline:
     * ReadOnlyRequestProcessor (drops state-changing requests)
     * -> PrepRequestProcessor -> FinalRequestProcessor.
     * Both threaded processors are started here.
     */
    @Override
    protected void setupRequestProcessors() {
        RequestProcessor finalProcessor = new FinalRequestProcessor(this);
        RequestProcessor prepProcessor = new PrepRequestProcessor(this, finalProcessor);
        ((PrepRequestProcessor) prepProcessor).start();
        firstProcessor = new ReadOnlyRequestProcessor(this, prepProcessor);
        ((ReadOnlyRequestProcessor) firstProcessor).start();
    }

    /**
     * Starts the server and registers it with the peer and admin server.
     * A no-op when shutdown() already ran (see the {@code shutdown} flag).
     */
    @Override
    public synchronized void startup() {
        // check to avoid startup follows shutdown
        if (shutdown) {
            LOG.warn("Not starting Read-only server as startup follows shutdown!");
            return;
        }
        registerJMX(new ReadOnlyBean(this), self.jmxLocalPeerBean);
        super.startup();
        self.setZooKeeperServer(this);
        self.adminServer.setZooKeeperServer(this);
        LOG.info("Read-only server started");
    }

    /**
     * Uses a LearnerSessionTracker so locally-created sessions can later be
     * upgraded when the peer rejoins the quorum.
     */
    @Override
    public void createSessionTracker() {
        sessionTracker = new LearnerSessionTracker(
            this, getZKDatabase().getSessionWithTimeOuts(),
            this.tickTime, self.getId(), self.areLocalSessionsEnabled(),
            getZooKeeperServerListener());
    }

    @Override
    protected void startSessionTracker() {
        ((LearnerSessionTracker) sessionTracker).start();
    }

    /**
     * Marks session requests as local where possible: new sessions are always
     * local (if enabled); closeSession is local only when the tracker knows the
     * session locally, otherwise a global close is submitted with a warning.
     */
    @Override
    protected void setLocalSessionFlag(Request si) {
        switch (si.type) {
        case OpCode.createSession:
            if (self.areLocalSessionsEnabled()) {
                si.setLocalSession(true);
            }
            break;
        case OpCode.closeSession:
            if (((UpgradeableSessionTracker) sessionTracker).isLocalSession(si.sessionId)) {
                si.setLocalSession(true);
            } else {
                LOG.warn("Submitting global closeSession request for session 0x{} in ReadOnly mode",
                         Long.toHexString(si.sessionId));
            }
            break;
        default:
            break;
        }
    }

    /**
     * Rejects reconnection of global sessions: a read-only server cannot renew
     * them (that requires the quorum), so the connection is closed instead.
     */
    @Override
    protected void validateSession(ServerCnxn cnxn, long sessionId) throws IOException {
        if (((LearnerSessionTracker) sessionTracker).isGlobalSession(sessionId)) {
            String msg = "Refusing global session reconnection in RO mode " + cnxn.getRemoteSocketAddress();
            LOG.info(msg);
            throw new ServerCnxn.CloseRequestException(msg, ServerCnxn.DisconnectReason.RENEW_GLOBAL_SESSION_IN_RO_MODE);
        }
    }

    /** Registers only the data-tree bean; the server bean is registered separately below. */
    @Override
    protected void registerJMX() {
        // register with JMX
        try {
            jmxDataTreeBean = new DataTreeBean(getZKDatabase().getDataTree());
            MBeanRegistry.getInstance().register(jmxDataTreeBean, jmxServerBean);
        } catch (Exception e) {
            LOG.warn("Failed to register with JMX", e);
            jmxDataTreeBean = null;
        }
    }

    /** Registers the server bean under the local peer bean (called from startup()). */
    public void registerJMX(ZooKeeperServerBean serverBean, LocalPeerBean localPeerBean) {
        // register with JMX
        try {
            jmxServerBean = serverBean;
            MBeanRegistry.getInstance().register(serverBean, localPeerBean);
        } catch (Exception e) {
            LOG.warn("Failed to register with JMX", e);
            jmxServerBean = null;
        }
    }

    @Override
    protected void unregisterJMX() {
        // unregister from JMX
        try {
            if (jmxDataTreeBean != null) {
                MBeanRegistry.getInstance().unregister(jmxDataTreeBean);
            }
        } catch (Exception e) {
            LOG.warn("Failed to unregister with JMX", e);
        }
        jmxDataTreeBean = null;
    }

    /** Counterpart of registerJMX(serverBean, localPeerBean); called from shutdown(). */
    protected void unregisterJMX(ZooKeeperServer zks) {
        // unregister from JMX
        try {
            if (jmxServerBean != null) {
                MBeanRegistry.getInstance().unregister(jmxServerBean);
            }
        } catch (Exception e) {
            LOG.warn("Failed to unregister with JMX", e);
        }
        jmxServerBean = null;
    }

    @Override
    public String getState() {
        return "read-only";
    }

    /**
     * Returns the id of the associated QuorumPeer, which will do for a unique
     * id of this server.
     */
    @Override
    public long getServerId() {
        return self.getId();
    }

    /**
     * Detaches this server from the peer and admin server, closes all client
     * connections, then shuts the base server down. Sets the {@code shutdown}
     * flag first so a concurrent startup() becomes a no-op.
     */
    @Override
    public synchronized void shutdown() {
        if (!canShutdown()) {
            LOG.debug("ZooKeeper server is not running, so not proceeding to shutdown!");
            return;
        }
        shutdown = true;
        unregisterJMX(this);

        // set peer's server to null
        self.setZooKeeperServer(null);
        // clear all the connections
        self.closeAllConnections();

        self.adminServer.setZooKeeperServer(null);

        // shutdown the server itself
        super.shutdown();
    }

    /** Appends quorum-specific settings to the base server's config dump. */
    @Override
    public void dumpConf(PrintWriter pwriter) {
        super.dumpConf(pwriter);

        pwriter.print("initLimit=");
        pwriter.println(self.getInitLimit());
        pwriter.print("syncLimit=");
        pwriter.println(self.getSyncLimit());
        pwriter.print("electionAlg=");
        pwriter.println(self.getElectionType());
        pwriter.print("electionPort=");
        pwriter.println(self.getElectionAddress().getAllPorts()
                .stream().map(Objects::toString).collect(Collectors.joining("|")));
        pwriter.print("quorumPort=");
        pwriter.println(self.getQuorumAddress().getAllPorts()
                .stream().map(Objects::toString).collect(Collectors.joining("|")));
        pwriter.print("peerType=");
        pwriter.println(self.getLearnerType().ordinal());
    }

    @Override
    protected void setState(State state) {
        this.state = state;
    }

}
package org.sagebionetworks.repo.model.dbo.dao;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL;
import org.sagebionetworks.repo.model.TrashedEntity;
import org.sagebionetworks.repo.model.dao.TrashCanDao;
import org.sagebionetworks.repo.model.dbo.DBOBasicDao;
import org.sagebionetworks.repo.model.dbo.persistence.DBOTrashedEntity;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Integration tests for the trash-can DAO: create/delete/exists/count/range
 * queries and the "trash leaves" queries used by the purge worker.
 *
 * <p>Each test starts and ends with an empty trash can (see {@link #before()}
 * and {@link #after()}).
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:jdomodels-test-context.xml" })
public class DBOTrashCanDaoImplAutowiredTest {

	@Autowired
	private TrashCanDao trashCanDao;

	@Autowired
	private DBOBasicDao basicDao;

	// Principal id of the admin user, used as the "deleted by" user in all tests.
	private String userId;

	@Before
	public void before() throws Exception {
		userId = BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId().toString();
		clear();
		List<TrashedEntity> trashList = trashCanDao.getInRange(false, 0L, Long.MAX_VALUE);
		assertEquals(0, trashList.size());
	}

	@After
	public void after() throws Exception {
		clear();
		List<TrashedEntity> trashList = trashCanDao.getInRange(false, 0L, Long.MAX_VALUE);
		assertEquals(0, trashList.size());
	}

	/**
	 * Full life cycle: create two trashed entities, read them back through every
	 * query method (counts, ranges, getTrashBefore, getTrashedEntity, exists),
	 * then delete them one at a time and verify the trash can empties out.
	 */
	@Test
	public void testRoundTrip() throws Exception {
		int count = trashCanDao.getCount();
		assertEquals(0, count);
		count = trashCanDao.getCount(userId);
		assertEquals(0, count);

		List<TrashedEntity> trashList = trashCanDao.getInRange(false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(0, trashList.size());
		trashList = trashCanDao.getInRangeForUser(userId, false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(0, trashList.size());

		final String nodeName = "DBOTrashCanDaoImplAutowiredTest.testRoundTrip()";
		final String nodeId1 = KeyFactory.keyToString(555L);
		final String parentId1 = KeyFactory.keyToString(5L);
		TrashedEntity trash = trashCanDao.getTrashedEntity(userId, nodeId1);
		assertNull(trash);

		// Move node 1 to trash can
		trashCanDao.create(userId, nodeId1, nodeName, parentId1);

		count = trashCanDao.getCount();
		assertEquals(1, count);
		count = trashCanDao.getCount(userId);
		assertEquals(1, count);
		trashList = trashCanDao.getInRange(false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		trashList = trashCanDao.getInRangeForUser(userId, false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		trash = trashList.get(0);
		assertEquals(nodeId1, trash.getEntityId());
		assertEquals(nodeName, trash.getEntityName());
		assertEquals(userId, trash.getDeletedByPrincipalId());
		assertEquals(parentId1, trash.getOriginalParentId());
		assertNotNull(trash.getDeletedOn());

		// Sleep so the deletion time is strictly before timestamp1.
		Thread.sleep(1000);
		Timestamp timestamp1 = new Timestamp(System.currentTimeMillis());
		assertTrue(trash.getDeletedOn().before(timestamp1));
		trashList = trashCanDao.getTrashBefore(timestamp1);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		trash = trashList.get(0);
		assertEquals(nodeId1, trash.getEntityId());
		assertEquals(nodeName, trash.getEntityName());
		assertEquals(userId, trash.getDeletedByPrincipalId());
		assertEquals(parentId1, trash.getOriginalParentId());
		assertNotNull(trash.getDeletedOn());

		trash = trashCanDao.getTrashedEntity(userId, nodeId1);
		assertEquals(nodeId1, trash.getEntityId());
		assertEquals(nodeName, trash.getEntityName());
		assertEquals(userId, trash.getDeletedByPrincipalId());
		assertEquals(parentId1, trash.getOriginalParentId());
		assertNotNull(trash.getDeletedOn());

		// Lookup by node id alone (without the user).
		trash = trashCanDao.getTrashedEntity(nodeId1);
		assertEquals(nodeId1, trash.getEntityId());
		assertEquals(nodeName, trash.getEntityName());
		assertEquals(userId, trash.getDeletedByPrincipalId());
		assertEquals(parentId1, trash.getOriginalParentId());
		assertNotNull(trash.getDeletedOn());
		trash = trashCanDao.getTrashedEntity("syn3829195");
		assertNull(trash);

		count = trashCanDao.getCount(userId);
		assertEquals(1, count);
		count = trashCanDao.getCount(KeyFactory.keyToString(837948837783838309L)); //a random, non-existing user
		assertEquals(0, count);
		boolean exists = trashCanDao.exists(userId, nodeId1);
		assertTrue(exists);
		exists = trashCanDao.exists(KeyFactory.keyToString(2839238478539L), nodeId1);
		assertFalse(exists);
		exists = trashCanDao.exists(userId, KeyFactory.keyToString(118493838393848L));
		assertFalse(exists);

		// Move node 2 to trash can
		final String nodeName2 = "DBOTrashCanDaoImplAutowiredTest.testRoundTrip() 2";
		final String nodeId2 = KeyFactory.keyToString(666L);
		final String parentId2 = KeyFactory.keyToString(6L);
		trashCanDao.create(userId, nodeId2, nodeName2, parentId2);

		trashList = trashCanDao.getInRangeForUser(userId, false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(2, trashList.size());
		trashList = trashCanDao.getInRange(false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(2, trashList.size());
		count = trashCanDao.getCount(userId);
		assertEquals(2, count);
		count = trashCanDao.getCount();
		assertEquals(2, count);
		exists = trashCanDao.exists(userId, nodeId2);
		assertTrue(exists);

		// Only node 1 was trashed before timestamp1; both were before timestamp2.
		trashList = trashCanDao.getTrashBefore(timestamp1);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		assertEquals(nodeId1, trashList.get(0).getEntityId());
		Thread.sleep(1000);
		Timestamp timestamp2 = new Timestamp(System.currentTimeMillis());
		trashList = trashCanDao.getTrashBefore(timestamp2);
		assertNotNull(trashList);
		assertEquals(2, trashList.size());

		// Delete node 1; only node 2 should remain.
		trashCanDao.delete(userId, nodeId1);
		trashList = trashCanDao.getInRange(false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		trashList = trashCanDao.getInRangeForUser(userId, false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		trash = trashCanDao.getTrashedEntity(userId, nodeId1);
		assertNull(trash);
		trash = trashList.get(0);
		assertEquals(nodeId2, trash.getEntityId());
		assertEquals(nodeName2, trash.getEntityName());
		assertEquals(userId, trash.getDeletedByPrincipalId());
		assertEquals(parentId2, trash.getOriginalParentId());
		assertNotNull(trash.getDeletedOn());
		trash = trashCanDao.getTrashedEntity(userId, nodeId2);
		assertEquals(nodeId2, trash.getEntityId());
		assertEquals(nodeName2, trash.getEntityName());
		assertEquals(userId, trash.getDeletedByPrincipalId());
		assertEquals(parentId2, trash.getOriginalParentId());
		assertNotNull(trash.getDeletedOn());
		count = trashCanDao.getCount();
		assertEquals(1, count);
		count = trashCanDao.getCount(userId);
		assertEquals(1, count);
		exists = trashCanDao.exists(userId, nodeId2);
		assertTrue(exists);
		exists = trashCanDao.exists(userId, nodeId1);
		assertFalse(exists);

		// Delete node 2; the trash can is empty again.
		trashCanDao.delete(userId, nodeId2);
		trashList = trashCanDao.getInRange(false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(0, trashList.size());
		trashList = trashCanDao.getInRangeForUser(userId, false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(0, trashList.size());
		count = trashCanDao.getCount();
		assertEquals(0, count);
		count = trashCanDao.getCount(userId);
		assertEquals(0, count);
		exists = trashCanDao.exists(userId, nodeId1);
		assertFalse(exists);
		exists = trashCanDao.exists(userId, nodeId2);
		assertFalse(exists);
		trash = trashCanDao.getTrashedEntity(userId, nodeId2);
		assertNull(trash);
	}

	/** A 260-character name exceeds the column limit and must be rejected. */
	@Test (expected=IllegalArgumentException.class)
	public void testCreateItemLongNameTooLong() {
		char[] chars = new char[260];
		Arrays.fill(chars, 'x');
		final String nodeName = new String(chars);
		final String nodeId = KeyFactory.keyToString(999L);
		final String parentId = KeyFactory.keyToString(9L);
		trashCanDao.create(userId, nodeId, nodeName, parentId);
	}

	/** A 255-character name is the boundary case and must be accepted. */
	@Test
	public void testCreateItemLongName() {
		char[] chars = new char[255];
		Arrays.fill(chars, 'x');
		final String nodeName = new String(chars);
		final String nodeId = KeyFactory.keyToString(999L);
		final String parentId = KeyFactory.keyToString(9L);
		trashCanDao.create(userId, nodeId, nodeName, parentId);
		List<TrashedEntity> trashList = trashCanDao.getInRange(false, 0L, 100L);
		assertNotNull(trashList);
		assertEquals(1, trashList.size());
		assertEquals(nodeName, trashList.get(0).getEntityName());
	}

	@Test (expected = IllegalArgumentException.class)
	public void testGetTrashLeavesNegativeNumDays(){//TODO: move to unit test?
		trashCanDao.getTrashLeaves(-1, 123);
	}

	@Test (expected = IllegalArgumentException.class)
	public void testGetTrashLeavesNegativeLimit(){//TODO: move to unit test?
		trashCanDao.getTrashLeaves(123, -1);
	}

	/** All leaves deleted in the past are returned when numDays is 0. */
	@Test
	public void testGetTrashLeavesNodesWithNoChildrenOlderThanNow(){
		final int numNodes = 5;
		final String nodeNameBase = "DBOTrashCanDaoImplAutowiredTest.testGetTrashLeavesNoChildrenOlderThanNow() Node:";
		final long nodeID = 9000L;
		final long parentID = 10L;

		assertEquals(0, trashCanDao.getCount());

		//create trash leaves
		for(int i = 0; i < numNodes; i++){
			String stringNodeID = KeyFactory.keyToString(nodeID + i);
			String stringParentID = KeyFactory.keyToString(parentID + i);
			Timestamp time = timeDaysAgo( numNodes - i);
			createTestNode(userId, stringNodeID, nodeNameBase + stringNodeID, stringParentID, time);
		}
		// Fixed argument order: expected value comes first in assertEquals.
		assertEquals(numNodes, trashCanDao.getCount());

		//test all older than now
		List<Long> trashOlderThanNow = trashCanDao.getTrashLeaves(0, 100);
		assertEquals(numNodes, trashOlderThanNow.size());
		for(int i = 0; i < numNodes; i++){
			assertTrue(trashOlderThanNow.contains(nodeID + i));
		}
	}

	/** Only leaves older than the numDays cutoff are returned. */
	@Test
	public void testGetTrashLeavesNodesWithNoChildren(){
		final String nodeNameBase = "DBOTrashCanDaoImplAutowiredTest.testGetTrashLeavesNoChildren() Node:";
		final long nodeID = 9000L;
		final long parentID = 10L;

		assertEquals(0, trashCanDao.getCount());

		//create trash leaves Node 1
		String stringNode1ID = KeyFactory.keyToString(nodeID + 1);
		String stringParent1ID = KeyFactory.keyToString(parentID + 1);
		Timestamp time1 = timeDaysAgo(3);//3 days old
		createTestNode(userId, stringNode1ID, nodeNameBase + stringNode1ID, stringParent1ID, time1);

		//create trash leaves Node 2 (fixed copy-paste typo in this comment)
		String stringNode2ID = KeyFactory.keyToString(nodeID + 2);
		String stringParent2ID = KeyFactory.keyToString(parentID + 2);
		Timestamp time2 = timeDaysAgo(1);//1 day old
		createTestNode(userId, stringNode2ID, nodeNameBase + stringNode2ID, stringParent2ID, time2);

		assertEquals(2, trashCanDao.getCount());

		int trashBefore = 2; //look for trash older than 2 days
		int limit = 100; //arbitrary number. doesn't matter here
		List<Long> trashOlderThanNumDays = trashCanDao.getTrashLeaves(trashBefore, limit);
		assertEquals(1, trashOlderThanNumDays.size());
		assertTrue(trashOlderThanNumDays.contains(nodeID + 1));//contains node 1
		assertFalse(trashOlderThanNumDays.contains(nodeID + 2));//does not contains node 2
	}

	/** Nodes whose children are also in the trash are not leaves. */
	@Test
	public void testGetTrashLeavesNodesWithChildren(){
		/*
		 Create node with 2 children that have children
		     N0
		    /  \
		   N1   N2
		   |    | \
		   N3  N4  N5
		*/
		final String nodeNameBase = "DBOTrashCanDaoImplAutowiredTest.testGetTrashLeavesNodesWithChildren() Node:";
		final long nodeIdBase = 9000L;

		//N0
		final String N0Id = KeyFactory.keyToString(nodeIdBase + 0);
		final String N0ParentId = KeyFactory.keyToString(12345L); //some random value for parent
		createTestNode(userId, N0Id, nodeNameBase + N0Id, N0ParentId, timeDaysAgo(1) );

		//N1
		final String N1Id = KeyFactory.keyToString(nodeIdBase + 1);
		final String N1ParentId = N0Id;
		createTestNode(userId, N1Id, nodeNameBase + N1Id, N1ParentId, timeDaysAgo(1) );

		//N2
		final String N2Id = KeyFactory.keyToString(nodeIdBase + 2);
		final String N2ParentId = N0Id;
		createTestNode(userId, N2Id, nodeNameBase + N2Id, N2ParentId, timeDaysAgo(1) );

		//N3
		final String N3Id = KeyFactory.keyToString(nodeIdBase + 3);
		final String N3ParentId = N1Id;
		createTestNode(userId, N3Id, nodeNameBase + N3Id, N3ParentId, timeDaysAgo(1) );

		//N4
		final String N4Id = KeyFactory.keyToString(nodeIdBase + 4);
		final String N4ParentId = N2Id;
		createTestNode(userId, N4Id, nodeNameBase + N4Id, N4ParentId, timeDaysAgo(1) );

		//N5
		final String N5Id = KeyFactory.keyToString(nodeIdBase + 5);
		final String N5ParentId = N2Id;
		createTestNode(userId, N5Id, nodeNameBase + N5Id, N5ParentId, timeDaysAgo(1) );

		//check that N3, N4, N5 are the only ones in the list
		List<Long> trashLeaves = trashCanDao.getTrashLeaves(0, 6);
		assertEquals(3, trashLeaves.size());
		assertTrue( trashLeaves.contains( KeyFactory.stringToKey(N3Id) ) );
		assertTrue( trashLeaves.contains( KeyFactory.stringToKey(N4Id) ) );
		assertTrue( trashLeaves.contains( KeyFactory.stringToKey(N5Id) ) );
	}

	@Test (expected = IllegalArgumentException.class)
	public void TestDeleteListNullList(){
		trashCanDao.delete(null);
	}

	@Test
	public void TestDeleteListEmptyList(){
		assertEquals(0, trashCanDao.delete(new ArrayList<Long>()));
	}

	@Test
	public void testDeleteListNonExistantTrash(){
		List<Long> nonExistantTrashList = new ArrayList<Long>();
		assertEquals(0, trashCanDao.delete(nonExistantTrashList) );
	}

	/** Batch delete removes exactly the requested (even-id) nodes. */
	@Test
	public void testDeleteList(){
		final int numNodes = 2;
		final String nodeNameBase = "DBOTrashCanDaoImplAutowiredTest.testDeleteList() Node:";
		final long nodeIDBase = 9000L;
		final long parentID = 10L;
		List<Long> nodesToDelete = new ArrayList<Long>();

		assertEquals(0, trashCanDao.getCount());

		for(int i = 0; i < numNodes; i++){
			long nodeID = nodeIDBase + i;
			String stringNodeID = KeyFactory.keyToString(nodeID);
			String stringParentID = KeyFactory.keyToString(parentID + i);
			trashCanDao.create(userId, stringNodeID, nodeNameBase + i, stringParentID);

			//delete the even value nodes later
			if(nodeID % 2 == 0){
				nodesToDelete.add(nodeID);
			}
		}
		assertEquals(numNodes, trashCanDao.getCount());

		trashCanDao.delete(nodesToDelete);
		assertEquals(numNodes/2 , trashCanDao.getCount());

		//check that the even nodes are all deleted
		for(int i = 0; i < numNodes; i++){
			long nodeID = nodeIDBase + i;
			//Only one of these conditions is true
			assertTrue( trashCanDao.exists(userId, KeyFactory.keyToString(nodeID)) != (nodeID % 2 == 0) );
		}
	}

	/**
	 * Returns a timestamp {@code numDays} days before now.
	 *
	 * <p>Bug fix: the original computed {@code numDays * 24 * 60 * 60 * 1000}
	 * in {@code int} arithmetic, which silently overflows for
	 * {@code numDays >= 25} (86,400,000 ms per day vs. Integer.MAX_VALUE);
	 * the multiplication is now performed in {@code long}.
	 */
	private Timestamp timeDaysAgo(int numDays){
		return new Timestamp(System.currentTimeMillis() - numDays * 24L * 60L * 60L * 1000L);
	}

	/**
	 * Basically same as create() in TrashCanDao but can specify the timestamp,
	 * which the tests need to fabricate "old" trash entries.
	 */
	private void createTestNode(String userGroupId, String nodeId, String nodeName, String parentId, Timestamp ts){
		DBOTrashedEntity dbo = new DBOTrashedEntity();
		dbo.setNodeId(KeyFactory.stringToKey(nodeId));
		dbo.setNodeName(nodeName);
		dbo.setDeletedBy(KeyFactory.stringToKey(userGroupId));
		dbo.setDeletedOn(ts);
		dbo.setParentId(KeyFactory.stringToKey(parentId));
		basicDao.createNew(dbo);
	}

	/** Deletes every trash entry owned by the test user. */
	private void clear() throws Exception {
		List<TrashedEntity> trashList = trashCanDao.getInRangeForUser(userId, false, 0L, Long.MAX_VALUE);
		for (TrashedEntity trash : trashList) {
			trashCanDao.delete(userId, trash.getEntityId());
		}
	}
}
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.Queue; import java.util.RandomAccess; import java.util.Set; import java.util.SortedSet; import javax.annotation.Nullable; /** * This class contains static utility methods that operate on or return objects * of type {@code Iterable}. Except as noted, each method has a corresponding * {@link Iterator}-based method in the {@link Iterators} class. * * <p><i>Performance notes:</i> Unless otherwise noted, all of the iterables * produced in this class are <i>lazy</i>, which means that their iterators * only advance the backing iteration when absolutely necessary. 
 *
 * <p>See the Guava User Guide article on <a href=
 * "http://code.google.com/p/guava-libraries/wiki/CollectionUtilitiesExplained#Iterables">
 * {@code Iterables}</a>.
 *
 * @author Kevin Bourrillion
 * @author Jared Levy
 * @since 2.0 (imported from Google Collections Library)
 */
@GwtCompatible(emulated = true)
public final class Iterables {
  // Static utility class; never instantiated.
  private Iterables() {}

  /** Returns an unmodifiable view of {@code iterable}. */
  public static <T> Iterable<T> unmodifiableIterable(
      final Iterable<T> iterable) {
    checkNotNull(iterable);
    if (iterable instanceof UnmodifiableIterable ||
        iterable instanceof ImmutableCollection) {
      // Already unmodifiable/immutable; wrapping again would add no protection.
      return iterable;
    }
    return new UnmodifiableIterable<T>(iterable);
  }

  /**
   * Simply returns its argument.
   *
   * <p>(An {@code ImmutableCollection} is already unmodifiable, so this
   * overload skips the view wrapper and returns the argument after a
   * null check.)
   *
   * @deprecated no need to use this
   * @since 10.0
   */
  @Deprecated public static <E> Iterable<E> unmodifiableIterable(
      ImmutableCollection<E> iterable) {
    return checkNotNull(iterable);
  }

  /**
   * View wrapper returned by {@link #unmodifiableIterable}: its iterators do
   * not support {@code remove()}; {@code toString()} delegates to the backing
   * iterable.
   */
  private static final class UnmodifiableIterable<T> extends FluentIterable<T> {
    private final Iterable<T> iterable;

    private UnmodifiableIterable(Iterable<T> iterable) {
      this.iterable = iterable;
    }

    @Override
    public Iterator<T> iterator() {
      return Iterators.unmodifiableIterator(iterable.iterator());
    }

    @Override
    public String toString() {
      return iterable.toString();
    }
    // no equals and hashCode; it would break the contract!
  }

  /**
   * Returns the number of elements in {@code iterable}.
   */
  public static int size(Iterable<?> iterable) {
    // Collection.size() is O(1) for most collections; otherwise count by
    // exhausting the iterator.
    return (iterable instanceof Collection)
        ? ((Collection<?>) iterable).size()
        : Iterators.size(iterable.iterator());
  }

  /**
   * Returns {@code true} if {@code iterable} contains any object for which {@code equals(element)}
   * is true.
*/ public static boolean contains(Iterable<?> iterable, @Nullable Object element) { if (iterable instanceof Collection) { Collection<?> collection = (Collection<?>) iterable; try { return collection.contains(element); } catch (NullPointerException e) { return false; } catch (ClassCastException e) { return false; } } return Iterators.contains(iterable.iterator(), element); } /** * Removes, from an iterable, every element that belongs to the provided * collection. * * <p>This method calls {@link Collection#removeAll} if {@code iterable} is a * collection, and {@link Iterators#removeAll} otherwise. * * @param removeFrom the iterable to (potentially) remove elements from * @param elementsToRemove the elements to remove * @return {@code true} if any element was removed from {@code iterable} */ public static boolean removeAll( Iterable<?> removeFrom, Collection<?> elementsToRemove) { return (removeFrom instanceof Collection) ? ((Collection<?>) removeFrom).removeAll(checkNotNull(elementsToRemove)) : Iterators.removeAll(removeFrom.iterator(), elementsToRemove); } /** * Removes, from an iterable, every element that does not belong to the * provided collection. * * <p>This method calls {@link Collection#retainAll} if {@code iterable} is a * collection, and {@link Iterators#retainAll} otherwise. * * @param removeFrom the iterable to (potentially) remove elements from * @param elementsToRetain the elements to retain * @return {@code true} if any element was removed from {@code iterable} */ public static boolean retainAll( Iterable<?> removeFrom, Collection<?> elementsToRetain) { return (removeFrom instanceof Collection) ? ((Collection<?>) removeFrom).retainAll(checkNotNull(elementsToRetain)) : Iterators.retainAll(removeFrom.iterator(), elementsToRetain); } /** * Removes, from an iterable, every element that satisfies the provided * predicate. 
 *
   * @param removeFrom the iterable to (potentially) remove elements from
   * @param predicate a predicate that determines whether an element should
   *     be removed
   * @return {@code true} if any elements were removed from the iterable
   *
   * @throws UnsupportedOperationException if the iterable does not support
   *     {@code remove()}.
   * @since 2.0
   */
  public static <T> boolean removeIf(
      Iterable<T> removeFrom, Predicate<? super T> predicate) {
    // Random-access lists get an O(n) in-place compaction; everything else
    // falls back to iterator.remove(), which is O(n^2) for e.g. ArrayList.
    if (removeFrom instanceof RandomAccess && removeFrom instanceof List) {
      return removeIfFromRandomAccessList(
          (List<T>) removeFrom, checkNotNull(predicate));
    }
    return Iterators.removeIf(removeFrom.iterator(), predicate);
  }

  /**
   * In-place compaction: shifts each retained element left over the removed
   * ones via set(), then truncates the tail. Falls back to
   * {@link #slowRemoveIfForRemainingElements} on lists that reject set().
   */
  private static <T> boolean removeIfFromRandomAccessList(
      List<T> list, Predicate<? super T> predicate) {
    // Note: Not all random access lists support set() so we need to deal with
    // those that don't and attempt the slower remove() based solution.
    int from = 0;
    int to = 0;

    for (; from < list.size(); from++) {
      T element = list.get(from);
      if (!predicate.apply(element)) {
        if (from > to) {
          try {
            list.set(to, element);
          } catch (UnsupportedOperationException e) {
            slowRemoveIfForRemainingElements(list, predicate, to, from);
            return true;
          }
        }
        to++;
      }
    }

    // Clear the tail of any remaining items
    list.subList(to, list.size()).clear();
    return from != to;
  }

  /**
   * Completes the removal via remove() for a list whose set() is unsupported;
   * see the invariants spelled out in the comments below.
   */
  private static <T> void slowRemoveIfForRemainingElements(List<T> list,
      Predicate<? super T> predicate, int to, int from) {
    // Here we know that:
    // * (to < from) and that both are valid indices.
    // * Everything with (index < to) should be kept.
    // * Everything with (to <= index < from) should be removed.
    // * The element with (index == from) should be kept.
    // * Everything with (index > from) has not been checked yet.

    // Check from the end of the list backwards (minimize expected cost of
    // moving elements when remove() is called). Stop before 'from' because
    // we already know that should be kept.
    for (int n = list.size() - 1; n > from; n--) {
      if (predicate.apply(list.get(n))) {
        list.remove(n);
      }
    }

    // And now remove everything in the range [to, from) (going backwards).
    for (int n = from - 1; n >= to; n--) {
      list.remove(n);
    }
  }

  /**
   * Determines whether two iterables contain equal elements in the same order.
   * More specifically, this method returns {@code true} if {@code iterable1}
   * and {@code iterable2} contain the same number of elements and every element
   * of {@code iterable1} is equal to the corresponding element of
   * {@code iterable2}.
   */
  public static boolean elementsEqual(
      Iterable<?> iterable1, Iterable<?> iterable2) {
    return Iterators.elementsEqual(iterable1.iterator(), iterable2.iterator());
  }

  /**
   * Returns a string representation of {@code iterable}, with the format
   * {@code [e1, e2, ..., en]}.
   */
  public static String toString(Iterable<?> iterable) {
    return Iterators.toString(iterable.iterator());
  }

  /**
   * Returns the single element contained in {@code iterable}.
   *
   * @throws NoSuchElementException if the iterable is empty
   * @throws IllegalArgumentException if the iterable contains multiple
   *     elements
   */
  public static <T> T getOnlyElement(Iterable<T> iterable) {
    return Iterators.getOnlyElement(iterable.iterator());
  }

  /**
   * Returns the single element contained in {@code iterable}, or {@code
   * defaultValue} if the iterable is empty.
   *
   * @throws IllegalArgumentException if the iterator contains multiple
   *     elements
   */
  public static <T> T getOnlyElement(
      Iterable<? extends T> iterable, @Nullable T defaultValue) {
    return Iterators.getOnlyElement(iterable.iterator(), defaultValue);
  }

  /**
   * Copies an iterable's elements into an array.
   *
   * @param iterable the iterable to copy
   * @param type the type of the elements
   * @return a newly-allocated array into which all the elements of the iterable
   *     have been copied
   */
  @GwtIncompatible("Array.newInstance(Class, int)")
  public static <T> T[] toArray(Iterable<? extends T> iterable, Class<T> type) {
    Collection<? extends T> collection = toCollection(iterable);
    T[] array = ObjectArrays.newArray(type, collection.size());
    return collection.toArray(array);
  }

  /**
   * Copies an iterable's elements into an array.
   *
   * @param iterable the iterable to copy
   * @return a newly-allocated array into which all the elements of the iterable
   *     have been copied
   */
  static Object[] toArray(Iterable<?> iterable) {
    return toCollection(iterable).toArray();
  }

  /**
   * Converts an iterable into a collection. If the iterable is already a
   * collection, it is returned. Otherwise, an {@link java.util.ArrayList} is
   * created with the contents of the iterable in the same iteration order.
   */
  private static <E> Collection<E> toCollection(Iterable<E> iterable) {
    return (iterable instanceof Collection)
        ? (Collection<E>) iterable
        : Lists.newArrayList(iterable.iterator());
  }

  /**
   * Adds all elements in {@code elementsToAdd} to {@code addTo}.
   *
   * @return {@code true} if {@code addTo} was modified as a result of this
   *     operation.
   */
  public static <T> boolean addAll(
      Collection<T> addTo, Iterable<? extends T> elementsToAdd) {
    if (elementsToAdd instanceof Collection) {
      // Collection path lets addAll() use any bulk optimization it has.
      Collection<? extends T> c = Collections2.cast(elementsToAdd);
      return addTo.addAll(c);
    }
    return Iterators.addAll(addTo, elementsToAdd.iterator());
  }

  /**
   * Returns the number of elements in the specified iterable that equal the
   * specified object. This implementation avoids a full iteration when the
   * iterable is a {@link Multiset} or {@link Set}.
   *
   * @see Collections#frequency
   */
  public static int frequency(Iterable<?> iterable, @Nullable Object element) {
    if ((iterable instanceof Multiset)) {
      return ((Multiset<?>) iterable).count(element);
    }
    if ((iterable instanceof Set)) {
      // A set holds at most one copy of any element.
      return ((Set<?>) iterable).contains(element) ? 1 : 0;
    }
    return Iterators.frequency(iterable.iterator(), element);
  }

  /**
   * Returns an iterable whose iterators cycle indefinitely over the elements of
   * {@code iterable}.
* * <p>That iterator supports {@code remove()} if {@code iterable.iterator()} * does. After {@code remove()} is called, subsequent cycles omit the removed * element, which is no longer in {@code iterable}. The iterator's * {@code hasNext()} method returns {@code true} until {@code iterable} is * empty. * * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an * infinite loop. You should use an explicit {@code break} or be certain that * you will eventually remove all the elements. * * <p>To cycle over the iterable {@code n} times, use the following: * {@code Iterables.concat(Collections.nCopies(n, iterable))} */ public static <T> Iterable<T> cycle(final Iterable<T> iterable) { checkNotNull(iterable); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.cycle(iterable); } @Override public String toString() { return iterable.toString() + " (cycled)"; } }; } /** * Returns an iterable whose iterators cycle indefinitely over the provided * elements. * * <p>After {@code remove} is invoked on a generated iterator, the removed * element will no longer appear in either that iterator or any other iterator * created from the same source iterable. That is, this method behaves exactly * as {@code Iterables.cycle(Lists.newArrayList(elements))}. The iterator's * {@code hasNext} method returns {@code true} until all of the original * elements have been removed. * * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an * infinite loop. You should use an explicit {@code break} or be certain that * you will eventually remove all the elements. * * <p>To cycle over the elements {@code n} times, use the following: * {@code Iterables.concat(Collections.nCopies(n, Arrays.asList(elements)))} */ public static <T> Iterable<T> cycle(T... elements) { return cycle(Lists.newArrayList(elements)); } /** * Combines two iterables into a single iterable. 
The returned iterable has an * iterator that traverses the elements in {@code a}, followed by the elements * in {@code b}. The source iterators are not polled until necessary. * * <p>The returned iterable's iterator supports {@code remove()} when the * corresponding input iterator supports it. */ @SuppressWarnings("unchecked") public static <T> Iterable<T> concat( Iterable<? extends T> a, Iterable<? extends T> b) { checkNotNull(a); checkNotNull(b); return concat(Arrays.asList(a, b)); } /** * Combines three iterables into a single iterable. The returned iterable has * an iterator that traverses the elements in {@code a}, followed by the * elements in {@code b}, followed by the elements in {@code c}. The source * iterators are not polled until necessary. * * <p>The returned iterable's iterator supports {@code remove()} when the * corresponding input iterator supports it. */ @SuppressWarnings("unchecked") public static <T> Iterable<T> concat(Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c) { checkNotNull(a); checkNotNull(b); checkNotNull(c); return concat(Arrays.asList(a, b, c)); } /** * Combines four iterables into a single iterable. The returned iterable has * an iterator that traverses the elements in {@code a}, followed by the * elements in {@code b}, followed by the elements in {@code c}, followed by * the elements in {@code d}. The source iterators are not polled until * necessary. * * <p>The returned iterable's iterator supports {@code remove()} when the * corresponding input iterator supports it. */ @SuppressWarnings("unchecked") public static <T> Iterable<T> concat(Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c, Iterable<? extends T> d) { checkNotNull(a); checkNotNull(b); checkNotNull(c); checkNotNull(d); return concat(Arrays.asList(a, b, c, d)); } /** * Combines multiple iterables into a single iterable. 
The returned iterable * has an iterator that traverses the elements of each iterable in * {@code inputs}. The input iterators are not polled until necessary. * * <p>The returned iterable's iterator supports {@code remove()} when the * corresponding input iterator supports it. * * @throws NullPointerException if any of the provided iterables is null */ public static <T> Iterable<T> concat(Iterable<? extends T>... inputs) { return concat(ImmutableList.copyOf(inputs)); } /** * Combines multiple iterables into a single iterable. The returned iterable * has an iterator that traverses the elements of each iterable in * {@code inputs}. The input iterators are not polled until necessary. * * <p>The returned iterable's iterator supports {@code remove()} when the * corresponding input iterator supports it. The methods of the returned * iterable may throw {@code NullPointerException} if any of the input * iterators is null. */ public static <T> Iterable<T> concat( final Iterable<? extends Iterable<? extends T>> inputs) { checkNotNull(inputs); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.concat(iterators(inputs)); } }; } /** * Returns an iterator over the iterators of the given iterables. */ private static <T> UnmodifiableIterator<Iterator<? extends T>> iterators( Iterable<? extends Iterable<? extends T>> iterables) { final Iterator<? extends Iterable<? extends T>> iterableIterator = iterables.iterator(); return new UnmodifiableIterator<Iterator<? extends T>>() { @Override public boolean hasNext() { return iterableIterator.hasNext(); } @Override public Iterator<? extends T> next() { return iterableIterator.next().iterator(); } }; } /** * Divides an iterable into unmodifiable sublists of the given size (the final * iterable may be smaller). 
For example, partitioning an iterable containing * {@code [a, b, c, d, e]} with a partition size of 3 yields {@code * [[a, b, c], [d, e]]} -- an outer iterable containing two inner lists of * three and two elements, all in the original order. * * <p>Iterators returned by the returned iterable do not support the {@link * Iterator#remove()} method. The returned lists implement {@link * RandomAccess}, whether or not the input list does. * * <p><b>Note:</b> if {@code iterable} is a {@link List}, use {@link * Lists#partition(List, int)} instead. * * @param iterable the iterable to return a partitioned view of * @param size the desired size of each partition (the last may be smaller) * @return an iterable of unmodifiable lists containing the elements of {@code * iterable} divided into partitions * @throws IllegalArgumentException if {@code size} is nonpositive */ public static <T> Iterable<List<T>> partition( final Iterable<T> iterable, final int size) { checkNotNull(iterable); checkArgument(size > 0); return new FluentIterable<List<T>>() { @Override public Iterator<List<T>> iterator() { return Iterators.partition(iterable.iterator(), size); } }; } /** * Divides an iterable into unmodifiable sublists of the given size, padding * the final iterable with null values if necessary. For example, partitioning * an iterable containing {@code [a, b, c, d, e]} with a partition size of 3 * yields {@code [[a, b, c], [d, e, null]]} -- an outer iterable containing * two inner lists of three elements each, all in the original order. * * <p>Iterators returned by the returned iterable do not support the {@link * Iterator#remove()} method. 
* * @param iterable the iterable to return a partitioned view of * @param size the desired size of each partition * @return an iterable of unmodifiable lists containing the elements of {@code * iterable} divided into partitions (the final iterable may have * trailing null elements) * @throws IllegalArgumentException if {@code size} is nonpositive */ public static <T> Iterable<List<T>> paddedPartition( final Iterable<T> iterable, final int size) { checkNotNull(iterable); checkArgument(size > 0); return new FluentIterable<List<T>>() { @Override public Iterator<List<T>> iterator() { return Iterators.paddedPartition(iterable.iterator(), size); } }; } /** * Returns the elements of {@code unfiltered} that satisfy a predicate. The * resulting iterable's iterator does not support {@code remove()}. */ public static <T> Iterable<T> filter( final Iterable<T> unfiltered, final Predicate<? super T> predicate) { checkNotNull(unfiltered); checkNotNull(predicate); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.filter(unfiltered.iterator(), predicate); } }; } /** * Returns all instances of class {@code type} in {@code unfiltered}. The * returned iterable has elements whose class is {@code type} or a subclass of * {@code type}. The returned iterable's iterator does not support * {@code remove()}. * * @param unfiltered an iterable containing objects of any type * @param type the type of elements desired * @return an unmodifiable iterable containing all elements of the original * iterable that were of the requested type */ @GwtIncompatible("Class.isInstance") public static <T> Iterable<T> filter( final Iterable<?> unfiltered, final Class<T> type) { checkNotNull(unfiltered); checkNotNull(type); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.filter(unfiltered.iterator(), type); } }; } /** * Returns {@code true} if any element in {@code iterable} satisfies the predicate. 
*/ public static <T> boolean any( Iterable<T> iterable, Predicate<? super T> predicate) { return Iterators.any(iterable.iterator(), predicate); } /** * Returns {@code true} if every element in {@code iterable} satisfies the * predicate. If {@code iterable} is empty, {@code true} is returned. */ public static <T> boolean all( Iterable<T> iterable, Predicate<? super T> predicate) { return Iterators.all(iterable.iterator(), predicate); } /** * Returns the first element in {@code iterable} that satisfies the given * predicate; use this method only when such an element is known to exist. If * it is possible that <i>no</i> element will match, use {@link #tryFind} or * {@link #find(Iterable, Predicate, Object)} instead. * * @throws NoSuchElementException if no element in {@code iterable} matches * the given predicate */ public static <T> T find(Iterable<T> iterable, Predicate<? super T> predicate) { return Iterators.find(iterable.iterator(), predicate); } /** * Returns the first element in {@code iterable} that satisfies the given * predicate, or {@code defaultValue} if none found. Note that this can * usually be handled more naturally using {@code * tryFind(iterable, predicate).or(defaultValue)}. * * @since 7.0 */ public static <T> T find(Iterable<? extends T> iterable, Predicate<? super T> predicate, @Nullable T defaultValue) { return Iterators.find(iterable.iterator(), predicate, defaultValue); } /** * Returns an {@link Optional} containing the first element in {@code * iterable} that satisfies the given predicate, if such an element exists. * * <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code * null}. If {@code null} is matched in {@code iterable}, a * NullPointerException will be thrown. * * @since 11.0 */ public static <T> Optional<T> tryFind(Iterable<T> iterable, Predicate<? 
super T> predicate) { return Iterators.tryFind(iterable.iterator(), predicate); } /** * Returns the index in {@code iterable} of the first element that satisfies * the provided {@code predicate}, or {@code -1} if the Iterable has no such * elements. * * <p>More formally, returns the lowest index {@code i} such that * {@code predicate.apply(Iterables.get(iterable, i))} returns {@code true}, * or {@code -1} if there is no such index. * * @since 2.0 */ public static <T> int indexOf( Iterable<T> iterable, Predicate<? super T> predicate) { return Iterators.indexOf(iterable.iterator(), predicate); } /** * Returns an iterable that applies {@code function} to each element of {@code * fromIterable}. * * <p>The returned iterable's iterator supports {@code remove()} if the * provided iterator does. After a successful {@code remove()} call, * {@code fromIterable} no longer contains the corresponding element. * * <p>If the input {@code Iterable} is known to be a {@code List} or other * {@code Collection}, consider {@link Lists#transform} and {@link * Collections2#transform}. */ public static <F, T> Iterable<T> transform(final Iterable<F> fromIterable, final Function<? super F, ? extends T> function) { checkNotNull(fromIterable); checkNotNull(function); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.transform(fromIterable.iterator(), function); } }; } /** * Returns the element at the specified position in an iterable. 
* * @param position position of the element to return * @return the element at the specified position in {@code iterable} * @throws IndexOutOfBoundsException if {@code position} is negative or * greater than or equal to the size of {@code iterable} */ public static <T> T get(Iterable<T> iterable, int position) { checkNotNull(iterable); if (iterable instanceof List) { return ((List<T>) iterable).get(position); } if (iterable instanceof Collection) { // Can check both ends Collection<T> collection = (Collection<T>) iterable; Preconditions.checkElementIndex(position, collection.size()); } else { // Can only check the lower end checkNonnegativeIndex(position); } return Iterators.get(iterable.iterator(), position); } private static void checkNonnegativeIndex(int position) { if (position < 0) { throw new IndexOutOfBoundsException( "position cannot be negative: " + position); } } /** * Returns the element at the specified position in an iterable or a default * value otherwise. * * @param position position of the element to return * @param defaultValue the default value to return if {@code position} is * greater than or equal to the size of the iterable * @return the element at the specified position in {@code iterable} or * {@code defaultValue} if {@code iterable} contains fewer than * {@code position + 1} elements. * @throws IndexOutOfBoundsException if {@code position} is negative * @since 4.0 */ public static <T> T get(Iterable<? extends T> iterable, int position, @Nullable T defaultValue) { checkNotNull(iterable); checkNonnegativeIndex(position); try { return get(iterable, position); } catch (IndexOutOfBoundsException e) { return defaultValue; } } /** * Returns the first element in {@code iterable} or {@code defaultValue} if * the iterable is empty. The {@link Iterators} analog to this method is * {@link Iterators#getNext}. 
* * @param defaultValue the default value to return if the iterable is empty * @return the first element of {@code iterable} or the default value * @since 7.0 */ public static <T> T getFirst(Iterable<? extends T> iterable, @Nullable T defaultValue) { return Iterators.getNext(iterable.iterator(), defaultValue); } /** * Returns the last element of {@code iterable}. * * @return the last element of {@code iterable} * @throws NoSuchElementException if the iterable is empty */ public static <T> T getLast(Iterable<T> iterable) { // TODO(kevinb): Support a concurrently modified collection? if (iterable instanceof List) { List<T> list = (List<T>) iterable; if (list.isEmpty()) { throw new NoSuchElementException(); } return getLastInNonemptyList(list); } /* * TODO(kevinb): consider whether this "optimization" is worthwhile. Users * with SortedSets tend to know they are SortedSets and probably would not * call this method. */ if (iterable instanceof SortedSet) { SortedSet<T> sortedSet = (SortedSet<T>) iterable; return sortedSet.last(); } return Iterators.getLast(iterable.iterator()); } /** * Returns the last element of {@code iterable} or {@code defaultValue} if * the iterable is empty. * * @param defaultValue the value to return if {@code iterable} is empty * @return the last element of {@code iterable} or the default value * @since 3.0 */ public static <T> T getLast(Iterable<? extends T> iterable, @Nullable T defaultValue) { if (iterable instanceof Collection) { Collection<? extends T> collection = Collections2.cast(iterable); if (collection.isEmpty()) { return defaultValue; } } if (iterable instanceof List) { List<? extends T> list = Lists.cast(iterable); return getLastInNonemptyList(list); } /* * TODO(kevinb): consider whether this "optimization" is worthwhile. Users * with SortedSets tend to know they are SortedSets and probably would not * call this method. */ if (iterable instanceof SortedSet) { SortedSet<? 
extends T> sortedSet = Sets.cast(iterable); return sortedSet.last(); } return Iterators.getLast(iterable.iterator(), defaultValue); } private static <T> T getLastInNonemptyList(List<T> list) { return list.get(list.size() - 1); } /** * Returns a view of {@code iterable} that skips its first * {@code numberToSkip} elements. If {@code iterable} contains fewer than * {@code numberToSkip} elements, the returned iterable skips all of its * elements. * * <p>Modifications to the underlying {@link Iterable} before a call to * {@code iterator()} are reflected in the returned iterator. That is, the * iterator skips the first {@code numberToSkip} elements that exist when the * {@code Iterator} is created, not when {@code skip()} is called. * * <p>The returned iterable's iterator supports {@code remove()} if the * iterator of the underlying iterable supports it. Note that it is * <i>not</i> possible to delete the last skipped element by immediately * calling {@code remove()} on that iterator, as the {@code Iterator} * contract states that a call to {@code remove()} before a call to * {@code next()} will throw an {@link IllegalStateException}. * * @since 3.0 */ public static <T> Iterable<T> skip(final Iterable<T> iterable, final int numberToSkip) { checkNotNull(iterable); checkArgument(numberToSkip >= 0, "number to skip cannot be negative"); if (iterable instanceof List) { final List<T> list = (List<T>) iterable; return new FluentIterable<T>() { @Override public Iterator<T> iterator() { // TODO(kevinb): Support a concurrently modified collection? return (numberToSkip >= list.size()) ? 
Iterators.<T>emptyIterator() : list.subList(numberToSkip, list.size()).iterator(); } }; } return new FluentIterable<T>() { @Override public Iterator<T> iterator() { final Iterator<T> iterator = iterable.iterator(); Iterators.advance(iterator, numberToSkip); /* * We can't just return the iterator because an immediate call to its * remove() method would remove one of the skipped elements instead of * throwing an IllegalStateException. */ return new Iterator<T>() { boolean atStart = true; @Override public boolean hasNext() { return iterator.hasNext(); } @Override public T next() { if (!hasNext()) { throw new NoSuchElementException(); } try { return iterator.next(); } finally { atStart = false; } } @Override public void remove() { if (atStart) { throw new IllegalStateException(); } iterator.remove(); } }; } }; } /** * Creates an iterable with the first {@code limitSize} elements of the given * iterable. If the original iterable does not contain that many elements, the * returned iterator will have the same behavior as the original iterable. The * returned iterable's iterator supports {@code remove()} if the original * iterator does. * * @param iterable the iterable to limit * @param limitSize the maximum number of elements in the returned iterator * @throws IllegalArgumentException if {@code limitSize} is negative * @since 3.0 */ public static <T> Iterable<T> limit( final Iterable<T> iterable, final int limitSize) { checkNotNull(iterable); checkArgument(limitSize >= 0, "limit is negative"); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.limit(iterable.iterator(), limitSize); } }; } /** * Returns a view of the supplied iterable that wraps each generated * {@link Iterator} through {@link Iterators#consumingIterator(Iterator)}. * * <p>Note: If {@code iterable} is a {@link Queue}, the returned iterable will * get entries from {@link Queue#remove()} since {@link Queue}'s iteration * order is undefined. 
Calling {@link Iterator#hasNext()} on a generated * iterator from the returned iterable may cause an item to be immediately * dequeued for return on a subsequent call to {@link Iterator#next()}. * * @param iterable the iterable to wrap * @return a view of the supplied iterable that wraps each generated iterator * through {@link Iterators#consumingIterator(Iterator)}; for queues, * an iterable that generates iterators that return and consume the * queue's elements in queue order * * @see Iterators#consumingIterator(Iterator) * @since 2.0 */ public static <T> Iterable<T> consumingIterable(final Iterable<T> iterable) { if (iterable instanceof Queue) { return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return new ConsumingQueueIterator<T>((Queue<T>) iterable); } }; } checkNotNull(iterable); return new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.consumingIterator(iterable.iterator()); } }; } private static class ConsumingQueueIterator<T> extends AbstractIterator<T> { private final Queue<T> queue; private ConsumingQueueIterator(Queue<T> queue) { this.queue = queue; } @Override public T computeNext() { try { return queue.remove(); } catch (NoSuchElementException e) { return endOfData(); } } } // Methods only in Iterables, not in Iterators /** * Determines if the given iterable contains no elements. * * <p>There is no precise {@link Iterator} equivalent to this method, since * one can only ask an iterator whether it has any elements <i>remaining</i> * (which one does using {@link Iterator#hasNext}). * * @return {@code true} if the iterable contains no elements */ public static boolean isEmpty(Iterable<?> iterable) { if (iterable instanceof Collection) { return ((Collection<?>) iterable).isEmpty(); } return !iterable.iterator().hasNext(); } // Non-public /** * Removes the specified element from the specified iterable. 
* * <p>This method iterates over the iterable, checking each element returned * by the iterator in turn to see if it equals the object {@code o}. If they * are equal, it is removed from the iterable with the iterator's * {@code remove} method. At most one element is removed, even if the iterable * contains multiple members that equal {@code o}. * * <p><b>Warning:</b> Do not use this method for a collection, such as a * {@link HashSet}, that has a fast {@code remove} method. * * @param iterable the iterable from which to remove * @param o an element to remove from the collection * @return {@code true} if the iterable changed as a result * @throws UnsupportedOperationException if the iterator does not support the * {@code remove} method and the iterable contains the object */ static boolean remove(Iterable<?> iterable, @Nullable Object o) { Iterator<?> i = iterable.iterator(); while (i.hasNext()) { if (Objects.equal(i.next(), o)) { i.remove(); return true; } } return false; } /** * Returns an iterable over the merged contents of all given * {@code iterables}. Equivalent entries will not be de-duplicated. * * <p>Callers must ensure that the source {@code iterables} are in * non-descending order as this method does not sort its input. * * <p>For any equivalent elements across all {@code iterables}, it is * undefined which element is returned first. * * @since 11.0 */ @Beta public static <T> Iterable<T> mergeSorted( final Iterable<? extends Iterable<? extends T>> iterables, final Comparator<? super T> comparator) { checkNotNull(iterables, "iterables"); checkNotNull(comparator, "comparator"); Iterable<T> iterable = new FluentIterable<T>() { @Override public Iterator<T> iterator() { return Iterators.mergeSorted( Iterables.transform(iterables, Iterables.<T>toIterator()), comparator); } }; return new UnmodifiableIterable<T>(iterable); } // TODO(user): Is this the best place for this? Move to fluent functions? // Useful as a public method? 
private static <T> Function<Iterable<? extends T>, Iterator<? extends T>> toIterator() { return new Function<Iterable<? extends T>, Iterator<? extends T>>() { @Override public Iterator<? extends T> apply(Iterable<? extends T> iterable) { return iterable.iterator(); } }; } }
/*
 * Copyright 2004-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.compass.core.lucene.support;

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.OpenBitSetDISI;
import org.apache.lucene.util.SortedVIntList;

/**
 * <p>
 * Allows multiple {@link Filter}s to be chained.
 * Logical operations such as <b>NOT</b> and <b>XOR</b>
 * are applied between filters. One operation can be used
 * for all filters, or a specific operation can be declared
 * for each filter.
 * </p>
 * <p>
 * Order in which filters are called depends on
 * the position of the filter in the chain. It's probably
 * more efficient to place the most restrictive filters
 * /least computationally-intensive filters first.
 * </p>
 */
public class ChainedFilter extends Filter {

    /** Combine filters with a logical OR (union of matching docs). */
    public static final int OR = 0;

    /** Combine filters with a logical AND (intersection of matching docs). */
    public static final int AND = 1;

    /** Keep docs matched so far that are NOT matched by the next filter. */
    public static final int ANDNOT = 2;

    /** Combine filters with a logical XOR (symmetric difference). */
    public static final int XOR = 3;

    /**
     * Logical operation when none is declared. Defaults to OR.
     *
     * <p>NOTE(review): deliberately left non-final for backward compatibility —
     * existing callers may reassign it to change the global default. Any such
     * reassignment is globally visible and not thread-safe.
     */
    public static int DEFAULT = OR;

    /** The filter chain. Evaluated in array order. */
    private Filter[] chain = null;

    /** Per-filter logical operations; used when neither {@link #logic} nor the default applies. */
    private int[] logicArray;

    /** Single logical operation applied to ALL filters; {@code -1} means "not set". */
    private int logic = -1;

    /**
     * Ctor.
     *
     * @param chain The chain of filters
     */
    public ChainedFilter(Filter[] chain) {
        this.chain = chain;
    }

    /**
     * Ctor.
     *
     * @param chain      The chain of filters
     * @param logicArray Logical operations to apply between filters; must have
     *                   one entry per filter in {@code chain}
     */
    public ChainedFilter(Filter[] chain, int[] logicArray) {
        this.chain = chain;
        this.logicArray = logicArray;
    }

    /**
     * Ctor.
     *
     * @param chain The chain of filters
     * @param logic Logical operation to apply to ALL filters
     */
    public ChainedFilter(Filter[] chain, int logic) {
        this.chain = chain;
        this.logic = logic;
    }

    /**
     * {@link Filter#getDocIdSet}.
     *
     * <p>Dispatches on how the filter was configured: a single operation for
     * all filters, a per-filter operation array, or the {@link #DEFAULT}.
     *
     * @throws IOException propagated from the underlying filters/reader
     */
    public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
        int[] index = new int[1]; // use array as reference to modifiable int;
        index[0] = 0;             // an object attribute would not be thread safe.
        if (logic != -1)
            return getDocIdSet(reader, logic, index);
        else if (logicArray != null)
            return getDocIdSet(reader, logicArray, index);
        else
            return getDocIdSet(reader, DEFAULT, index);
    }

    /** Obtains the doc-id iterator for a single filter in the chain. */
    private DocIdSetIterator getDISI(Filter filter, IndexReader reader)
            throws IOException {
        return filter.getDocIdSet(reader).iterator();
    }

    /**
     * Seeds the result bitset for the chain, consuming leading filters where the
     * operation requires a non-empty starting set (AND, ANDNOT). Advances
     * {@code index[0]} past any filter consumed here.
     */
    private OpenBitSetDISI initialResult(IndexReader reader, int logic, int[] index)
            throws IOException {
        OpenBitSetDISI result;
        /**
         * First AND operation takes place against a completely false
         * bitset and will always return zero results.
         */
        if (logic == AND) {
            result = new OpenBitSetDISI(getDISI(chain[index[0]], reader), reader.maxDoc());
            ++index[0];
        } else if (logic == ANDNOT) {
            result = new OpenBitSetDISI(getDISI(chain[index[0]], reader), reader.maxDoc());
            result.flip(0, reader.maxDoc()); // NOTE: may set bits for deleted docs.
            ++index[0];
        } else {
            // OR/XOR start from an empty set.
            result = new OpenBitSetDISI(reader.maxDoc());
        }
        return result;
    }

    /**
     * Provide a SortedVIntList when it is definitely smaller than an OpenBitSet.
     */
    protected DocIdSet finalResult(OpenBitSetDISI result, int maxDocs) {
        return (result.cardinality() < (maxDocs / 9))
                ? (DocIdSet) new SortedVIntList(result)
                : (DocIdSet) result;
    }

    /**
     * Delegates to each filter in the chain, applying one logical operation to all.
     *
     * @param reader IndexReader
     * @param logic  Logical operation
     * @return DocIdSet
     */
    private DocIdSet getDocIdSet(IndexReader reader, int logic, int[] index)
            throws IOException {
        OpenBitSetDISI result = initialResult(reader, logic, index);
        for (; index[0] < chain.length; index[0]++) {
            doChain(result, logic, chain[index[0]].getDocIdSet(reader));
        }
        return finalResult(result, reader.maxDoc());
    }

    /**
     * Delegates to each filter in the chain, applying a per-filter logical operation.
     *
     * @param reader IndexReader
     * @param logic  Logical operation per filter
     * @return DocIdSet
     * @throws IllegalArgumentException if {@code logic.length != chain.length}
     */
    private DocIdSet getDocIdSet(IndexReader reader, int[] logic, int[] index)
            throws IOException {
        if (logic.length != chain.length)
            throw new IllegalArgumentException("Invalid number of elements in logic array");
        OpenBitSetDISI result = initialResult(reader, logic[0], index);
        for (; index[0] < chain.length; index[0]++) {
            doChain(result, logic[index[0]], chain[index[0]].getDocIdSet(reader));
        }
        return finalResult(result, reader.maxDoc());
    }

    @Override
    public String toString() {
        // StringBuilder instead of the legacy synchronized StringBuffer:
        // the buffer is method-local, so no synchronization is needed.
        StringBuilder sb = new StringBuilder();
        sb.append("ChainedFilter: [");
        for (int i = 0; i < chain.length; i++) {
            sb.append(chain[i]);
            sb.append(' ');
        }
        sb.append(']');
        return sb.toString();
    }

    /**
     * Folds one filter's doc-id set into the running result using the given
     * logical operation. Uses the optimized OpenBitSet bulk operations when the
     * set is an OpenBitSet, otherwise iterates via DocIdSetIterator. An
     * unrecognized operation falls back to {@link #DEFAULT}.
     */
    private void doChain(OpenBitSetDISI result, int logic, DocIdSet dis) throws IOException {
        if (dis instanceof OpenBitSet) {
            // optimized case for OpenBitSets
            switch (logic) {
                case OR:
                    result.or((OpenBitSet) dis);
                    break;
                case AND:
                    result.and((OpenBitSet) dis);
                    break;
                case ANDNOT:
                    result.andNot((OpenBitSet) dis);
                    break;
                case XOR:
                    result.xor((OpenBitSet) dis);
                    break;
                default:
                    doChain(result, DEFAULT, dis);
                    break;
            }
        } else {
            DocIdSetIterator disi = dis.iterator();
            switch (logic) {
                case OR:
                    result.inPlaceOr(disi);
                    break;
                case AND:
                    result.inPlaceAnd(disi);
                    break;
                case ANDNOT:
                    result.inPlaceNot(disi);
                    break;
                case XOR:
                    result.inPlaceXor(disi);
                    break;
                default:
                    doChain(result, DEFAULT, dis);
                    break;
            }
        }
    }
}
/**
 * DNet eBusiness Suite
 * Copyright: 2013 Nan21 Electronics SRL. All rights reserved.
 * Use is subject to license terms.
 */
package seava.j4e.presenter.descriptor;

import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.persistence.QueryHint;

import org.eclipse.persistence.config.QueryHints;

import seava.j4e.api.annotation.Ds;
import seava.j4e.api.annotation.DsField;
import seava.j4e.api.annotation.DsQueryHints;
import seava.j4e.api.annotation.SortField;
import seava.j4e.api.descriptor.IViewModelDescriptor;

/**
 * Base view-model descriptor. Reads the {@link Ds}/{@link DsField} annotations of
 * a model class and derives the lookup structures (reference paths, converters,
 * fetch joins, order-bys, query hints) used by the query builder and converters.
 */
public abstract class AbstractViewModelDescriptor<M> implements IViewModelDescriptor<M> {

	private Class<M> modelClass;

	/**
	 * Holds the mapping from a data-source field to the corresponding entity
	 * property as a navigation path expression. Used by the base converter to
	 * populate the view model instances from the source entity.
	 * Example: dsFieldName => entityField or
	 * dsFieldName => entityRefField.entityRefField.entityAttribute
	 */
	private Map<String, String> refPaths;

	/**
	 * Holds the reversed mapping only for the root entity attributes. Used in the
	 * base converter to update the root entity attributes from the view model
	 * instance. Example: entityField => dsFieldName.
	 */
	private Map<String, String> m2eConv;

	// model field name => entity path, recorded for every @DsField
	private Map<String, String> e2mConv;

	// model field name => extra JPQL filter fragment declared on the field
	private Map<String, String> jpqlFieldFilterRules;

	// "e." + reference path => join type, for single-level fetched references
	private Map<String, String> fetchJoins;

	// field names excluded from insert / update operations
	private List<String> noInserts;
	private List<String> noUpdates;

	// model field name => resolved (path-qualified) order-by expressions
	private Map<String, String[]> orderBys;

	// JPA query hints collected from the @DsQueryHints annotation
	private Map<String, Object> queryHints;

	private boolean worksWithJpql = true;

	private String jpqlDefaultWhere;
	private String jpqlDefaultSort;

	public AbstractViewModelDescriptor() {
	}

	public AbstractViewModelDescriptor(Class<M> modelClass) {
		this.modelClass = modelClass;
		// Field annotations first: buildHeaders() resolves the default sort
		// through refPaths, which buildElements() populates.
		this.buildElements();
		this.buildHeaders();
	}

	/**
	 * Process model level annotations: default JPQL where/sort clauses and
	 * optional query hints.
	 */
	protected void buildHeaders() {
		if (this.modelClass.isAnnotationPresent(Ds.class)) {
			this.jpqlDefaultWhere = this.modelClass.getAnnotation(Ds.class).jpqlWhere();
			SortField[] sortFields = this.modelClass.getAnnotation(Ds.class).sort();
			if (sortFields != null && sortFields.length > 0) {
				// StringBuilder: a local buffer needs no synchronization.
				StringBuilder sb = new StringBuilder();
				for (int i = 0, len = sortFields.length; i < len; i++) {
					if (i > 0) {
						sb.append(",");
					}
					// NOTE(review): if the sort field is not a declared @DsField,
					// refPaths.get(...) is null and this emits "e.null" — presumably
					// the annotations are kept in sync; verify against the models.
					sb.append("e." + this.refPaths.get(sortFields[i].field())
							+ ((sortFields[i].desc()) ? " desc" : ""));
				}
				this.jpqlDefaultSort = sb.toString();
			} else {
				this.jpqlDefaultSort = this.modelClass.getAnnotation(Ds.class).jpqlSort();
			}
			// query hints
			if (this.modelClass.isAnnotationPresent(DsQueryHints.class)) {
				queryHints = new HashMap<String, Object>();
				QueryHint[] hints = this.modelClass.getAnnotation(DsQueryHints.class).value();
				for (int i = 0, len = hints.length; i < len; i++) {
					queryHints.put(hints[i].name(), hints[i].value());
				}
			}
		}
	}

	/**
	 * Process field level annotations over the whole class hierarchy of the model
	 * and populate the lookup maps. Runs only once (guarded by refPaths == null).
	 */
	protected void buildElements() {
		if (this.refPaths == null) {
			this.refPaths = new HashMap<String, String>();
			this.m2eConv = new HashMap<String, String>();
			this.e2mConv = new HashMap<String, String>();
			this.noInserts = new ArrayList<String>();
			this.noUpdates = new ArrayList<String>();
			this.jpqlFieldFilterRules = new HashMap<String, String>();
			this.fetchJoins = new HashMap<String, String>();
			this.orderBys = new HashMap<String, String[]>();

			// Deliberately disabled: nested (multi-level) fetch paths currently
			// generate no join hint.
			boolean createHintsForNestedFetchJoins = false;
			if (queryHints == null) {
				queryHints = new HashMap<String, Object>();
			}

			Class<?> clz = this.modelClass;
			while (clz != null) {
				Field[] fields = clz.getDeclaredFields();
				clz = clz.getSuperclass();
				for (Field field : fields) {
					if (field.isAnnotationPresent(DsField.class)) {
						// cache the annotation instead of re-resolving it repeatedly
						DsField dsField = field.getAnnotation(DsField.class);
						String fieldName = field.getName();
						if (dsField.noInsert()) {
							this.noInserts.add(fieldName);
						}
						if (dsField.noUpdate()) {
							this.noUpdates.add(fieldName);
						}
						String path = dsField.path();
						if (path.equals("")) {
							path = field.getName();
						}
						String orderBy = dsField.orderBy();
						if (!orderBy.equals("")) {
							String[] orderByFields = orderBy.split(",");
							// renamed from "orderBys": the original local shadowed
							// the field of the same name
							String[] qualifiedOrderBys = new String[orderByFields.length];
							int pos = path.lastIndexOf(".");
							String prefix = (pos > 0) ? path.substring(0, pos) : null;
							for (int y = 0, l = orderByFields.length; y < l; y++) {
								if (prefix != null && !prefix.equals("")) {
									qualifiedOrderBys[y] = prefix + "." + orderByFields[y];
								} else {
									qualifiedOrderBys[y] = orderByFields[y];
								}
							}
							this.orderBys.put(fieldName, qualifiedOrderBys);
						}
						this.e2mConv.put(fieldName, path);
						if (dsField.fetch()) {
							this.refPaths.put(fieldName, path);
							int firstDot = path.indexOf(".");
							if (firstDot > 0) {
								if (firstDot == path.lastIndexOf(".")) {
									// single-level reference: explicit fetch join
									this.fetchJoins.put(
											"e." + path.substring(0, path.lastIndexOf(".")),
											dsField.join());
								} else {
									if (createHintsForNestedFetchJoins) {
										String p = "e." + path.substring(0, path.lastIndexOf("."));
										String type = dsField.join();
										if (type != null && type.equals("left")) {
											this.queryHints.put(QueryHints.LEFT_FETCH, p);
										} else {
											this.queryHints.put(QueryHints.FETCH, p);
										}
									}
								}
							} else {
								// root entity attribute: record the reverse mapping
								this.m2eConv.put(path, fieldName);
							}
						}
						String jpqlFieldFilterRule = dsField.jpqlFilter();
						if (jpqlFieldFilterRule != null && !"".equals(jpqlFieldFilterRule)) {
							this.jpqlFieldFilterRules.put(fieldName, jpqlFieldFilterRule);
						}
					}
				}
			}
		}
	}

	// ---------------- getters - setters -------------------

	public Class<M> getModelClass() {
		return modelClass;
	}

	public void setModelClass(Class<M> modelClass) {
		this.modelClass = modelClass;
	}

	public Map<String, String> getRefPaths() {
		return refPaths;
	}

	public boolean isWorksWithJpql() {
		return worksWithJpql;
	}

	public String getJpqlDefaultWhere() {
		return jpqlDefaultWhere;
	}

	public String getJpqlDefaultSort() {
		return jpqlDefaultSort;
	}

	public Map<String, String> getJpqlFieldFilterRules() {
		return jpqlFieldFilterRules;
	}

	public Map<String, String> getFetchJoins() {
		return fetchJoins;
	}

	public Map<String, String> getM2eConv() {
		return m2eConv;
	}

	public Map<String, Object> getQueryHints() {
		return queryHints;
	}

	public Map<String, String> getE2mConv() {
		return e2mConv;
	}

	public List<String> getNoInserts() {
		return noInserts;
	}

	public List<String> getNoUpdates() {
		return noUpdates;
	}

	public Map<String, String[]> getOrderBys() {
		return orderBys;
	}
}
package com.carrotcreative.recyclercore.widget; import android.content.Context; import android.content.res.TypedArray; import android.support.annotation.IntDef; import android.support.annotation.NonNull; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.ProgressBar; import android.widget.RelativeLayout; import com.carrotcreative.recyclercore.R; import com.carrotcreative.recyclercore.adapter.RecyclerCoreBaseAdapter; import com.carrotcreative.recyclercore.adapter.RecyclerCoreController; import java.util.ArrayList; public class ProgressRecyclerViewLayout extends RelativeLayout { private FrameLayout mContainer; private RecyclerCoreRecyclerView mCoreRecyclerView; private RecyclerView.Adapter mAdapter; private ProgressBar mProgressBar; private View mErrorStateView; private View mEmptyStateView; private ViewVisibilityInstanceState mPrevViewVisibilityState; private UnlimitedScrollHelper mUnlimitedScrollHelper = new UnlimitedScrollHelper(); /** * An interface to add a callback that gets called when the load point is reached. * For the load point callback to work, we need to set * {@link #setDistanceFromBottomToLoadMore(int)} and the * {@link #setOnLoadPointListener(OnLoadPointListener)} * <p> * This currently only supports {@link android.support.v7.widget.LinearLayoutManager}, * {@link android.support.v7.widget.StaggeredGridLayoutManager}, * {@link android.support.v7.widget.GridLayoutManager} */ public interface OnLoadPointListener { void onReachedLoadPoint(); } /** * Data observer to check for the empty states. 
*/ private RecyclerView.AdapterDataObserver mDataObserver = new RecyclerView.AdapterDataObserver() { @Override public void onChanged() { super.onChanged(); checkEmptyState(); } }; public ProgressRecyclerViewLayout(Context context) { this(context, null); } public ProgressRecyclerViewLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public ProgressRecyclerViewLayout(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); initAttributes(attrs); // Inflating the View LayoutInflater inflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); inflater.inflate(R.layout.rv_core_progress_recycler_view, this, true); // prepare the views findViews(); setDefaultLayoutManager(); setScrollListener(); } private void initAttributes(AttributeSet attrs) { if(attrs != null) { TypedArray customAttr = getContext().obtainStyledAttributes(attrs, R.styleable .ProgressRecyclerViewLayout); int distanceFromBottomToRefresh = customAttr.getInt( R.styleable.ProgressRecyclerViewLayout_distanceFromBottomToLoadMore, UnlimitedScrollHelper.INVALID_DISTANCE_FROM_BOTTOM_TO_LOAD_MORE); mUnlimitedScrollHelper.setDistanceFromBottomToRefresh(distanceFromBottomToRefresh); } } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); if(mAdapter != null) { mAdapter.unregisterAdapterDataObserver(mDataObserver); } } private void findViews() { mContainer = (FrameLayout) findViewById(R.id.rv_core_progress_recycler_view_container); mCoreRecyclerView = (RecyclerCoreRecyclerView) findViewById(R.id.rv_core_progress_recycler_view_recycler_view); mProgressBar = (ProgressBar) findViewById(R.id.rv_core_progress_recycler_view_progress_bar); } private void setDefaultLayoutManager() { LinearLayoutManager manager = new LinearLayoutManager(mCoreRecyclerView.getContext()); manager.setOrientation(LinearLayoutManager.VERTICAL); mCoreRecyclerView.setLayoutManager(manager); } private void setScrollListener() { 
mCoreRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() { @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { super.onScrolled(recyclerView, dx, dy); mUnlimitedScrollHelper.onScrolled(recyclerView.getLayoutManager(), mAdapter.getItemCount()); } }); } /** * Helper method to set the distance from bottom value programmatically, if its not set in * the layout file. * <p> * A value of 0, means the #onReachedLoadPoint is called when the last child starts becoming * visible. * <p> * A value of 1 means the #onReachedLoadPoint is called when the second last child starts * becoming visible * <p> * A Negative value is considered and invalid value * * @param distanceFromBottomToRefresh The no of item from the bottom of the recycler view, * after which #OnLoadPointListener is called */ public void setDistanceFromBottomToLoadMore(int distanceFromBottomToRefresh) { mUnlimitedScrollHelper.setDistanceFromBottomToRefresh(distanceFromBottomToRefresh); } /** * Set the listener that will be called once the load point is reached. * This will not work if {@link #setDistanceFromBottomToLoadMore(int)} is not set. * * @param loadPointListener */ public void setOnLoadPointListener(@NonNull OnLoadPointListener loadPointListener) { mUnlimitedScrollHelper.setLoadPointListener(loadPointListener); } /** Wrapper for {@link android.support.v7.widget.RecyclerView#setLayoutManager} */ public void setLayoutManager(RecyclerView.LayoutManager layoutManager) { mCoreRecyclerView.setLayoutManager(layoutManager); } /** Wrapper for {@link android.support.v7.widget.RecyclerView#setAdapter} */ public void setAdapter(RecyclerCoreBaseAdapter adapter) { /** * Need to reset the visibility, in case the adapter is set multiple times. */ resetViewVisibility(); /** * deregister the previous observer. 
*/ if(mAdapter != null && mAdapter != adapter) { mAdapter.unregisterAdapterDataObserver(mDataObserver); } mAdapter = adapter; mAdapter.registerAdapterDataObserver(mDataObserver); mCoreRecyclerView.setCoreAdapter(adapter); mProgressBar.setVisibility(GONE); checkEmptyState(); } /** * * @return The instance of RecyclerView. Note that you cannot use RecyclerView.setAdapter * directly on this recycler view. You should use ProgressRecyclerViewLayout.setAdapter instead. * */ public RecyclerView getRecyclerView() { return mCoreRecyclerView; } public void setOnScrollListener(RecyclerView.OnScrollListener scrollListener) { mCoreRecyclerView.addOnScrollListener(scrollListener); } public boolean contains(RecyclerView recyclerView) { if(mCoreRecyclerView == recyclerView) { return true; } return false; } public void scrollRecyclerViewBy(int dx, int dy) { mCoreRecyclerView.scrollBy(dy, dy); } public void scrollRecyclerViewToTop() { mCoreRecyclerView.getLayoutManager().scrollToPosition(0); } public void scrollRecyclerViewToBottom() { if(mCoreRecyclerView.getAdapter().getItemCount() > 0) { mCoreRecyclerView.getLayoutManager().scrollToPosition(mCoreRecyclerView.getAdapter().getItemCount()-1); } } public void stopScroll() { mCoreRecyclerView.stopScroll(); } // ========================================== // // ============= Error State =============== // // ========================================== // /** * * @return True if the errorStateView is set and is Visible. */ public boolean isErrorStateEnabled() { if(mErrorStateView != null && mErrorStateView.getVisibility() == View.VISIBLE) { return true; } return false; } /** * A methods to show/hide the error state. When #setErrorStateEnabled(true) is called * after #setErrorStateEnabled(false), the view is reset to what is was before is was enabled. * * @param enable True to show the error state * False to hide the error state. * Throws and #IllegalStateException if the error view is not set. 
* Set the error view using #setErrorView * Note: that the error view is set only if the empty state is not enabled. * Only one of them can be active at one time. */ public void setErrorStateEnabled(boolean enable) { if(mErrorStateView == null) { throw new IllegalStateException("Trying to setErrorStateEnabled without setting the error state View."); } if(enable) { if(! isErrorStateEnabled() && ! isEmptyStateEnabled()) { saveCurrentViewState(); mErrorStateView.setVisibility(VISIBLE); mCoreRecyclerView.setVisibility(GONE); mProgressBar.setVisibility(GONE); if(mEmptyStateView != null) { mEmptyStateView.setVisibility(GONE); } } } else { restorePreviousViewState(); } } /** * * @param errorView The error view that will be displayed when we call the method * #setErrorStateEnabled true. */ public void setErrorView(View errorView) { if(mErrorStateView == null) { mErrorStateView = errorView; mContainer.addView(mErrorStateView); mErrorStateView.setVisibility(GONE); } if(mErrorStateView != errorView) { mContainer.removeView(mErrorStateView); mErrorStateView = errorView; mContainer.addView(mErrorStateView); mErrorStateView.setVisibility(GONE); } } // ========================================== // // ============= Empty State =============== // // ========================================== // private void checkEmptyState() { if(mAdapter != null) { if(mAdapter.getItemCount() == 0) { setEmptyStateEnabled(true); } else { setEmptyStateEnabled(false); } } } /** * * @param emptyStateView The view that will be displayed when there are no items to * display in the adapter. * Note that either the ErrorState view or the EmptyState view * can be shown at one time. * If the errorState is enabled then the empty view will not be set, * and vice versa. 
*/ public void setEmptyStateView(View emptyStateView) { if(mEmptyStateView == null) { mEmptyStateView = emptyStateView; mContainer.addView(mEmptyStateView); mEmptyStateView.setVisibility(GONE); } if(mEmptyStateView != emptyStateView) { mContainer.removeView(mEmptyStateView); mEmptyStateView = emptyStateView; mContainer.addView(mEmptyStateView); mEmptyStateView.setVisibility(GONE); } } /** * * @return True if the EmptyStateView is set and is Visible. */ private boolean isEmptyStateEnabled() { if(mEmptyStateView != null && mEmptyStateView.getVisibility() == View.VISIBLE) { return true; } return false; } /** * * A methods to show/hide the empty state. When #setEmptyStateEnabled(true) is called * after #setEmptyStateEnabled(false), the view is reset to what is was before is was enabled. * * @param enable True to show the empty state * False to hide the empty state. * Throws and #IllegalStateException if the empty view is not set. * Set the empty view using #setEmptyStateView */ private void setEmptyStateEnabled(boolean enable) { /** * If the empty state view is not set, do an early return. */ if(mEmptyStateView == null) { return; } if(enable) { if(! isEmptyStateEnabled() && ! 
isErrorStateEnabled()) { saveCurrentViewState(); mEmptyStateView.setVisibility(VISIBLE); mCoreRecyclerView.setVisibility(GONE); mProgressBar.setVisibility(GONE); if(mErrorStateView != null) { mErrorStateView.setVisibility(GONE); } } } else { resetViewVisibility(); mProgressBar.setVisibility(GONE); } } // ============================================== // // ============= Helper Functions =============== // // ============================================= // private void resetViewVisibility() { mPrevViewVisibilityState = null; mCoreRecyclerView.setVisibility(VISIBLE); mProgressBar.setVisibility(VISIBLE); if(mEmptyStateView != null) { mEmptyStateView.setVisibility(GONE); } if(mErrorStateView != null) { mErrorStateView.setVisibility(GONE); } } private void saveCurrentViewState() { if(mPrevViewVisibilityState == null) { mPrevViewVisibilityState = new ViewVisibilityInstanceState(); } mPrevViewVisibilityState.setProgressViewVisibility(mProgressBar.getVisibility()); mPrevViewVisibilityState.setRecyclerViewVisibility(mCoreRecyclerView.getVisibility()); /** * If error view is not set, set the visibility to gone. */ if(mErrorStateView != null) { mPrevViewVisibilityState.setErrorViewVisibility(mErrorStateView.getVisibility()); } else { mPrevViewVisibilityState.setErrorViewVisibility(View.GONE); } /** * If empty view is not set, set the visibility to gone. 
*/ if(mEmptyStateView != null) { mPrevViewVisibilityState.setEmptyViewVisibility(mEmptyStateView.getVisibility()); } else { mPrevViewVisibilityState.setEmptyViewVisibility(View.GONE); } } private void restorePreviousViewState() { if(mPrevViewVisibilityState != null) { setCurrentVisibilityState(mPrevViewVisibilityState); } } private void setCurrentVisibilityState(ViewVisibilityInstanceState currentState) { mCoreRecyclerView.setVisibility(currentState.getRecyclerViewVisibility()); mProgressBar.setVisibility(currentState.getProgressViewVisibility()); if(mEmptyStateView != null) { mEmptyStateView.setVisibility(currentState.getEmptyViewVisibility()); } if(mErrorStateView != null) { mErrorStateView.setVisibility(currentState.getErrorViewVisibility()); } } /** * Add Visibility annotation for lint checks. */ @IntDef({VISIBLE, INVISIBLE, GONE}) public @interface Visibility {} /** * A Helper class that wraps the visibility of the views. */ private static class ViewVisibilityInstanceState { private int mRecyclerViewVisibility; private int mProgressViewVisibility; private int mErrorViewVisibility; private int mEmptyViewVisibility; @Visibility public int getEmptyViewVisibility() { return mEmptyViewVisibility; } public void setEmptyViewVisibility(@Visibility int emptyViewVisibility) { mEmptyViewVisibility = emptyViewVisibility; } @Visibility public int getErrorViewVisibility() { return mErrorViewVisibility; } public void setErrorViewVisibility(@Visibility int errorViewVisibility) { mErrorViewVisibility = errorViewVisibility; } @Visibility public int getRecyclerViewVisibility() { return mRecyclerViewVisibility; } public void setRecyclerViewVisibility(@Visibility int recyclerViewVisibility) { mRecyclerViewVisibility = recyclerViewVisibility; } @Visibility public int getProgressViewVisibility() { return mProgressViewVisibility; } public void setProgressViewVisibility(@Visibility int progressViewVisibility) { mProgressViewVisibility = progressViewVisibility; } } }
/* * Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.wso2.carbon.event.receiver.admin; import org.apache.axis2.AxisFault; import org.apache.axis2.engine.AxisConfiguration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.core.AbstractAdmin; import org.wso2.carbon.databridge.commons.Attribute; import org.wso2.carbon.databridge.commons.StreamDefinition; import org.wso2.carbon.event.input.adapter.core.InputEventAdapterConfiguration; import org.wso2.carbon.event.input.adapter.core.InputEventAdapterSchema; import org.wso2.carbon.event.input.adapter.core.InputEventAdapterService; import org.wso2.carbon.event.input.adapter.core.Property; import org.wso2.carbon.event.receiver.admin.internal.EventReceiverAdminConstants; import org.wso2.carbon.event.receiver.admin.internal.ds.EventReceiverAdminServiceValueHolder; import org.wso2.carbon.event.receiver.core.EventReceiverService; import org.wso2.carbon.event.receiver.core.config.*; import org.wso2.carbon.event.receiver.core.config.mapping.*; import org.wso2.carbon.event.receiver.core.exception.EventReceiverConfigurationException; import java.util.*; public class EventReceiverAdminService extends AbstractAdmin { private static Log log = LogFactory.getLog(EventReceiverAdminService.class); public EventReceiverConfigurationInfoDto[] 
getAllActiveEventReceiverConfigurations() throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); // get event receiver configurations List<EventReceiverConfiguration> eventReceiverConfigurationList; eventReceiverConfigurationList = eventReceiverService.getAllActiveEventReceiverConfigurations(); if (eventReceiverConfigurationList != null) { // create event receiver configuration details array EventReceiverConfigurationInfoDto[] eventReceiverConfigurationInfoDtoArray = new EventReceiverConfigurationInfoDto[eventReceiverConfigurationList.size()]; for (int index = 0; index < eventReceiverConfigurationInfoDtoArray.length; index++) { EventReceiverConfiguration eventReceiverConfiguration = eventReceiverConfigurationList.get(index); String eventReceiverName = eventReceiverConfiguration.getEventReceiverName(); String mappingType = eventReceiverConfiguration.getInputMapping().getMappingType(); String inputEventAdapterType = eventReceiverConfiguration.getFromAdapterConfiguration().getType(); String streamNameWithVersion = eventReceiverConfiguration.getToStreamName() + ":" + eventReceiverConfiguration.getToStreamVersion(); eventReceiverConfigurationInfoDtoArray[index] = new EventReceiverConfigurationInfoDto(); eventReceiverConfigurationInfoDtoArray[index].setEventReceiverName(eventReceiverName); eventReceiverConfigurationInfoDtoArray[index].setMessageFormat(mappingType); eventReceiverConfigurationInfoDtoArray[index].setInputAdapterType(inputEventAdapterType); eventReceiverConfigurationInfoDtoArray[index].setInputStreamId(streamNameWithVersion); eventReceiverConfigurationInfoDtoArray[index].setEnableStats(eventReceiverConfiguration.isStatisticsEnabled()); eventReceiverConfigurationInfoDtoArray[index].setEnableTracing(eventReceiverConfiguration.isTraceEnabled()); eventReceiverConfigurationInfoDtoArray[index].setEditable(eventReceiverConfiguration.isEditable()); } 
Arrays.sort(eventReceiverConfigurationInfoDtoArray, new Comparator() { @Override public int compare(Object o1, Object o2) { return ((EventReceiverConfigurationInfoDto) o1).getEventReceiverName().compareTo(((EventReceiverConfigurationInfoDto) o2).getEventReceiverName()); } }); return eventReceiverConfigurationInfoDtoArray; } else { return new EventReceiverConfigurationInfoDto[0]; } } public EventReceiverConfigurationInfoDto[] getAllStreamSpecificActiveEventReceiverConfigurations( String streamId) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); // get event receiver configurations List<EventReceiverConfiguration> eventReceiverConfigurationList; eventReceiverConfigurationList = eventReceiverService.getAllActiveEventReceiverConfigurations(streamId); if (eventReceiverConfigurationList != null) { // create event receiver configuration details array EventReceiverConfigurationInfoDto[] eventReceiverConfigurationInfoDtoArray = new EventReceiverConfigurationInfoDto[eventReceiverConfigurationList.size()]; for (int index = 0; index < eventReceiverConfigurationInfoDtoArray.length; index++) { EventReceiverConfiguration eventReceiverConfiguration = eventReceiverConfigurationList.get(index); String eventReceiverName = eventReceiverConfiguration.getEventReceiverName(); String mappingType = eventReceiverConfiguration.getInputMapping().getMappingType(); String inputEventAdapterType = eventReceiverConfiguration.getFromAdapterConfiguration().getType(); eventReceiverConfigurationInfoDtoArray[index] = new EventReceiverConfigurationInfoDto(); eventReceiverConfigurationInfoDtoArray[index].setEventReceiverName(eventReceiverName); eventReceiverConfigurationInfoDtoArray[index].setMessageFormat(mappingType); eventReceiverConfigurationInfoDtoArray[index].setInputAdapterType(inputEventAdapterType); eventReceiverConfigurationInfoDtoArray[index].setEnableStats(eventReceiverConfiguration.isStatisticsEnabled()); 
eventReceiverConfigurationInfoDtoArray[index].setEnableTracing(eventReceiverConfiguration.isTraceEnabled()); eventReceiverConfigurationInfoDtoArray[index].setEditable(eventReceiverConfiguration.isEditable()); } Arrays.sort(eventReceiverConfigurationInfoDtoArray,new Comparator() { @Override public int compare(Object o1, Object o2) { return ((EventReceiverConfigurationInfoDto) o1).getEventReceiverName().compareTo(((EventReceiverConfigurationInfoDto) o2).getEventReceiverName()); } }); return eventReceiverConfigurationInfoDtoArray; } else { return new EventReceiverConfigurationInfoDto[0]; } } public EventReceiverConfigurationFileDto[] getAllInactiveEventReceiverConfigurations() throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); List<EventReceiverConfigurationFile> eventReceiverConfigurationFileList = eventReceiverService.getAllInactiveEventReceiverConfigurations(); if (eventReceiverConfigurationFileList != null) { // create event receiver file details array EventReceiverConfigurationFileDto[] eventReceiverFileDtoArray = new EventReceiverConfigurationFileDto[eventReceiverConfigurationFileList.size()]; for (int index = 0; index < eventReceiverFileDtoArray.length; index++) { EventReceiverConfigurationFile eventReceiverConfigurationFile = eventReceiverConfigurationFileList.get(index); String fileName = eventReceiverConfigurationFile.getFileName(); String eventReceiverName = eventReceiverConfigurationFile.getEventReceiverName(); String statusMsg = eventReceiverConfigurationFile.getDeploymentStatusMessage(); if (eventReceiverConfigurationFile.getDependency() != null) { statusMsg = statusMsg + " [Dependency: " + eventReceiverConfigurationFile.getDependency() + "]"; } eventReceiverFileDtoArray[index] = new EventReceiverConfigurationFileDto(fileName, eventReceiverName, statusMsg); } Arrays.sort(eventReceiverFileDtoArray,new Comparator() { @Override public int compare(Object o1, Object o2) { return 
((EventReceiverConfigurationFileDto) o1).getFileName().compareTo(((EventReceiverConfigurationFileDto) o2).getFileName()); } }); return eventReceiverFileDtoArray; } else { return new EventReceiverConfigurationFileDto[0]; } } public EventReceiverConfigurationDto getActiveEventReceiverConfiguration( String eventReceiverName) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); EventReceiverConfiguration eventReceiverConfiguration = eventReceiverService.getActiveEventReceiverConfiguration(eventReceiverName); if (eventReceiverConfiguration != null) { EventReceiverConfigurationDto eventReceiverConfigurationDto = new EventReceiverConfigurationDto(); eventReceiverConfigurationDto.setEventReceiverName(eventReceiverConfiguration.getEventReceiverName()); String streamNameWithVersion = eventReceiverConfiguration.getToStreamName() + ":" + eventReceiverConfiguration.getToStreamVersion(); eventReceiverConfigurationDto.setToStreamNameWithVersion(streamNameWithVersion); InputEventAdapterConfiguration fromAdapterConfiguration = eventReceiverConfiguration.getFromAdapterConfiguration(); if (fromAdapterConfiguration != null) { InputEventAdapterService inputEventAdapterService = EventReceiverAdminServiceValueHolder.getInputEventAdapterService(); InputEventAdapterSchema inputEventAdapterSchema = inputEventAdapterService.getInputEventAdapterSchema(fromAdapterConfiguration.getType()); InputAdapterConfigurationDto fromAdapterConfigurationDto = new InputAdapterConfigurationDto(); fromAdapterConfigurationDto.setEventAdapterType(fromAdapterConfiguration.getType()); fromAdapterConfigurationDto.setSupportedMessageFormats( inputEventAdapterSchema.getSupportedMessageFormats(). 
toArray(new String[inputEventAdapterSchema.getSupportedMessageFormats().size()])); Map<String, String> inputAdapterProperties = new HashMap<String, String>(); inputAdapterProperties.putAll(fromAdapterConfiguration.getProperties()); DetailInputAdapterPropertyDto[] detailInputAdapterPropertyDtos = getPropertyConfigurations(inputAdapterProperties, inputEventAdapterSchema.getPropertyList()); fromAdapterConfigurationDto.setInputEventAdapterProperties(detailInputAdapterPropertyDtos); eventReceiverConfigurationDto.setFromAdapterConfigurationDto(fromAdapterConfigurationDto); } InputMapping inputMapping = eventReceiverConfiguration.getInputMapping(); eventReceiverConfigurationDto.setCustomMappingEnabled(eventReceiverConfiguration.getInputMapping().isCustomMappingEnabled()); eventReceiverConfigurationDto.setMessageFormat(inputMapping.getMappingType()); if (inputMapping.isCustomMappingEnabled()) { if (inputMapping.getMappingType().equalsIgnoreCase(EventReceiverConstants.ER_WSO2EVENT_MAPPING_TYPE)) { List<EventMappingPropertyDto> metaMappingPropertyDtos = new ArrayList<EventMappingPropertyDto>(); List<EventMappingPropertyDto> correlationMappingPropertyDtos = new ArrayList<EventMappingPropertyDto>(); List<EventMappingPropertyDto> payloadMappingPropertyDtos = new ArrayList<EventMappingPropertyDto>(); for (InputMappingAttribute inputMappingAttribute : inputMapping.getInputMappingAttributes()) { EventMappingPropertyDto mappingPropertyDto = new EventMappingPropertyDto(); mappingPropertyDto.setName(inputMappingAttribute.getFromElementKey()); mappingPropertyDto.setValueOf(inputMappingAttribute.getToElementKey()); mappingPropertyDto.setType(EventReceiverAdminConstants.ATTRIBUTE_TYPE_STRING_MAP.get(inputMappingAttribute.getToElementType())); mappingPropertyDto.setDefaultValue(inputMappingAttribute.getDefaultValue()); if (EventReceiverConstants.META_DATA_VAL.equalsIgnoreCase(inputMappingAttribute.getFromElementType())) { metaMappingPropertyDtos.add(mappingPropertyDto); } else if 
(EventReceiverConstants.CORRELATION_DATA_VAL.equalsIgnoreCase(inputMappingAttribute.getFromElementType())) { correlationMappingPropertyDtos.add(mappingPropertyDto); } else if (EventReceiverConstants.PAYLOAD_DATA_VAL.equalsIgnoreCase(inputMappingAttribute.getFromElementType())) { payloadMappingPropertyDtos.add(mappingPropertyDto); } } eventReceiverConfigurationDto.setMetaMappingPropertyDtos(metaMappingPropertyDtos.toArray(new EventMappingPropertyDto[metaMappingPropertyDtos.size()])); eventReceiverConfigurationDto.setCorrelationMappingPropertyDtos(correlationMappingPropertyDtos.toArray(new EventMappingPropertyDto[correlationMappingPropertyDtos.size()])); eventReceiverConfigurationDto.setMappingPropertyDtos(payloadMappingPropertyDtos.toArray(new EventMappingPropertyDto[payloadMappingPropertyDtos.size()])); } else if (inputMapping.getMappingType().equalsIgnoreCase(EventReceiverConstants.ER_XML_MAPPING_TYPE)) { List<EventMappingPropertyDto> xPathDefinitions = new ArrayList<EventMappingPropertyDto>(); for (XPathDefinition xPathDefinition : ((XMLInputMapping) inputMapping).getXPathDefinitions()) { EventMappingPropertyDto mappingPropertyDto = new EventMappingPropertyDto(); mappingPropertyDto.setName(xPathDefinition.getPrefix()); mappingPropertyDto.setValueOf(xPathDefinition.getNamespaceUri()); xPathDefinitions.add(mappingPropertyDto); } eventReceiverConfigurationDto.setXpathDefinitionMappingPropertyDtos(xPathDefinitions.toArray(new EventMappingPropertyDto[xPathDefinitions.size()])); List<EventMappingPropertyDto> mappingPropertyDtos = new ArrayList<EventMappingPropertyDto>(); for (InputMappingAttribute inputMappingAttribute : inputMapping.getInputMappingAttributes()) { EventMappingPropertyDto mappingPropertyDto = new EventMappingPropertyDto(); mappingPropertyDto.setName(inputMappingAttribute.getFromElementKey()); mappingPropertyDto.setValueOf(inputMappingAttribute.getToElementKey()); 
mappingPropertyDto.setType(EventReceiverAdminConstants.ATTRIBUTE_TYPE_STRING_MAP.get(inputMappingAttribute.getToElementType())); mappingPropertyDto.setDefaultValue(inputMappingAttribute.getDefaultValue()); mappingPropertyDtos.add(mappingPropertyDto); } eventReceiverConfigurationDto.setMappingPropertyDtos(mappingPropertyDtos.toArray(new EventMappingPropertyDto[mappingPropertyDtos.size()])); eventReceiverConfigurationDto.setParentSelectorXpath(((XMLInputMapping) inputMapping).getParentSelectorXpath()); } else { // for map, text and json List<EventMappingPropertyDto> mappingPropertyDtos = new ArrayList<EventMappingPropertyDto>(); for (InputMappingAttribute inputMappingAttribute : inputMapping.getInputMappingAttributes()) { EventMappingPropertyDto mappingPropertyDto = new EventMappingPropertyDto(); mappingPropertyDto.setName(inputMappingAttribute.getFromElementKey()); mappingPropertyDto.setValueOf(inputMappingAttribute.getToElementKey()); mappingPropertyDto.setType(EventReceiverAdminConstants.ATTRIBUTE_TYPE_STRING_MAP.get(inputMappingAttribute.getToElementType())); mappingPropertyDto.setDefaultValue(inputMappingAttribute.getDefaultValue()); mappingPropertyDtos.add(mappingPropertyDto); } eventReceiverConfigurationDto.setMappingPropertyDtos(mappingPropertyDtos.toArray(new EventMappingPropertyDto[mappingPropertyDtos.size()])); } } return eventReceiverConfigurationDto; } return null; } public String getActiveEventReceiverConfigurationContent(String eventReceiverName) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); try { return eventReceiverService.getActiveEventReceiverConfigurationContent(eventReceiverName); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } } public String getInactiveEventReceiverConfigurationContent(String fileName) throws AxisFault { EventReceiverService eventReceiverService = 
EventReceiverAdminServiceValueHolder.getEventReceiverService(); try { String eventReceiverConfigurationFile = eventReceiverService.getInactiveEventReceiverConfigurationContent(fileName); return eventReceiverConfigurationFile.trim(); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } } public boolean undeployActiveEventReceiverConfiguration(String eventReceiverName) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); AxisConfiguration axisConfiguration = getAxisConfig(); try { eventReceiverService.undeployActiveEventReceiverConfiguration(eventReceiverName); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public boolean undeployInactiveEventReceiverConfiguration(String fileName) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); try { AxisConfiguration axisConfiguration = getAxisConfig(); eventReceiverService.undeployInactiveEventReceiverConfiguration(fileName); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public boolean editActiveEventReceiverConfiguration(String eventReceiverConfiguration, String eventReceiverName) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); AxisConfiguration axisConfiguration = getAxisConfig(); try { eventReceiverService.editActiveEventReceiverConfiguration(eventReceiverConfiguration, eventReceiverName); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public boolean editInactiveEventReceiverConfiguration( String eventReceiverConfiguration, String fileName) throws AxisFault { EventReceiverService 
eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); AxisConfiguration axisConfiguration = getAxisConfig(); try { eventReceiverService.editInactiveEventReceiverConfiguration(eventReceiverConfiguration, fileName); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public boolean deployEventReceiverConfiguration(String eventReceiverConfigXml) throws AxisFault { try { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); eventReceiverService.deployEventReceiverConfiguration(eventReceiverConfigXml); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public boolean deployWso2EventReceiverConfiguration(String eventReceiverName, String streamNameWithVersion, String eventAdapterType, EventMappingPropertyDto[] metaData, EventMappingPropertyDto[] correlationData, EventMappingPropertyDto[] payloadData, BasicInputAdapterPropertyDto[] inputPropertyConfiguration, boolean mappingEnabled, String fromStreamNameWithVersion) throws AxisFault { if (checkEventReceiverValidity(eventReceiverName)) { try { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); EventReceiverConfiguration eventReceiverConfiguration = new EventReceiverConfiguration(); eventReceiverConfiguration.setEventReceiverName(eventReceiverName); String[] toStreamProperties = streamNameWithVersion.split(":"); eventReceiverConfiguration.setToStreamName(toStreamProperties[0]); eventReceiverConfiguration.setToStreamVersion(toStreamProperties[1]); constructInputAdapterRelatedConfigs(eventReceiverName, eventAdapterType, inputPropertyConfiguration, eventReceiverConfiguration, EventReceiverConstants.ER_WSO2EVENT_MAPPING_TYPE); WSO2EventInputMapping wso2EventInputMapping = new WSO2EventInputMapping(); 
wso2EventInputMapping.setCustomMappingEnabled(mappingEnabled); if (mappingEnabled) { if (metaData != null && metaData.length != 0) { for (EventMappingPropertyDto mappingPropertyDto : metaData) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingPropertyDto.getName(), mappingPropertyDto.getValueOf(), EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingPropertyDto.getType()), EventReceiverConstants.META_DATA_VAL); inputProperty.setDefaultValue(mappingPropertyDto.getDefaultValue()); wso2EventInputMapping.addInputMappingAttribute(inputProperty); } } if (correlationData != null && correlationData.length != 0) { for (EventMappingPropertyDto mappingPropertyDto : correlationData) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingPropertyDto.getName(), mappingPropertyDto.getValueOf(), EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingPropertyDto.getType()), EventReceiverConstants.CORRELATION_DATA_VAL); inputProperty.setDefaultValue(mappingPropertyDto.getDefaultValue()); wso2EventInputMapping.addInputMappingAttribute(inputProperty); } } if (payloadData != null && payloadData.length != 0) { for (EventMappingPropertyDto mappingPropertyDto : payloadData) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingPropertyDto.getName(), mappingPropertyDto.getValueOf(), EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingPropertyDto.getType()), EventReceiverConstants.PAYLOAD_DATA_VAL); inputProperty.setDefaultValue(mappingPropertyDto.getDefaultValue()); wso2EventInputMapping.addInputMappingAttribute(inputProperty); } } String[] fromStreamProperties = fromStreamNameWithVersion.split(":"); wso2EventInputMapping.setFromEventName(fromStreamProperties[0]); wso2EventInputMapping.setFromEventVersion(fromStreamProperties[1]); } eventReceiverConfiguration.setInputMapping(wso2EventInputMapping); eventReceiverService.deployEventReceiverConfiguration(eventReceiverConfiguration); } catch 
(EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } } else { throw new AxisFault(eventReceiverName + " is already registered for this tenant"); } return true; } public boolean deployTextEventReceiverConfiguration(String eventReceiverName, String streamNameWithVersion, String eventAdapterType, EventMappingPropertyDto[] inputMappings, BasicInputAdapterPropertyDto[] inputPropertyConfiguration, boolean mappingEnabled) throws AxisFault { if (checkEventReceiverValidity(eventReceiverName)) { try { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); EventReceiverConfiguration eventReceiverConfiguration = new EventReceiverConfiguration(); eventReceiverConfiguration.setEventReceiverName(eventReceiverName); String[] toStreamProperties = streamNameWithVersion.split(":"); eventReceiverConfiguration.setToStreamName(toStreamProperties[0]); eventReceiverConfiguration.setToStreamVersion(toStreamProperties[1]); constructInputAdapterRelatedConfigs(eventReceiverName, eventAdapterType, inputPropertyConfiguration, eventReceiverConfiguration, EventReceiverConstants.ER_TEXT_MAPPING_TYPE); TextInputMapping textInputMapping = new TextInputMapping(); textInputMapping.setCustomMappingEnabled(mappingEnabled); if (mappingEnabled) { if (inputMappings != null && inputMappings.length != 0) { for (EventMappingPropertyDto mappingProperty : inputMappings) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingProperty.getName(), mappingProperty.getValueOf(), EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingProperty.getType())); inputProperty.setDefaultValue(mappingProperty.getDefaultValue()); textInputMapping.addInputMappingAttribute(inputProperty); } } } eventReceiverConfiguration.setInputMapping(textInputMapping); eventReceiverService.deployEventReceiverConfiguration(eventReceiverConfiguration); } catch (EventReceiverConfigurationException e) { 
log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } } else { throw new AxisFault(eventReceiverName + " is already registered for this tenant"); } return true; } public boolean deployXmlEventReceiverConfiguration(String eventReceiverName, String streamNameWithVersion, String eventAdapterType, String parentXpath, EventMappingPropertyDto[] namespaces, EventMappingPropertyDto[] inputMappings, BasicInputAdapterPropertyDto[] inputPropertyConfiguration, boolean mappingEnabled) throws AxisFault { if (checkEventReceiverValidity(eventReceiverName)) { try { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); EventReceiverConfiguration eventReceiverConfiguration = new EventReceiverConfiguration(); eventReceiverConfiguration.setEventReceiverName(eventReceiverName); String[] toStreamProperties = streamNameWithVersion.split(":"); eventReceiverConfiguration.setToStreamName(toStreamProperties[0]); eventReceiverConfiguration.setToStreamVersion(toStreamProperties[1]); constructInputAdapterRelatedConfigs(eventReceiverName, eventAdapterType, inputPropertyConfiguration, eventReceiverConfiguration, EventReceiverConstants.ER_XML_MAPPING_TYPE); XMLInputMapping xmlInputMapping = new XMLInputMapping(); xmlInputMapping.setCustomMappingEnabled(mappingEnabled); xmlInputMapping.setParentSelectorXpath(parentXpath); if (namespaces != null && namespaces.length != 0) { List<XPathDefinition> xPathDefinitions = new ArrayList<XPathDefinition>(); for (EventMappingPropertyDto namespace : namespaces) { XPathDefinition xPathDefinition = new XPathDefinition(namespace.getName(), namespace.getValueOf()); xPathDefinitions.add(xPathDefinition); } xmlInputMapping.setXPathDefinitions(xPathDefinitions); } if (mappingEnabled) { if (inputMappings != null && inputMappings.length != 0) { for (EventMappingPropertyDto mappingProperty : inputMappings) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingProperty.getName(), 
mappingProperty.getValueOf(), EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingProperty.getType())); inputProperty.setDefaultValue(mappingProperty.getDefaultValue()); xmlInputMapping.addInputMappingAttribute(inputProperty); } } } eventReceiverConfiguration.setInputMapping(xmlInputMapping); eventReceiverService.deployEventReceiverConfiguration(eventReceiverConfiguration); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } } else { throw new AxisFault(eventReceiverName + " is already registered for this tenant"); } return true; } public boolean deployMapEventReceiverConfiguration(String eventReceiverName, String streamNameWithVersion, String eventAdapterType, EventMappingPropertyDto[] inputMappings, BasicInputAdapterPropertyDto[] inputPropertyConfiguration, boolean mappingEnabled) throws AxisFault { if (checkEventReceiverValidity(eventReceiverName)) { try { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); EventReceiverConfiguration eventReceiverConfiguration = new EventReceiverConfiguration(); eventReceiverConfiguration.setEventReceiverName(eventReceiverName); String[] toStreamProperties = streamNameWithVersion.split(":"); eventReceiverConfiguration.setToStreamName(toStreamProperties[0]); eventReceiverConfiguration.setToStreamVersion(toStreamProperties[1]); constructInputAdapterRelatedConfigs(eventReceiverName, eventAdapterType, inputPropertyConfiguration, eventReceiverConfiguration, EventReceiverConstants.ER_MAP_MAPPING_TYPE); MapInputMapping mapInputMapping = new MapInputMapping(); mapInputMapping.setCustomMappingEnabled(mappingEnabled); if (mappingEnabled) { if (inputMappings != null && inputMappings.length != 0) { for (EventMappingPropertyDto mappingProperty : inputMappings) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingProperty.getName(), mappingProperty.getValueOf(), 
EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingProperty.getType())); inputProperty.setDefaultValue(mappingProperty.getDefaultValue()); mapInputMapping.addInputMappingAttribute(inputProperty); } } } eventReceiverConfiguration.setInputMapping(mapInputMapping); eventReceiverService.deployEventReceiverConfiguration(eventReceiverConfiguration); } catch (EventReceiverConfigurationException ex) { log.error(ex.getMessage(), ex); throw new AxisFault(ex.getMessage()); } } else { throw new AxisFault(eventReceiverName + " is already registered for this tenant"); } return true; } public boolean deployJsonEventReceiverConfiguration(String eventReceiverName, String streamNameWithVersion, String eventAdapterType, EventMappingPropertyDto[] inputMappings, BasicInputAdapterPropertyDto[] inputPropertyConfiguration, boolean mappingEnabled) throws AxisFault { if (checkEventReceiverValidity(eventReceiverName)) { try { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); EventReceiverConfiguration eventReceiverConfiguration = new EventReceiverConfiguration(); eventReceiverConfiguration.setEventReceiverName(eventReceiverName); String[] toStreamProperties = streamNameWithVersion.split(":"); eventReceiverConfiguration.setToStreamName(toStreamProperties[0]); eventReceiverConfiguration.setToStreamVersion(toStreamProperties[1]); constructInputAdapterRelatedConfigs(eventReceiverName, eventAdapterType, inputPropertyConfiguration, eventReceiverConfiguration, EventReceiverConstants.ER_JSON_MAPPING_TYPE); JSONInputMapping jsonInputMapping = new JSONInputMapping(); jsonInputMapping.setCustomMappingEnabled(mappingEnabled); if (mappingEnabled) { if (inputMappings != null && inputMappings.length != 0) { for (EventMappingPropertyDto mappingProperty : inputMappings) { InputMappingAttribute inputProperty = new InputMappingAttribute(mappingProperty.getName(), mappingProperty.getValueOf(), 
EventReceiverAdminConstants.STRING_ATTRIBUTE_TYPE_MAP.get(mappingProperty.getType())); inputProperty.setDefaultValue(mappingProperty.getDefaultValue()); jsonInputMapping.addInputMappingAttribute(inputProperty); } } } eventReceiverConfiguration.setInputMapping(jsonInputMapping); eventReceiverService.deployEventReceiverConfiguration(eventReceiverConfiguration); } catch (EventReceiverConfigurationException ex) { log.error(ex.getMessage(), ex); throw new AxisFault(ex.getMessage()); } } else { throw new AxisFault(eventReceiverName + " is already registered for this tenant"); } return true; } public boolean setStatisticsEnabled(String eventReceiverName, boolean flag) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); AxisConfiguration axisConfiguration = getAxisConfig(); try { eventReceiverService.setStatisticsEnabled(eventReceiverName, flag); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public boolean setTracingEnabled(String eventReceiverName, boolean flag) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); AxisConfiguration axisConfiguration = getAxisConfig(); try { eventReceiverService.setTraceEnabled(eventReceiverName, flag); } catch (EventReceiverConfigurationException e) { log.error(e.getMessage(), e); throw new AxisFault(e.getMessage()); } return true; } public InputAdapterConfigurationDto getInputAdapterConfigurationSchema(String adopterType) { InputEventAdapterService inputEventAdapterService = EventReceiverAdminServiceValueHolder.getInputEventAdapterService(); InputEventAdapterSchema inputEventAdapterSchema = inputEventAdapterService.getInputEventAdapterSchema(adopterType); InputAdapterConfigurationDto inputAdapterConfigurationDto = new InputAdapterConfigurationDto(); 
inputAdapterConfigurationDto.setInputEventAdapterProperties(getPropertyConfigurations(null, inputEventAdapterSchema.getPropertyList())); inputAdapterConfigurationDto.setEventAdapterType(adopterType); inputAdapterConfigurationDto.setSupportedMessageFormats( inputEventAdapterSchema.getSupportedMessageFormats(). toArray(new String[inputEventAdapterSchema.getSupportedMessageFormats().size()])); return inputAdapterConfigurationDto; } public String[] getAllInputAdapterTypes() { InputEventAdapterService inputEventAdapterService = EventReceiverAdminServiceValueHolder.getInputEventAdapterService(); List<String> inputEventAdapters = inputEventAdapterService.getInputEventAdapterTypes(); if (inputEventAdapters == null) { return new String[0]; } else { Collections.sort(inputEventAdapters); String[] types = new String[inputEventAdapters.size()]; return inputEventAdapters.toArray(types); } } private DetailInputAdapterPropertyDto[] getPropertyConfigurations(Map<String, String> messageProperties, List<Property> propertyList) { if (propertyList != null && propertyList.size() > 0) { DetailInputAdapterPropertyDto[] detailInputAdapterPropertyDtoArray = new DetailInputAdapterPropertyDto[propertyList.size()]; int index = 0; for (Property property : propertyList) { // create input event property String value = null; if (messageProperties != null) { value = messageProperties.get(property.getPropertyName()); } detailInputAdapterPropertyDtoArray[index] = new DetailInputAdapterPropertyDto(property.getPropertyName(), value); // set input event property parameters detailInputAdapterPropertyDtoArray[index].setSecured(property.isSecured()); detailInputAdapterPropertyDtoArray[index].setRequired(property.isRequired()); detailInputAdapterPropertyDtoArray[index].setDisplayName(property.getDisplayName()); detailInputAdapterPropertyDtoArray[index].setDefaultValue(property.getDefaultValue()); detailInputAdapterPropertyDtoArray[index].setHint(property.getHint()); 
detailInputAdapterPropertyDtoArray[index].setOptions(property.getOptions()); index++; } return detailInputAdapterPropertyDtoArray; } return new DetailInputAdapterPropertyDto[0]; } private boolean checkStreamAttributeValidity(List<String> inputEventAttributes, StreamDefinition streamDefinition) { if (streamDefinition != null) { List<String> inComingStreamAttributes = new ArrayList<String>(); final String PROPERTY_META_PREFIX = "meta_"; final String PROPERTY_CORRELATION_PREFIX = "correlation_"; List<Attribute> metaAttributeList = streamDefinition.getMetaData(); List<Attribute> correlationAttributeList = streamDefinition.getCorrelationData(); List<Attribute> payloadAttributeList = streamDefinition.getPayloadData(); if (metaAttributeList != null) { for (Attribute attribute : metaAttributeList) { inComingStreamAttributes.add(PROPERTY_META_PREFIX + attribute.getName()); } } if (correlationAttributeList != null) { for (Attribute attribute : correlationAttributeList) { inComingStreamAttributes.add(PROPERTY_CORRELATION_PREFIX + attribute.getName()); } } if (payloadAttributeList != null) { for (Attribute attribute : payloadAttributeList) { inComingStreamAttributes.add(attribute.getName()); } } if (inputEventAttributes.size() > 0) { if (inComingStreamAttributes.containsAll(inputEventAttributes)) { return true; } else { return false; } } return true; } else { return false; } } private String getStreamAttributes(StreamDefinition streamDefinition) { List<Attribute> metaAttributeList = streamDefinition.getMetaData(); List<Attribute> correlationAttributeList = streamDefinition.getCorrelationData(); List<Attribute> payloadAttributeList = streamDefinition.getPayloadData(); String attributes = ""; if (metaAttributeList != null) { for (Attribute attribute : metaAttributeList) { attributes += EventReceiverAdminConstants.PROPERTY_META_PREFIX + attribute.getName() + " " + attribute.getType().toString().toLowerCase() + ", \n"; } } if (correlationAttributeList != null) { for (Attribute 
attribute : correlationAttributeList) { attributes += EventReceiverAdminConstants.PROPERTY_CORRELATION_PREFIX + attribute.getName() + " " + attribute.getType().toString().toLowerCase() + ", \n"; } } if (payloadAttributeList != null) { for (Attribute attribute : payloadAttributeList) { attributes += attribute.getName() + " " + attribute.getType().toString().toLowerCase() + ", \n"; } } if (!attributes.equals("")) { return attributes.substring(0, attributes.lastIndexOf(",")); } else { return attributes; } } private boolean checkEventReceiverValidity(String eventReceiverName) throws AxisFault { EventReceiverService eventReceiverService = EventReceiverAdminServiceValueHolder.getEventReceiverService(); int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); List<EventReceiverConfiguration> eventReceiverConfigurationList = null; eventReceiverConfigurationList = eventReceiverService.getAllActiveEventReceiverConfigurations(); Iterator eventReceiverConfigurationIterator = eventReceiverConfigurationList.iterator(); while (eventReceiverConfigurationIterator.hasNext()) { EventReceiverConfiguration eventReceiverConfiguration = (EventReceiverConfiguration) eventReceiverConfigurationIterator.next(); if (eventReceiverConfiguration.getEventReceiverName().equalsIgnoreCase(eventReceiverName)) { return false; } } return true; } private String getPropertyAttributeDataType(String propertyName, StreamDefinition streamDefinition) throws AxisFault { if (propertyName != null) { List<Attribute> metaDataList = streamDefinition.getMetaData(); if (metaDataList != null) { for (Attribute attribute : metaDataList) { if (propertyName.equalsIgnoreCase(EventReceiverAdminConstants.PROPERTY_META_PREFIX + attribute.getName())) { return attribute.getType().toString().toLowerCase(); } } } List<Attribute> correlationDataList = streamDefinition.getCorrelationData(); if (correlationDataList != null) { for (Attribute attribute : correlationDataList) { if 
(propertyName.equalsIgnoreCase(EventReceiverAdminConstants.PROPERTY_CORRELATION_PREFIX + attribute.getName())) { return attribute.getType().toString().toLowerCase(); } } } List<Attribute> payloadDataList = streamDefinition.getPayloadData(); if (payloadDataList != null) { for (Attribute attribute : payloadDataList) { if (propertyName.equalsIgnoreCase(attribute.getName())) { return attribute.getType().toString().toLowerCase(); } } } } throw new AxisFault("Input Stream attributes are not matching with input stream definition"); } private void constructInputAdapterRelatedConfigs(String eventReceiverName, String eventAdapterType, BasicInputAdapterPropertyDto[] inputPropertyConfiguration, EventReceiverConfiguration eventReceiverConfiguration, String messageFormat) { InputEventAdapterConfiguration inputEventAdapterConfiguration = new InputEventAdapterConfiguration(); inputEventAdapterConfiguration.setName(eventReceiverName); inputEventAdapterConfiguration.setType(eventAdapterType); inputEventAdapterConfiguration.setMessageFormat(messageFormat); inputEventAdapterConfiguration.setProperties(new HashMap<String, String>()); // add input message property configuration to the map if (inputPropertyConfiguration != null && inputPropertyConfiguration.length != 0) { for (BasicInputAdapterPropertyDto eventReceiverProperty : inputPropertyConfiguration) { if (!eventReceiverProperty.getValue().trim().equals("")) { inputEventAdapterConfiguration.getProperties().put(eventReceiverProperty.getKey().trim(), eventReceiverProperty.getValue().trim()); } } } eventReceiverConfiguration.setFromAdapterConfiguration(inputEventAdapterConfiguration); } }
/******************************************************************************* * Copyright 2013-2015 Esri * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.esri.militaryapps.model; import com.esri.militaryapps.util.Utilities; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.List; import java.util.Timer; import java.util.TimerTask; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParserFactory; /** * A simulator that provides locations from a GPX file. The military-apps-library-java * library has a built-in GPX file of a simulated GPS feed in Jalalabad, Afghanistan, * that will be used if the constructor is called with no arguments. * * Note that if you subclass LocationSimulator, you need to override LocationController.createLocationSimulator() * so that your subclass, not this LocationSimulator class, will be instantiated. 
*/
public class LocationSimulator extends LocationProvider {

    private static final Logger logger = Logger.getLogger(LocationSimulator.class.getName());

    /**
     * SAX handler that accumulates {@code <trkpt>} elements (lat/lon plus nested
     * {@code <time>} and {@code <speed>}) from a GPX document into a Location list,
     * sorting the list when the document ends.
     */
    private class GPXHandler extends DefaultHandler {

        private List<Location> locations = new ArrayList<Location>();
        private Double lat = null;            // pending track point latitude; null until parsed
        private Double lon = null;            // pending track point longitude; null until parsed
        private Calendar time = null;         // pending track point timestamp
        private double speed = 0;             // pending track point speed
        private boolean readingTrkpt = false; // currently inside a <trkpt> element
        private boolean readingTime = false;  // currently inside a <time> element (within <trkpt>)
        private boolean readingSpeed = false; // currently inside a <speed> element (within <trkpt>)
        private StringBuilder charsBuffer = new StringBuilder(); // element text; SAX may deliver it in chunks

        @Override
        public void startDocument() throws SAXException {
            locations = new ArrayList<Location>();
        }

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
            if ("trkpt".equalsIgnoreCase(qName)) {
                readingTrkpt = true;
                String latString = attributes.getValue("lat");
                String lonString = attributes.getValue("lon");
                try {
                    //Do these both in one try block. We could use two try blocks,
                    //but one value is no good without the other, so don't bother.
                    lat = Double.parseDouble(latString);
                    lon = Double.parseDouble(lonString);
                } catch (Exception e) {
                    //Do nothing: a point with an unparseable coordinate is silently skipped.
                }
            } else if (readingTrkpt && "time".equalsIgnoreCase(qName)) {
                readingTime = true;
            } else if (readingTrkpt && "speed".equalsIgnoreCase(qName)) {
                readingSpeed = true;
            }
        }

        @Override
        public void characters(char[] ch, int start, int length) throws SAXException {
            // Buffer text because SAX may call characters() multiple times per element.
            charsBuffer.append(ch, start, length);
        }

        @Override
        public void endElement(String uri, String localName, String qName) throws SAXException {
            if (readingTrkpt && "trkpt".equalsIgnoreCase(qName)) {
                readingTrkpt = false;
                Location location;
                if (0 == locations.size()) {
                    // First point: no previous location to derive heading/course from.
                    location = new Location(lon, lat, time, speed, 0);
                } else {
                    location = new Location(lon, lat, time, speed, locations.get(locations.size() - 1));
                }
                locations.add(location);
                // Reset per-point state for the next <trkpt>.
                lat = null;
                lon = null;
                time = null;
                speed = 0;
            } else if (readingTime && "time".equalsIgnoreCase(qName)) {
                String dateTimeString = charsBuffer.toString().trim();
                try {
                    time = Utilities.parseXmlDateTime(dateTimeString);
                } catch (Exception e) {
                    // Doubled apostrophes are java.util.logging MessageFormat escapes.
                    logger.log(Level.INFO, "Couldn''t parse datetime ''{0}''", dateTimeString);
                }
                readingTime = false;
            } else if (readingSpeed && "speed".equalsIgnoreCase(qName)) {
                try {
                    speed = Double.parseDouble(charsBuffer.toString().trim());
                } catch (NumberFormatException nfe) {
                    //Do nothing: keep the default speed of 0.
                }
                readingSpeed = false;
            }
            charsBuffer = new StringBuilder();
        }

        @Override
        public void endDocument() throws SAXException {
            // Relies on Location implementing Comparable (presumably by timestamp — confirm in Location).
            Collections.sort(locations);
        }
    }

    private final List<Location> locations;                 // parsed track points; guarded by synchronized(locations)
    private final Object gpsPointsIndexLock = new Object(); // guards gpsPointsIndex
    private Timer timer = null;
    private TimerTask timerTask = null;
    private int gpsPointsIndex = 0;        // index of the next point to emit
    private double speedMultiplier = 1.0;  // playback speed scale; > 0
    private int timeout = 0;               // NOTE(review): never read in this chunk — possibly dead
    private LocationProviderState state = LocationProviderState.STOPPED;

    /**
     * Creates a new LocationSimulator based on a GPX file.
     * @param gpxFile the GPX file.
     * @throws ParserConfigurationException
     * @throws SAXException
     * @throws IOException
     */
    public LocationSimulator(File gpxFile) throws ParserConfigurationException, SAXException, IOException {
        this(new FileInputStream(gpxFile));
    }

    /**
     * Creates a new LocationSimulator based on an InputStream containing GPX-formatted data.
     * Parsing happens on a background thread; {@code locations} fills in asynchronously.
     * @param gpxInputStream the GPX input.
     * @throws ParserConfigurationException
     * @throws SAXException
     * @throws IOException
     */
    public LocationSimulator(final InputStream gpxInputStream) throws ParserConfigurationException, SAXException, IOException {
        final GPXHandler handler = new GPXHandler();
        locations = new ArrayList<Location>();
        new Thread() {
            @Override
            public void run() {
                try {
                    SAXParserFactory.newInstance().newSAXParser().parse(gpxInputStream, handler);
                    // NOTE(review): handler.locations is read here without synchronizing on the
                    // handler itself; safe only because parsing completed on this same thread.
                    synchronized (locations) {
                        locations.addAll(handler.locations);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }.start();
    }

    /**
     * Gets a track point. If isRelativeIndex is false, the index is used as is.
     * If isRelativeIndex is true, the index provided is relative to the current index.
     * For example, if there are
     * 10 points, numbered 0 through 9, and the simulator is currently on point 6,
     * then:
     * <ul>
     * <li>getTrackPoint(-2) returns point 4</li>
     * <li>getTrackPoint(-1) returns point 5</li>
     * <li>getTrackPoint(0) returns point 6</li>
     * <li>getTrackPoint(1) returns point 7</li>
     * <li>getTrackPoint(2) returns point 8</li>
     * <li>getTrackPoint(3) returns point 0</li>
     * </ul>
     * @param index
     * @param isRelativeIndex
     * @return
     */
    private Location getTrackPoint(int index, boolean isRelativeIndex) {
        int locationsSize;
        synchronized (locations) {
            locationsSize = locations.size();
        }
        if (0 == locationsSize) {
            // Parsing may still be in progress (or the file had no points).
            return null;
        } else {
            synchronized (locations) {
                if (isRelativeIndex) {
                    // Normalize negative offsets, then wrap around the track.
                    while (index < 0) {
                        index += locations.size();
                    }
                    synchronized (gpsPointsIndexLock) {
                        index = (gpsPointsIndex + index) % locations.size();
                    }
                }
                return locations.get(index);
            }
        }
    }

    /**
     * Returns the speed multiplier.
     * @return the speed multiplier.
     */
    public double getSpeedMultiplier() {
        return speedMultiplier;
    }

    /**
     * Sets a speed multiplier, which increases or decreases the speed of GPS updates
     * compared to the actual speed specified in the GPX file.
     * Non-positive values are silently ignored.
     * @param speedMultiplier the speed multiplier to set.
     */
    public void setSpeedMultiplier(double speedMultiplier) {
        if (0 < speedMultiplier) {
            this.speedMultiplier = speedMultiplier;
        }
    }

    // Computes the delay (ms) before emitting the next point, scaled by speedMultiplier;
    // falls back to 1000 ms when timestamps are missing or the track has <= 1 point.
    private long getNextDelay() {
        int locationsSize;
        synchronized (locations) {
            locationsSize = locations.size();
        }
        if (1 >= locationsSize) {
            return 1000;
        } else {
            int currentIndex;
            synchronized (gpsPointsIndexLock) {
                currentIndex = gpsPointsIndex;
            }
            int previousIndex = currentIndex - 1;
            if (previousIndex < 0) {
                synchronized (locations) {
                    previousIndex = locations.size() - 1;
                }
            }
            long theDelay = 0;
            // NOTE(review): both lookups pass isRelativeIndex=true, so each index is added to
            // gpsPointsIndex again inside getTrackPoint — verify this offset is intentional.
            Location currentLocation = getTrackPoint(currentIndex, true);
            Location previousLocation = getTrackPoint(previousIndex, true);
            if (null == currentLocation.getTimestamp() || null == previousLocation.getTimestamp()) {
                theDelay = 0;
            } else {
                theDelay = currentLocation.getTimestamp().getTimeInMillis() - previousLocation.getTimestamp().getTimeInMillis();
                theDelay = (long) Math.round(((double) theDelay) / speedMultiplier);
            }
            if (0 >= theDelay) {
                theDelay = 1000;
            }
            return theDelay;
        }
    }

    /**
     * Starts the simulator.
     * Emits the current point immediately, then re-schedules itself one point at a time
     * via a daemon Timer (each tick calls start() again with a freshly computed delay).
     */
    @Override
    public void start() {
        if (null != timer) {
            timer.cancel();
        }
        Location currentTrackPoint = getTrackPoint(0, true);
        sendLocation(currentTrackPoint);
        synchronized (gpsPointsIndexLock) {
            gpsPointsIndex++;
            synchronized (locations) {
                if (0 < locations.size()) {
                    gpsPointsIndex %= locations.size();
                } else {
                    gpsPointsIndex = 0;
                }
            }
        }
        timer = new Timer(true);
        timerTask = new TimerTask() {
            @Override
            public void run() {
                start();
            }
        };
        timer.schedule(timerTask, getNextDelay());
        state = LocationProviderState.STARTED;
    }

    /**
     * Pauses the simulator.
     * Cancels the timer but keeps the current track position.
     */
    @Override
    public void pause() {
        if (null != timer) {
            timer.cancel();
        }
        state = LocationProviderState.PAUSED;
    }

    /**
     * Stops the simulator but does not release resources. Resources will be released
     * when an instance of this object is garbage-collected.
     */
    @Override
    public void stop() {
        pause();
        synchronized (gpsPointsIndexLock) {
            gpsPointsIndex = 0;
        }
        state = LocationProviderState.STOPPED;
    }

    /**
     * Returns the provider's state.
     * @return the provider's state.
     */
    @Override
    public LocationProviderState getState() {
        return state;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator.aggregation;

import com.facebook.presto.array.BlockBigArray;
import com.facebook.presto.array.BooleanBigArray;
import com.facebook.presto.array.ByteBigArray;
import com.facebook.presto.array.DoubleBigArray;
import com.facebook.presto.array.LongBigArray;
import com.facebook.presto.array.ReferenceCountMap;
import com.facebook.presto.array.SliceBigArray;
import com.facebook.presto.bytecode.DynamicClassLoader;
import com.facebook.presto.operator.aggregation.state.LongState;
import com.facebook.presto.operator.aggregation.state.NullableLongState;
import com.facebook.presto.operator.aggregation.state.StateCompiler;
import com.facebook.presto.operator.aggregation.state.VarianceState;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.BlockBuilderStatus;
import com.facebook.presto.spi.block.InterleavedBlockBuilder;
import com.facebook.presto.spi.function.AccumulatorState;
import com.facebook.presto.spi.function.AccumulatorStateFactory;
import com.facebook.presto.spi.function.AccumulatorStateSerializer;
import com.facebook.presto.spi.function.GroupedAccumulatorState;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.RowType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.util.Reflection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import org.openjdk.jol.info.ClassLayout;
import org.testng.annotations.Test;

import java.lang.invoke.MethodHandle;
import java.lang.reflect.Field;
import java.util.Map;
import java.util.Optional;

import static com.facebook.presto.block.BlockAssertions.createLongsBlock;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TinyintType.TINYINT;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.util.StructuralTestUtil.mapBlockOf;
import static com.facebook.presto.util.StructuralTestUtil.mapType;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedDoubleArray;
import static org.testng.Assert.assertEquals;

/**
 * Tests for {@code StateCompiler}: verifies that generated AccumulatorState factories and
 * serializers round-trip primitive, Slice, Block, and multi-field states, and that grouped
 * states report accurate estimated (retained) sizes.
 */
public class TestStateCompiler
{
    // Fixed JVM-object overhead of a Slice; used to mirror SliceBigArray's size accounting.
    private static final int SLICE_INSTANCE_SIZE = ClassLayout.parseClass(Slice.class).instanceSize();

    /** Round-trips a nullable long state, including an explicit null row. */
    @Test
    public void testPrimitiveNullableLongSerialization()
    {
        AccumulatorStateFactory<NullableLongState> factory = StateCompiler.generateStateFactory(NullableLongState.class);
        AccumulatorStateSerializer<NullableLongState> serializer = StateCompiler.generateStateSerializer(NullableLongState.class);
        NullableLongState state = factory.createSingleState();
        NullableLongState deserializedState = factory.createSingleState();

        state.setLong(2);
        state.setNull(false);

        BlockBuilder builder = BIGINT.createBlockBuilder(new BlockBuilderStatus(), 2);
        serializer.serialize(state, builder);
        // Second row is serialized as null.
        state.setNull(true);
        serializer.serialize(state, builder);

        Block block = builder.build();
        assertEquals(block.isNull(0), false);
        assertEquals(BIGINT.getLong(block, 0), state.getLong());
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.getLong(), state.getLong());
        assertEquals(block.isNull(1), true);
    }

    /** Round-trips a plain (non-nullable) long state. */
    @Test
    public void testPrimitiveLongSerialization()
    {
        AccumulatorStateFactory<LongState> factory = StateCompiler.generateStateFactory(LongState.class);
        AccumulatorStateSerializer<LongState> serializer = StateCompiler.generateStateSerializer(LongState.class);
        LongState state = factory.createSingleState();
        LongState deserializedState = factory.createSingleState();

        state.setLong(2);

        BlockBuilder builder = BIGINT.createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(state, builder);

        Block block = builder.build();
        assertEquals(BIGINT.getLong(block, 0), state.getLong());
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.getLong(), state.getLong());
    }

    /** A single-long state must serialize as BIGINT. */
    @Test
    public void testGetSerializedType()
    {
        AccumulatorStateSerializer<LongState> serializer = StateCompiler.generateStateSerializer(LongState.class);
        assertEquals(serializer.getSerializedType(), BIGINT);
    }

    /** Round-trips a boolean state. */
    @Test
    public void testPrimitiveBooleanSerialization()
    {
        AccumulatorStateFactory<BooleanState> factory = StateCompiler.generateStateFactory(BooleanState.class);
        AccumulatorStateSerializer<BooleanState> serializer = StateCompiler.generateStateSerializer(BooleanState.class);
        BooleanState state = factory.createSingleState();
        BooleanState deserializedState = factory.createSingleState();

        state.setBoolean(true);

        BlockBuilder builder = BOOLEAN.createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(state, builder);

        Block block = builder.build();
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.isBoolean(), state.isBoolean());
    }

    /** Round-trips a byte state (serialized as TINYINT). */
    @Test
    public void testPrimitiveByteSerialization()
    {
        AccumulatorStateFactory<ByteState> factory = StateCompiler.generateStateFactory(ByteState.class);
        AccumulatorStateSerializer<ByteState> serializer = StateCompiler.generateStateSerializer(ByteState.class);
        ByteState state = factory.createSingleState();
        ByteState deserializedState = factory.createSingleState();

        state.setByte((byte) 3);

        BlockBuilder builder = TINYINT.createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(state, builder);

        Block block = builder.build();
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.getByte(), state.getByte());
    }

    /** Round-trips a Slice state, covering both the null and non-null cases. */
    @Test
    public void testNonPrimitiveSerialization()
    {
        AccumulatorStateFactory<SliceState> factory = StateCompiler.generateStateFactory(SliceState.class);
        AccumulatorStateSerializer<SliceState> serializer = StateCompiler.generateStateSerializer(SliceState.class);
        SliceState state = factory.createSingleState();
        SliceState deserializedState = factory.createSingleState();

        state.setSlice(null);
        BlockBuilder nullBlockBuilder = VARCHAR.createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(state, nullBlockBuilder);
        Block nullBlock = nullBlockBuilder.build();
        serializer.deserialize(nullBlock, 0, deserializedState);
        assertEquals(deserializedState.getSlice(), state.getSlice());

        state.setSlice(utf8Slice("test"));
        BlockBuilder builder = VARCHAR.createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(state, builder);
        Block block = builder.build();
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.getSlice(), state.getSlice());
    }

    /** Round-trips a multi-field numeric state (mean/count/m2) through a row type. */
    @Test
    public void testVarianceStateSerialization()
    {
        AccumulatorStateFactory<VarianceState> factory = StateCompiler.generateStateFactory(VarianceState.class);
        AccumulatorStateSerializer<VarianceState> serializer = StateCompiler.generateStateSerializer(VarianceState.class);
        VarianceState singleState = factory.createSingleState();
        VarianceState deserializedState = factory.createSingleState();

        singleState.setMean(1);
        singleState.setCount(2);
        singleState.setM2(3);

        BlockBuilder builder = new RowType(ImmutableList.of(BIGINT, DOUBLE, DOUBLE), Optional.empty()).createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(singleState, builder);

        Block block = builder.build();
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.getCount(), singleState.getCount());
        assertEquals(deserializedState.getMean(), singleState.getMean());
        assertEquals(deserializedState.getM2(), singleState.getM2());
    }

    /**
     * Round-trips a state mixing primitives, Slices (including null), and Block fields.
     * The Block fields' Presto types are supplied via fieldMap since they cannot be
     * inferred from the Java type alone.
     */
    @Test
    public void testComplexSerialization()
    {
        Type arrayType = new ArrayType(BIGINT);
        Type mapType = mapType(BIGINT, VARCHAR);
        Map<String, Type> fieldMap = ImmutableMap.of("Block", arrayType, "AnotherBlock", mapType);
        AccumulatorStateFactory<TestComplexState> factory = StateCompiler.generateStateFactory(TestComplexState.class, fieldMap, new DynamicClassLoader(TestComplexState.class.getClassLoader()));
        AccumulatorStateSerializer<TestComplexState> serializer = StateCompiler.generateStateSerializer(TestComplexState.class, fieldMap, new DynamicClassLoader(TestComplexState.class.getClassLoader()));
        TestComplexState singleState = factory.createSingleState();
        TestComplexState deserializedState = factory.createSingleState();

        singleState.setBoolean(true);
        singleState.setLong(1);
        singleState.setDouble(2.0);
        singleState.setByte((byte) 3);
        singleState.setSlice(utf8Slice("test"));
        singleState.setAnotherSlice(wrappedDoubleArray(1.0, 2.0, 3.0));
        singleState.setYetAnotherSlice(null);
        Block array = createLongsBlock(45);
        singleState.setBlock(array);
        singleState.setAnotherBlock(mapBlockOf(BIGINT, VARCHAR, ImmutableMap.of(123L, "testBlock")));

        // Field order in the row follows the serializer's layout (alphabetical by field name groups).
        BlockBuilder builder = new RowType(ImmutableList.of(BOOLEAN, TINYINT, DOUBLE, BIGINT, mapType, VARBINARY, arrayType, VARBINARY, VARBINARY), Optional.empty())
                .createBlockBuilder(new BlockBuilderStatus(), 1);
        serializer.serialize(singleState, builder);

        Block block = builder.build();
        serializer.deserialize(block, 0, deserializedState);
        assertEquals(deserializedState.getBoolean(), singleState.getBoolean());
        assertEquals(deserializedState.getLong(), singleState.getLong());
        assertEquals(deserializedState.getDouble(), singleState.getDouble());
        assertEquals(deserializedState.getByte(), singleState.getByte());
        assertEquals(deserializedState.getSlice(), singleState.getSlice());
        assertEquals(deserializedState.getAnotherSlice(), singleState.getAnotherSlice());
        assertEquals(deserializedState.getYetAnotherSlice(), singleState.getYetAnotherSlice());
        assertEquals(deserializedState.getBlock().getLong(0, 0), singleState.getBlock().getLong(0, 0));
        assertEquals(deserializedState.getAnotherBlock().getLong(0, 0), singleState.getAnotherBlock().getLong(0, 0));
        assertEquals(deserializedState.getAnotherBlock().getSlice(1, 0, 9), singleState.getAnotherBlock().getSlice(1, 0, 9));
    }

    //see SliceBigArray::getSize
    private long getSize(Slice slice)
    {
        return slice.length() + SLICE_INSTANCE_SIZE;
    }

    /** Sums the instance size plus the sizeOf() of every BigArray field of the generated state. */
    private long getComplexStateRetainedSize(TestComplexState state)
    {
        long retainedSize = ClassLayout.parseClass(state.getClass()).instanceSize();
        // reflection is necessary because TestComplexState implementation is generated
        Field[] fields = state.getClass().getDeclaredFields();
        try {
            for (Field field : fields) {
                Class type = field.getType();
                field.setAccessible(true);
                if (type == BlockBigArray.class || type == BooleanBigArray.class || type == SliceBigArray.class ||
                        type == ByteBigArray.class || type == DoubleBigArray.class || type == LongBigArray.class) {
                    MethodHandle sizeOf = Reflection.methodHandle(type, "sizeOf", null);
                    retainedSize += (long) sizeOf.invokeWithArguments(field.get(state));
                }
            }
        }
        catch (Throwable t) {
            throw new RuntimeException(t);
        }
        return retainedSize;
    }

    /** Sums the sizeOf() of each BlockBigArray's internal ReferenceCountMap (it grows independently). */
    private static long getBlockBigArrayReferenceCountMapOverhead(TestComplexState state)
    {
        long overhead = 0;
        // reflection is necessary because TestComplexState implementation is generated
        Field[] stateFields = state.getClass().getDeclaredFields();
        try {
            for (Field stateField : stateFields) {
                if (stateField.getType() != BlockBigArray.class) {
                    continue;
                }
                stateField.setAccessible(true);
                Field[] blockBigArrayFields = stateField.getType().getDeclaredFields();
                for (Field blockBigArrayField : blockBigArrayFields) {
                    if (blockBigArrayField.getType() != ReferenceCountMap.class) {
                        continue;
                    }
                    blockBigArrayField.setAccessible(true);
                    MethodHandle sizeOf = Reflection.methodHandle(blockBigArrayField.getType(), "sizeOf", null);
                    overhead += (long) sizeOf.invokeWithArguments(blockBigArrayField.get(stateField.get(state)));
                }
            }
        }
        catch (Throwable t) {
            throw new RuntimeException(t);
        }
        return overhead;
    }

    /**
     * Verifies getEstimatedSize() of a grouped state: it must grow by exactly the retained size
     * of each group's values as new groups are filled, and stay constant when existing groups
     * are overwritten with identically-sized values.
     */
    @Test
    public void testComplexStateEstimatedSize()
    {
        Map<String, Type> fieldMap = ImmutableMap.of("Block", new ArrayType(BIGINT), "AnotherBlock", mapType(BIGINT, VARCHAR));
        AccumulatorStateFactory<TestComplexState> factory = StateCompiler.generateStateFactory(TestComplexState.class, fieldMap, new DynamicClassLoader(TestComplexState.class.getClassLoader()));

        TestComplexState groupedState = factory.createGroupedState();
        long initialRetainedSize = getComplexStateRetainedSize(groupedState);
        assertEquals(groupedState.getEstimatedSize(), initialRetainedSize);
        // BlockBigArray has an internal map that can grow in size when getting more blocks
        // need to handle the map overhead separately
        initialRetainedSize -= getBlockBigArrayReferenceCountMapOverhead(groupedState);

        // Phase 1: each iteration fills a NEW group, so the estimate grows linearly.
        for (int i = 0; i < 1000; i++) {
            long retainedSize = 0;
            ((GroupedAccumulatorState) groupedState).setGroupId(i);
            groupedState.setBoolean(true);
            groupedState.setLong(1);
            groupedState.setDouble(2.0);
            groupedState.setByte((byte) 3);
            Slice slice = utf8Slice("test");
            retainedSize += getSize(slice);
            groupedState.setSlice(slice);
            slice = wrappedDoubleArray(1.0, 2.0, 3.0);
            retainedSize += getSize(slice);
            groupedState.setAnotherSlice(slice);
            groupedState.setYetAnotherSlice(null);
            Block array = createLongsBlock(45);
            retainedSize += array.getRetainedSizeInBytes();
            groupedState.setBlock(array);
            BlockBuilder mapBlockBuilder = new InterleavedBlockBuilder(ImmutableList.of(BIGINT, VARCHAR), new BlockBuilderStatus(), 1);
            BIGINT.writeLong(mapBlockBuilder, 123L);
            VARCHAR.writeSlice(mapBlockBuilder, utf8Slice("testBlock"));
            Block map = mapBlockBuilder.build();
            retainedSize += map.getRetainedSizeInBytes();
            groupedState.setAnotherBlock(map);
            assertEquals(groupedState.getEstimatedSize(), initialRetainedSize + retainedSize * (i + 1) + getBlockBigArrayReferenceCountMapOverhead(groupedState));
        }

        // Phase 2: overwriting the same 1000 groups with same-sized values keeps the estimate flat.
        for (int i = 0; i < 1000; i++) {
            long retainedSize = 0;
            ((GroupedAccumulatorState) groupedState).setGroupId(i);
            groupedState.setBoolean(true);
            groupedState.setLong(1);
            groupedState.setDouble(2.0);
            groupedState.setByte((byte) 3);
            Slice slice = utf8Slice("test");
            retainedSize += getSize(slice);
            groupedState.setSlice(slice);
            slice = wrappedDoubleArray(1.0, 2.0, 3.0);
            retainedSize += getSize(slice);
            groupedState.setAnotherSlice(slice);
            groupedState.setYetAnotherSlice(null);
            Block array = createLongsBlock(45);
            retainedSize += array.getRetainedSizeInBytes();
            groupedState.setBlock(array);
            BlockBuilder mapBlockBuilder = new InterleavedBlockBuilder(ImmutableList.of(BIGINT, VARCHAR), new BlockBuilderStatus(), 1);
            BIGINT.writeLong(mapBlockBuilder, 123L);
            VARCHAR.writeSlice(mapBlockBuilder, utf8Slice("testBlock"));
            Block map = mapBlockBuilder.build();
            retainedSize += map.getRetainedSizeInBytes();
            groupedState.setAnotherBlock(map);
            assertEquals(groupedState.getEstimatedSize(), initialRetainedSize + retainedSize * 1000 + getBlockBigArrayReferenceCountMapOverhead(groupedState));
        }
    }

    /** State mixing every supported field kind; implementation is generated by StateCompiler. */
    public interface TestComplexState
            extends AccumulatorState
    {
        double getDouble();

        void setDouble(double value);

        boolean getBoolean();

        void setBoolean(boolean value);

        long getLong();

        void setLong(long value);

        byte getByte();

        void setByte(byte value);

        Slice getSlice();

        void setSlice(Slice slice);

        Slice getAnotherSlice();

        void setAnotherSlice(Slice slice);

        Slice getYetAnotherSlice();

        void setYetAnotherSlice(Slice slice);

        Block getBlock();

        void setBlock(Block block);

        Block getAnotherBlock();

        void setAnotherBlock(Block block);
    }

    public interface BooleanState
            extends AccumulatorState
    {
        boolean isBoolean();

        void setBoolean(boolean value);
    }

    public interface ByteState
            extends AccumulatorState
    {
        byte getByte();

        void setByte(byte value);
    }

    public interface SliceState
            extends AccumulatorState
    {
        Slice getSlice();

        void setSlice(Slice slice);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal.cache;

// DO NOT modify this class. It was generated from LeafRegionEntry.cpp

import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;

import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.cache.persistence.DiskRecoveryStore;
import com.gemstone.gemfire.internal.cache.lru.LRUClockNode;
import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionStamp;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.offheap.OffHeapRegionEntryHelper;
import com.gemstone.gemfire.internal.offheap.annotations.Released;
import com.gemstone.gemfire.internal.offheap.annotations.Retained;
import com.gemstone.gemfire.internal.offheap.annotations.Unretained;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;

// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: STATS
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2

/**
 * Do not modify this class. It was generated.
 * Instead modify LeafRegionEntry.cpp and then run
 * bin/generateRegionEntryClasses.sh from the directory
 * that contains your build.xml.
 *
 * NOTE(review): this variant combines the VERSIONED + DISK + LRU + OFFHEAP + KEY_STRING1
 * macros: a region entry whose short String key is inlined into a single long field
 * (bits1) instead of holding a separate key object.
 */
public class VersionedThinDiskLRURegionEntryOffHeapStringKey1 extends VersionedThinDiskLRURegionEntryOffHeap {
  public VersionedThinDiskLRURegionEntryOffHeapStringKey1 (RegionEntryContext context, String key,
      @Retained
      Object value
      , boolean byteEncode
      ) {
    super(context,
          (value instanceof RecoveredEntry ? null : value)
        );
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    // caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
    long tmpBits1 = 0L;
    if (byteEncode) {
      for (int i=key.length()-1; i >= 0; i--) {
        // Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to keep findbugs happy.
        tmpBits1 |= (byte)key.charAt(i) & 0xff;
        tmpBits1 <<= 8;
      }
      // encoding flag (read back by getEncoding): 1 = one byte per char
      tmpBits1 |= 1<<6;
    } else {
      for (int i=key.length()-1; i >= 0; i--) {
        tmpBits1 |= key.charAt(i);
        tmpBits1 <<= 16;
      }
    }
    // low 6 bits hold the key length (see getKeyLength)
    tmpBits1 |= key.length();
    this.bits1 = tmpBits1;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code
  protected int hash;
  private HashEntry<Object, Object> next;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private static final AtomicLongFieldUpdater<VersionedThinDiskLRURegionEntryOffHeapStringKey1> lastModifiedUpdater
    = AtomicLongFieldUpdater.newUpdater(VersionedThinDiskLRURegionEntryOffHeapStringKey1.class, "lastModified");
  /**
   * All access done using ohAddrUpdater so it is used even though the compiler can not tell it is.
   */
  @SuppressWarnings("unused")
  @Retained @Released private volatile long ohAddress;
  /**
   * I needed to add this because I wanted clear to call setValue which normally can only be called while the re is synced.
   * But if I sync in that code it causes a lock ordering deadlock with the disk regions because they also get a rw lock in clear.
   * Some hardware platforms do not support CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync
   * on the re and we will once again be deadlocked.
   * I don't know if we support any of the hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks
   * on disk regions.
   */
  private final static AtomicLongFieldUpdater<VersionedThinDiskLRURegionEntryOffHeapStringKey1> ohAddrUpdater =
      AtomicLongFieldUpdater.newUpdater(VersionedThinDiskLRURegionEntryOffHeapStringKey1.class, "ohAddress");
  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }
  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  @Unretained
  protected void setValueField(@Unretained Object v) {
    OffHeapRegionEntryHelper.setValue(this, v);
  }
  @Override
  @Retained
  public Object _getValueRetain(RegionEntryContext context, boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }
  @Override
  public long getAddress() {
    return ohAddrUpdater.get(this);
  }
  @Override
  public boolean setAddress(long expectedAddr, long newAddr) {
    return ohAddrUpdater.compareAndSet(this, expectedAddr, newAddr);
  }
  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }
  @Override
  public void returnToPool() {
    // Deadcoded for now; never was working
//    if (this instanceof VMThinRegionEntryLongKey) {
//      factory.returnToPool((VMThinRegionEntryLongKey)this);
//    }
  }
  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }
  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }
  /**
   * @see HashEntry#getEntryHash()
   */
  public final int getEntryHash() {
    return this.hash;
  }
  protected void setEntryHash(int v) {
    this.hash = v;
  }
  /**
   * @see HashEntry#getNextEntry()
   */
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }
  /**
   * @see HashEntry#setNextEntry
   */
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // disk code
  protected void initialize(RegionEntryContext drs, Object value) {
    boolean isBackup;
    if (drs instanceof LocalRegion) {
      isBackup = ((LocalRegion)drs).getDiskRegion().isBackup();
    } else if (drs instanceof PlaceHolderDiskRegion) {
      isBackup = true;
    } else {
      throw new IllegalArgumentException("expected a LocalRegion or PlaceHolderDiskRegion");
    }
    // Delay the initialization of DiskID if overflow only
    if (isBackup) {
      diskInitialize(drs, value);
    }
  }
  @Override
  public final synchronized int updateAsyncEntrySize(EnableLRU capacityController) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), null);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private void diskInitialize(RegionEntryContext context, Object value) {
    DiskRecoveryStore drs = (DiskRecoveryStore)context;
    DiskStoreImpl ds = drs.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    //get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true/* is persistence */, ds.needsLinkedList());
    Helper.initialize(this, drs, value);
  }
  /**
   * DiskId
   *
   * @since 5.1
   */
  protected DiskId id;//= new DiskId();
  public DiskId getDiskId() {
    return this.id;
  }
  @Override
  void setDiskId(RegionEntry old) {
    this.id = ((AbstractDiskRegionEntry)old).getDiskId();
  }
//  // inlining DiskId
//  // always have these fields
//  /**
//   * id consists of
//   * most significant
//   * 1 byte = users bits
//   * 2-8 bytes = oplog id
//   * least significant.
//   *
//   * The highest bit in the oplog id part is set to 1 if the oplog id
//   * is negative.
//   * @todo this field could be an int for an overflow only region
//   */
//  private long id;
//  /**
//   * Length of the bytes on disk.
//   * This is always set. If the value is invalid then it will be set to 0.
//   * The most significant bit is used by overflow to mark it as needing to be written.
//   */
//  protected int valueLength = 0;
//  // have intOffset or longOffset
//  // intOffset
//  /**
//   * The position in the oplog (the oplog offset) where this entry's value is
//   * stored
//   */
//  private volatile int offsetInOplog;
//  // longOffset
//  /**
//   * The position in the oplog (the oplog offset) where this entry's value is
//   * stored
//   */
//  private volatile long offsetInOplog;
//  // have overflowOnly or persistence
//  // overflowOnly
//  // no fields
//  // persistent
//  /** unique entry identifier * */
//  private long keyId;
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // lru code
  @Override
  public void setDelayedDiskId(LocalRegion r) {
    DiskStoreImpl ds = r.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    this.id = DiskId.createDiskId(maxOplogSize, false /* over flow only */, ds.needsLinkedList());
  }
  public final synchronized int updateEntrySize(EnableLRU capacityController) {
    return updateEntrySize(capacityController, _getValue());  // OFHEAP: _getValue ok w/o incing refcount because we are synced and only getting the size
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public final synchronized int updateEntrySize(EnableLRU capacityController, Object value) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
//    if ( debug ) log( "updateEntrySize key=" + getKey()
//                      + (_getValue() == Token.INVALID ? " invalid" :
//                         (_getValue() == Token.LOCAL_INVALID ? "local_invalid" :
//                          (_getValue()==null ? " evicted" : " valid")))
//                      + " oldSize=" + oldSize
//                      + " newSize=" + this.size );
    return delta;
  }
  public final boolean testRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }
  @Override
  public final void setRecentlyUsed() {
    setBits(RECENTLY_USED);
  }
  public final void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }
  public final boolean testEvicted() {
    return areAnyBitsSet(EVICTED);
  }
  public final void setEvicted() {
    setBits(EVICTED);
  }
  public final void unsetEvicted() {
    clearBits(~EVICTED);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private LRUClockNode nextLRU;
  private LRUClockNode prevLRU;
  private int size;
  public final void setNextLRUNode( LRUClockNode next ) {
    this.nextLRU = next;
  }
  public final LRUClockNode nextLRUNode() {
    return this.nextLRU;
  }
  public final void setPrevLRUNode( LRUClockNode prev ) {
    this.prevLRU = prev;
  }
  public final LRUClockNode prevLRUNode() {
    return this.prevLRU;
  }
  public final int getEntrySize() {
    return this.size;
  }
  protected final void setEntrySize(int size) {
    this.size = size;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
//  @Override
//  public StringBuilder appendFieldsToString(final StringBuilder sb) {
//    StringBuilder result = super.appendFieldsToString(sb);
//    result.append("; prev=").append(this.prevLRU==null?"null":"not null");
//    result.append("; next=").append(this.nextLRU==null?"null":"not null");
//    return result;
//  }
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // versioned code
  private VersionSource memberID;
  private short entryVersionLowBytes;
  private short regionVersionHighBytes;
  private int regionVersionLowBytes;
  private byte entryVersionHighByte;
  private byte distributedSystemId;
  public int getEntryVersion() {
    return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
  }
  public long getRegionVersion() {
    return (((long)regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
  }
  public long getVersionTimeStamp() {
    return getLastModified();
  }
  public void setVersionTimeStamp(long time) {
    setLastModified(time);
  }
  public VersionSource getMemberID() {
    return this.memberID;
  }
  public int getDistributedSystemId() {
    return this.distributedSystemId;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public void setVersions(VersionTag tag) {
    this.memberID = tag.getMemberID();
    int eVersion = tag.getEntryVersion();
    this.entryVersionLowBytes = (short)(eVersion & 0xffff);
    this.entryVersionHighByte = (byte)((eVersion & 0xff0000) >> 16);
    this.regionVersionHighBytes = tag.getRegionVersionHighBytes();
    this.regionVersionLowBytes = tag.getRegionVersionLowBytes();
    if (!(tag.isGatewayTag()) && this.distributedSystemId == tag.getDistributedSystemId()) {
      if (getVersionTimeStamp() <= tag.getVersionTimeStamp()) {
        setVersionTimeStamp(tag.getVersionTimeStamp());
      } else {
        tag.setVersionTimeStamp(getVersionTimeStamp());
      }
    } else {
      setVersionTimeStamp(tag.getVersionTimeStamp());
    }
    this.distributedSystemId = (byte)(tag.getDistributedSystemId() & 0xff);
  }
  public void setMemberID(VersionSource memberID) {
    this.memberID = memberID;
  }
  @Override
  public VersionStamp getVersionStamp() {
    return this;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public VersionTag asVersionTag() {
    VersionTag tag = VersionTag.create(memberID);
    tag.setEntryVersion(getEntryVersion());
    tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
    tag.setVersionTimeStamp(getVersionTimeStamp());
    tag.setDistributedSystemId(this.distributedSystemId);
    return tag;
  }
  public void processVersionTag(LocalRegion r, VersionTag tag,
      boolean isTombstoneFromGII,
      boolean hasDelta,
      VersionSource thisVM, InternalDistributedMember sender, boolean checkForConflicts) {
    basicProcessVersionTag(r, tag, isTombstoneFromGII, hasDelta, thisVM, sender, checkForConflicts);
  }
  @Override
  public void processVersionTag(EntryEvent cacheEvent) {
    // this keeps Eclipse happy. without it the sender chain becomes confused
    // while browsing this code
    super.processVersionTag(cacheEvent);
  }
  /** get rvv internal high byte.  Used by region entries for transferring to storage */
  public short getRegionVersionHighBytes() {
    return this.regionVersionHighBytes;
  }
  /** get rvv internal low bytes.  Used by region entries for transferring to storage */
  public int getRegionVersionLowBytes() {
    return this.regionVersionLowBytes;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // key code
  // Inlined key: low 6 bits = length, bits 6-7 = encoding, remaining bits = chars (8 or 16 bits each).
  private final long bits1;
  private int getKeyLength() {
    return (int) (this.bits1 & 0x003fL);
  }
  private int getEncoding() {
    // 0 means encoded as char
    // 1 means encoded as bytes that are all <= 0x7f;
    return (int) (this.bits1 >> 6) & 0x03;
  }
  @Override
  public final Object getKey() {
    int keylen = getKeyLength();
    char[] chars = new char[keylen];
    long tmpBits1 = this.bits1;
    if (getEncoding() == 1) {
      for (int i=0; i < keylen; i++) {
        tmpBits1 >>= 8;
        chars[i] = (char) (tmpBits1 & 0x00ff);
      }
    } else {
      for (int i=0; i < keylen; i++) {
        tmpBits1 >>= 16;
        chars[i] = (char) (tmpBits1 & 0x00FFff);
      }
    }
    return new String(chars);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public boolean isKeyEqual(Object k) {
    if (k instanceof String) {
      String str = (String)k;
      int keylen = getKeyLength();
      if (str.length() == keylen) {
        long tmpBits1 = this.bits1;
        if (getEncoding() == 1) {
          for (int i=0; i < keylen; i++) {
            tmpBits1 >>= 8;
            char c = (char) (tmpBits1 & 0x00ff);
            if (str.charAt(i) != c) {
              return false;
            }
          }
        } else {
          for (int i=0; i < keylen; i++) {
            tmpBits1 >>= 16;
            char c = (char) (tmpBits1 & 0x00FFff);
            if (str.charAt(i) != c) {
              return false;
            }
          }
        }
        return true;
      }
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
package org.apache.solr.update;
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.solr.BaseDistributedSearchTestCase;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.ConfigSolr;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoresLocator;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.servlet.SolrDispatchFilter;
import org.apache.solr.update.MockStreamingSolrClients.Exp;
import org.apache.solr.update.SolrCmdDistributor.Error;
import org.apache.solr.update.SolrCmdDistributor.Node;
import org.apache.solr.update.SolrCmdDistributor.RetryNode;
import org.apache.solr.update.SolrCmdDistributor.StdNode;
import org.apache.solr.update.processor.DistributedUpdateProcessor;
import org.junit.BeforeClass;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Integration test for {@link SolrCmdDistributor}: exercises distributed
 * add/delete/commit fan-out against a control node plus several shards,
 * including the retry behavior of {@link RetryNode} on connection errors.
 */
public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase {
  // Monotonically increasing document id source shared by all sub-tests.
  private AtomicInteger id = new AtomicInteger();

  @BeforeClass
  public static void beforeClass() throws Exception {
    // we can't use the Randomized merge policy because the test depends on
    // being able to call optimize to have all deletes expunged.
    System.setProperty("solr.tests.mergePolicy", LogDocMergePolicy.class.getName());
  }

  // Shared handler whose HTTP clients back every SolrCmdDistributor created below.
  private UpdateShardHandler updateShardHandler;

  public SolrCmdDistributorTest() throws ParserConfigurationException, IOException, SAXException {
    // Minimal ConfigSolr stub: only the pieces UpdateShardHandler touches are overridden.
    updateShardHandler = new UpdateShardHandler(new ConfigSolr(null, null) {

      @Override
      public CoresLocator getCoresLocator() {
        return null;
      }

      @Override
      public PluginInfo getShardHandlerFactoryPluginInfo() {
        return null;
      }

      @Override
      protected String getProperty(CfgProp key) {
        return null;
      }
    });
    fixShardCount = true;
    shardCount = 4;
    stress = 0;
  }

  public static String getSchemaFile() {
    return "schema.xml";
  }

  public static String getSolrConfigFile() {
    // use this because it has /update and is minimal
    return "solrconfig-tlog.xml";
  }

  // TODO: for now we redefine this method so that it pulls from the above
  // we don't get helpful override behavior due to the method being static
  @Override
  protected void createServers(int numShards) throws Exception {
    // One control jetty plus numShards shard jettys, each with its own core directory.
    System.setProperty("configSetBaseDir", TEST_HOME());
    System.setProperty("coreRootDirectory", testDir.toPath().resolve("control").toString());
    writeCoreProperties(testDir.toPath().resolve("control/cores"), DEFAULT_TEST_CORENAME);
    controlJetty = createJetty(new File(getSolrHome()), testDir + "/control/data",
        null, getSolrConfigFile(), getSchemaFile());

    controlClient = createNewSolrClient(controlJetty.getLocalPort());

    shardsArr = new String[numShards];
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < numShards; i++) {
      if (sb.length() > 0) sb.append(',');
      String shardname = "shard" + i;
      Path coresPath = testDir.toPath().resolve(shardname).resolve("cores");
      writeCoreProperties(coresPath, DEFAULT_TEST_CORENAME);
      System.setProperty("coreRootDirectory", testDir.toPath().resolve(shardname).toString());
      JettySolrRunner j = createJetty(new File(getSolrHome()),
          testDir + "/shard" + i + "/data", null, getSolrConfigFile(), getSchemaFile());
      jettys.add(j);
      clients.add(createNewSolrClient(j.getLocalPort()));
      String shardStr = buildUrl(j.getLocalPort());
      shardsArr[i] = shardStr;
      sb.append(shardStr);
    }

    shards = sb.toString();
  }

  /**
   * Main scenario: distribute adds/deletes/commits across nodes, verify doc
   * counts and commit-callback counts, then run the retry sub-tests.
   */
  @Override
  public void doTest() throws Exception {
    del("*:*");

    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);

    ModifiableSolrParams params = new ModifiableSolrParams();
    List<Node> nodes = new ArrayList<>();

    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        ((HttpSolrClient) controlClient).getBaseURL(),
        ZkStateReader.CORE_NAME_PROP, "");
    nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));

    // add one doc to controlClient
    AddUpdateCommand cmd = new AddUpdateCommand(null);
    cmd.solrDoc = sdoc("id", id.incrementAndGet());
    params = new ModifiableSolrParams();

    cmdDistrib.distribAdd(cmd, nodes, params);

    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribCommit(ccmd, nodes, params);
    cmdDistrib.finish();

    List<Error> errors = cmdDistrib.getErrors();
    assertEquals(errors.toString(), 0, errors.size());

    long numFound = controlClient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();
    assertEquals(1, numFound);

    HttpSolrClient client = (HttpSolrClient) clients.get(0);
    nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        client.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
    nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));

    // add another 2 docs to control and 3 to client
    cmdDistrib = new SolrCmdDistributor(updateShardHandler);
    cmd.solrDoc = sdoc("id", id.incrementAndGet());
    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribAdd(cmd, nodes, params);

    int id2 = id.incrementAndGet();
    AddUpdateCommand cmd2 = new AddUpdateCommand(null);
    cmd2.solrDoc = sdoc("id", id2);
    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribAdd(cmd2, nodes, params);

    AddUpdateCommand cmd3 = new AddUpdateCommand(null);
    cmd3.solrDoc = sdoc("id", id.incrementAndGet());
    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    // cmd3 goes only to the shard client, not the control node
    cmdDistrib.distribAdd(cmd3, Collections.singletonList(nodes.get(1)), params);

    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribCommit(ccmd, nodes, params);
    cmdDistrib.finish();

    errors = cmdDistrib.getErrors();
    assertEquals(errors.toString(), 0, errors.size());

    SolrDocumentList results = controlClient.query(new SolrQuery("*:*")).getResults();
    numFound = results.getNumFound();
    assertEquals(results.toString(), 3, numFound);

    numFound = client.query(new SolrQuery("*:*")).getResults()
        .getNumFound();
    assertEquals(3, numFound);

    // now delete doc 2 which is on both control and client1
    DeleteUpdateCommand dcmd = new DeleteUpdateCommand(null);
    dcmd.id = Integer.toString(id2);

    cmdDistrib = new SolrCmdDistributor(updateShardHandler);

    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribDelete(dcmd, nodes, params);

    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribCommit(ccmd, nodes, params);
    cmdDistrib.finish();

    errors = cmdDistrib.getErrors();
    assertEquals(errors.toString(), 0, errors.size());

    results = controlClient.query(new SolrQuery("*:*")).getResults();
    numFound = results.getNumFound();
    assertEquals(results.toString(), 2, numFound);

    numFound = client.query(new SolrQuery("*:*")).getResults()
        .getNumFound();
    assertEquals(results.toString(), 2, numFound);

    // expunge deletes so the numDocs/maxDoc assertion below holds
    for (SolrClient c : clients) {
      c.optimize();
      //System.out.println(clients.get(0).request(new LukeRequest()));
    }

    cmdDistrib = new SolrCmdDistributor(updateShardHandler);

    // add docs to a random subset of nodes each round
    int cnt = atLeast(303);
    for (int i = 0; i < cnt; i++) {
      nodes.clear();
      for (SolrClient c : clients) {
        if (random().nextBoolean()) {
          continue;
        }
        HttpSolrClient httpClient = (HttpSolrClient) c;
        nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
            httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
        nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
      }
      AddUpdateCommand c = new AddUpdateCommand(null);
      c.solrDoc = sdoc("id", id.incrementAndGet());
      if (nodes.size() > 0) {
        params = new ModifiableSolrParams();
        cmdDistrib.distribAdd(c, nodes, params);
      }
    }

    nodes.clear();

    for (SolrClient c : clients) {
      HttpSolrClient httpClient = (HttpSolrClient) c;
      nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
          httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");

      nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
    }

    // register a commit callback on every core so we can count commits
    final AtomicInteger commits = new AtomicInteger();
    for (JettySolrRunner jetty : jettys) {
      CoreContainer cores = ((SolrDispatchFilter) jetty.getDispatchFilter().getFilter()).getCores();
      try (SolrCore core = cores.getCore("collection1")) {
        core.getUpdateHandler().registerCommitCallback(new SolrEventListener() {
          @Override
          public void init(NamedList args) {}

          @Override
          public void postSoftCommit() {}

          @Override
          public void postCommit() {
            commits.incrementAndGet();
          }

          @Override
          public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {}
        });
      }
    }

    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);

    cmdDistrib.distribCommit(ccmd, nodes, params);

    cmdDistrib.finish();

    // exactly one commit must have reached each shard
    assertEquals(shardCount, commits.get());

    for (SolrClient c : clients) {
      NamedList<Object> resp = c.request(new LukeRequest());
      assertEquals("SOLR-3428: We only did adds - there should be no deletes",
          ((NamedList<Object>) resp.get("index")).get("numDocs"),
          ((NamedList<Object>) resp.get("index")).get("maxDoc"));
    }

    testMaxRetries();
    testOneRetry();
    testRetryNodeAgainstBadAddress();
    testRetryNodeWontRetrySocketError();
    testDistribOpenSearcher();
  }

  /**
   * With a permanent CONNECT_EXCEPTION the distributor must retry up to its
   * max (5 retries + initial attempt = 6 checkRetry calls) and then record
   * exactly one error.
   */
  private void testMaxRetries() throws IOException {
    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0);
    streamingClients.setExp(Exp.CONNECT_EXCEPTION);
    ArrayList<Node> nodes = new ArrayList<>();
    final HttpSolrClient solrclient1 = (HttpSolrClient) clients.get(0);

    final AtomicInteger retries = new AtomicInteger();
    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        solrclient1.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
      @Override
      public boolean checkRetry() {
        retries.incrementAndGet();
        return true;
      }
    };

    nodes.add(retryNode);

    AddUpdateCommand cmd = new AddUpdateCommand(null);
    cmd.solrDoc = sdoc("id", id.incrementAndGet());
    ModifiableSolrParams params = new ModifiableSolrParams();

    cmdDistrib.distribAdd(cmd, nodes, params);
    cmdDistrib.finish();

    assertEquals(6, retries.get());

    assertEquals(1, cmdDistrib.getErrors().size());
  }

  /**
   * A transient CONNECT_EXCEPTION (cleared inside checkRetry) must be retried
   * exactly once, succeed on the retry, and leave no errors.
   */
  private void testOneRetry() throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();
    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0);
    streamingClients.setExp(Exp.CONNECT_EXCEPTION);
    ArrayList<Node> nodes = new ArrayList<>();

    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");

    final AtomicInteger retries = new AtomicInteger();
    nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
      @Override
      public boolean checkRetry() {
        // stop failing after the first attempt so the retry succeeds
        streamingClients.setExp(null);
        retries.incrementAndGet();
        return true;
      }
    };

    nodes.add(retryNode);

    AddUpdateCommand cmd = new AddUpdateCommand(null);
    cmd.solrDoc = sdoc("id", id.incrementAndGet());
    ModifiableSolrParams params = new ModifiableSolrParams();

    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
    cmdDistrib.distribAdd(cmd, nodes, params);
    cmdDistrib.distribCommit(ccmd, nodes, params);
    cmdDistrib.finish();

    assertEquals(1, retries.get());

    long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();

    // we will get java.net.ConnectException which we retry on
    assertEquals(numFoundBefore + 1, numFoundAfter);
    assertEquals(0, cmdDistrib.getErrors().size());
  }

  /**
   * A SOCKET_EXCEPTION is consulted via checkRetry but not actually retried:
   * the doc is lost and one error is recorded.
   */
  private void testRetryNodeWontRetrySocketError() throws Exception {
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();
    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 5, 0);
    streamingClients.setExp(Exp.SOCKET_EXCEPTION);
    ArrayList<Node> nodes = new ArrayList<>();

    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");

    final AtomicInteger retries = new AtomicInteger();
    nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
      @Override
      public boolean checkRetry() {
        retries.incrementAndGet();
        return true;
      }
    };

    nodes.add(retryNode);

    AddUpdateCommand cmd = new AddUpdateCommand(null);
    cmd.solrDoc = sdoc("id", id.incrementAndGet());
    ModifiableSolrParams params = new ModifiableSolrParams();

    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
    cmdDistrib.distribAdd(cmd, nodes, params);

    streamingClients.setExp(null);
    cmdDistrib.distribCommit(ccmd, nodes, params);
    cmdDistrib.finish();

    // it will checkRetry, but not actually do it...
    assertEquals(1, retries.get());

    long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();

    // we will get java.net.SocketException: Network is unreachable, which we don't retry on
    assertEquals(numFoundBefore, numFoundAfter);
    assertEquals(1, cmdDistrib.getErrors().size());
  }

  /**
   * A node with an unreachable address: checkRetry swaps in a good leader
   * address; depending on how the OS reports the failure the add is either
   * retried successfully or recorded as a single error.
   */
  private void testRetryNodeAgainstBadAddress() throws SolrServerException, IOException {
    // Test RetryNode
    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
    final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();

    ArrayList<Node> nodes = new ArrayList<>();

    ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
        "[ff01::114]:33332" + context, ZkStateReader.CORE_NAME_PROP, "");
    RetryNode retryNode = new RetryNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1") {
      @Override
      public boolean checkRetry() {
        // redirect the retry to a live node
        ZkNodeProps leaderProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
            solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
        this.nodeProps = new ZkCoreNodeProps(leaderProps);

        return true;
      }
    };

    nodes.add(retryNode);

    AddUpdateCommand cmd = new AddUpdateCommand(null);
    cmd.solrDoc = sdoc("id", id.incrementAndGet());
    ModifiableSolrParams params = new ModifiableSolrParams();

    cmdDistrib.distribAdd(cmd, nodes, params);

    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
    params = new ModifiableSolrParams();
    params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
    cmdDistrib.distribCommit(ccmd, nodes, params);
    cmdDistrib.finish();

    long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
        .getNumFound();

    // different OS's will throw different exceptions for the bad address above
    if (numFoundBefore != numFoundAfter) {
      assertEquals(0, cmdDistrib.getErrors().size());
      assertEquals(numFoundBefore + 1, numFoundAfter);
    } else {
      // we will get java.net.SocketException: Network is unreachable and not retry
      assertEquals(numFoundBefore, numFoundAfter);
      assertEquals(1, cmdDistrib.getErrors().size());
    }
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  @Override
  public void tearDown() throws Exception {
    // close the shared handler before the base class tears down the jettys
    updateShardHandler.close();
    super.tearDown();
  }

  /**
   * addCommit must propagate openSearcher: defaults to true, and honors an
   * explicit openSearcher=false on the command.
   */
  private void testDistribOpenSearcher() {
    SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
    UpdateRequest updateRequest = new UpdateRequest();
    CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);

    //test default value (should be true)
    cmdDistrib.addCommit(updateRequest, ccmd);
    boolean openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, false);
    assertTrue(openSearcher);

    //test openSearcher = false
    ccmd.openSearcher = false;
    cmdDistrib.addCommit(updateRequest, ccmd);
    openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, true);
    assertFalse(openSearcher);
  }
}
/**
 * Copyright (C) 2014-2018 LinkedIn Corp. ([email protected])
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.linkedin.thirdeye.anomaly.events;

import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.DateTime;
import com.google.api.services.calendar.Calendar;
import com.google.api.services.calendar.CalendarScopes;
import com.google.api.services.calendar.model.Event;
import com.google.common.collect.ImmutableMap;
import com.ibm.icu.util.TimeZone;
import com.linkedin.thirdeye.anomaly.HolidayEventsLoaderConfiguration;
import com.linkedin.thirdeye.api.TimeGranularity;
import com.linkedin.thirdeye.datalayer.bao.EventManager;
import com.linkedin.thirdeye.datalayer.dto.EventDTO;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Periodically loads holiday events from Google Calendar and synchronizes
 * them into the ThirdEye event store: new holidays are saved, vanished
 * holidays are deleted, and recurring ones are updated in place.
 */
public class HolidayEventsLoader implements Runnable {

  /**
   * Value object identifying a holiday occurrence by name, type and
   * UTC-normalized start/end time. Used as a map key while aggregating the
   * country codes a holiday appears in, so equals/hashCode must agree.
   */
  static class HolidayEvent {
    /** The Name. */
    String name;
    /** The Event type. */
    String eventType;
    /** The Start time (epoch millis, UTC-normalized). */
    long startTime;
    /** The End time (epoch millis, UTC-normalized). */
    long endTime;

    /**
     * Instantiates a new Holiday event.
     *
     * @param name the name
     * @param eventType the event type
     * @param startTime the start time
     * @param endTime the end time
     */
    HolidayEvent(String name, String eventType, long startTime, long endTime) {
      this.name = name;
      this.eventType = eventType;
      this.startTime = startTime;
      this.endTime = endTime;
    }

    /**
     * Gets name.
     *
     * @return the name
     */
    public String getName() {
      return name;
    }

    /**
     * Sets name.
     *
     * @param name the name
     */
    public void setName(String name) {
      this.name = name;
    }

    /**
     * Gets start time.
     *
     * @return the start time
     */
    public long getStartTime() {
      return startTime;
    }

    /**
     * Sets start time.
     *
     * @param startTime the start time
     */
    public void setStartTime(long startTime) {
      this.startTime = startTime;
    }

    /**
     * Gets end time.
     *
     * @return the end time
     */
    public long getEndTime() {
      return endTime;
    }

    /**
     * Sets end time.
     *
     * @param endTime the end time
     */
    public void setEndTime(long endTime) {
      this.endTime = endTime;
    }

    /**
     * Gets event type.
     *
     * @return the event type
     */
    public String getEventType() {
      return eventType;
    }

    /**
     * Sets event type.
     *
     * @param eventType the event type
     */
    public void setEventType(String eventType) {
      this.eventType = eventType;
    }

    @Override
    public int hashCode() {
      return Objects.hash(getName(), getEventType(), getStartTime(), getEndTime());
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof HolidayEvent)) {
        return false;
      }
      HolidayEvent holidayEvent = (HolidayEvent) obj;
      // Compare primitives directly instead of Objects.equals, which would
      // autobox both longs on every call.
      return Objects.equals(getName(), holidayEvent.getName())
          && getStartTime() == holidayEvent.getStartTime()
          && getEndTime() == holidayEvent.getEndTime()
          && Objects.equals(getEventType(), holidayEvent.getEventType());
    }
  }

  /** List of google holiday calendar ids. */
  private final List<String> calendarList;

  /** Calendar Api private key path. */
  private final String keyPath;

  private final ScheduledExecutorService scheduledExecutorService;

  private final TimeGranularity runFrequency;

  /** Time range to calculate the upper bound for an holiday's start time. In milliseconds. */
  private final long holidayLoadRange;

  private final EventManager eventDAO;

  private static final Logger LOG = LoggerFactory.getLogger(HolidayEventsLoader.class);

  /** Global instance of the HTTP transport. May stay null if initialization fails (logged below). */
  private static HttpTransport HTTP_TRANSPORT;

  /** Global instance of the JSON factory. */
  private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();

  /** Global instance of the scopes. */
  private static final Set<String> SCOPES = Collections.singleton(CalendarScopes.CALENDAR_READONLY);

  private static final String NO_COUNTRY_CODE = "no country code";

  /** Explicit time-zone override per country code (takes precedence over ICU lookup). */
  private static final Map<String, String> COUNTRY_TO_TIMEZONE = ImmutableMap.of("US", "PST");

  static {
    try {
      HTTP_TRANSPORT = GoogleNetHttpTransport.newTrustedTransport();
    } catch (Exception e) {
      LOG.error("Can't create http transport with google api.", e);
    }
  }

  /**
   * Instantiates a new Holiday events loader.
   *
   * @param holidayEventsLoaderConfiguration the configuration
   * @param calendarApiKeyPath the calendar api key path
   * @param eventDAO the event dao
   */
  public HolidayEventsLoader(HolidayEventsLoaderConfiguration holidayEventsLoaderConfiguration,
      String calendarApiKeyPath, EventManager eventDAO) {
    this.holidayLoadRange = holidayEventsLoaderConfiguration.getHolidayLoadRange();
    this.calendarList = holidayEventsLoaderConfiguration.getCalendars();
    this.keyPath = calendarApiKeyPath;
    this.eventDAO = eventDAO;
    this.runFrequency = new TimeGranularity(holidayEventsLoaderConfiguration.getRunFrequency(), TimeUnit.DAYS);
    this.scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
  }

  /**
   * Start the periodic loading; the first run happens immediately.
   */
  public void start() {
    scheduledExecutorService.scheduleAtFixedRate(this, 0, runFrequency.getSize(), runFrequency.getUnit());
  }

  /**
   * Shutdown the periodic loading.
   */
  public void shutdown() {
    scheduledExecutorService.shutdown();
  }

  /**
   * Fetch holidays and save to ThirdEye database.
   */
  public void run() {
    long start = System.currentTimeMillis();
    long end = start + holidayLoadRange;

    try {
      loadHolidays(start, end);
    } catch (Exception e) {
      // An uncaught exception would cancel all future scheduled runs
      // (ScheduledExecutorService.scheduleAtFixedRate contract), so log and
      // let the next cycle retry.
      LOG.error("Holiday load run failed; will retry on the next schedule.", e);
    }
  }

  /**
   * Load holidays in [start, end) from all configured calendars and merge
   * them into the event store.
   *
   * @param start lower bound (inclusive) in epoch millis
   * @param end upper bound (exclusive) in epoch millis
   */
  public void loadHolidays(long start, long end) {
    LOG.info("Loading holidays between {} and {}", start, end);
    List<Event> newHolidays = null;
    try {
      newHolidays = getAllHolidays(start, end);
    } catch (Exception e) {
      LOG.error("Fetch holidays failed. Aborting.", e);
      return;
    }
    Map<HolidayEvent, Set<String>> newHolidayEventToCountryCodes = aggregateCountryCodesGroupByHolidays(newHolidays);

    Map<String, List<EventDTO>> holidayNameToHolidayEvent = getHolidayNameToEventDtoMap(newHolidayEventToCountryCodes);

    // Get the existing holidays within the time range from the database
    List<EventDTO> existingEvents = eventDAO.findEventsBetweenTimeRange(EventType.HOLIDAY.toString(), start, end);

    mergeWithExistingHolidays(holidayNameToHolidayEvent, existingEvents);
  }

  /**
   * Group the raw calendar events by (name, type, start, end) and collect the
   * set of country codes each holiday occurs in.
   */
  private Map<HolidayEvent, Set<String>> aggregateCountryCodesGroupByHolidays(List<Event> newHolidays) {
    // A map from new holiday to a set of country codes that has the holiday
    Map<HolidayEvent, Set<String>> newHolidayEventToCountryCodes = new HashMap<>();

    // Convert Google Event Type to holiday events and aggregates the country code list
    for (Event holiday : newHolidays) {
      String countryCode = getCountryCode(holiday);
      String timeZone = getTimeZoneForCountry(countryCode);
      HolidayEvent holidayEvent =
          new HolidayEvent(holiday.getSummary(), EventType.HOLIDAY.toString(),
              getUtcTimeStamp(holiday.getStart().getDate().getValue(), timeZone),
              getUtcTimeStamp(holiday.getEnd().getDate().getValue(), timeZone));
      if (!newHolidayEventToCountryCodes.containsKey(holidayEvent)) {
        newHolidayEventToCountryCodes.put(holidayEvent, new HashSet<String>());
      }
      if (!countryCode.equals(NO_COUNTRY_CODE)) {
        newHolidayEventToCountryCodes.get(holidayEvent).add(countryCode);
      }
      LOG.info("Get holiday event {} in country {} between {} and {} in timezone {} ", holidayEvent.getName(),
          countryCode, holidayEvent.getStartTime(), holidayEvent.getEndTime(), timeZone);
    }
    return newHolidayEventToCountryCodes;
  }

  /** Shift a calendar-local timestamp to UTC using the given zone's offset. */
  private long getUtcTimeStamp(long timeStamp, String timeZone) {
    return timeStamp - TimeZone.getTimeZone(timeZone).getOffset(timeStamp);
  }

  /** Resolve a representative time zone id for a country code; falls back to GMT. */
  private String getTimeZoneForCountry(String countryCode) {
    // if time zone of a country is set explicitly
    if (COUNTRY_TO_TIMEZONE.containsKey(countryCode)) {
      return COUNTRY_TO_TIMEZONE.get(countryCode);
    }
    // guess the time zone from country code; ICU may return several, take the first
    String timeZone = "GMT";
    String[] timeZones = TimeZone.getAvailableIDs(countryCode);
    if (timeZones.length != 0) {
      timeZone = timeZones[0];
    }
    return timeZone;
  }

  /**
   * Convert aggregated holidays into EventDTOs keyed by holiday name; the
   * country codes become the "countryCode" target dimension.
   */
  Map<String, List<EventDTO>> getHolidayNameToEventDtoMap(
      Map<HolidayEvent, Set<String>> newHolidayEventToCountryCodes) {
    Map<String, List<EventDTO>> holidayNameToHolidayEvent = new HashMap<>();

    // Convert Holiday Events to EventDTOs.
    for (Map.Entry<HolidayEvent, Set<String>> entry : newHolidayEventToCountryCodes.entrySet()) {
      HolidayEvent newHolidayEvent = entry.getKey();
      Set<String> newCountryCodes = entry.getValue();
      String holidayName = newHolidayEvent.getName();

      EventDTO eventDTO = new EventDTO();
      eventDTO.setName(holidayName);
      eventDTO.setEventType(newHolidayEvent.getEventType());
      eventDTO.setStartTime(newHolidayEvent.getStartTime());
      eventDTO.setEndTime(newHolidayEvent.getEndTime());

      Map<String, List<String>> targetDimensionMap = new HashMap<>();
      targetDimensionMap.put("countryCode", new ArrayList<>(newCountryCodes));
      eventDTO.setTargetDimensionMap(targetDimensionMap);

      if (!holidayNameToHolidayEvent.containsKey(holidayName)) {
        holidayNameToHolidayEvent.put(holidayName, new ArrayList<EventDTO>());
      }
      holidayNameToHolidayEvent.get(holidayName).add(eventDTO);
    }
    return holidayNameToHolidayEvent;
  }

  /**
   * Reconcile fetched holidays with what is already stored: delete vanished
   * events, update re-occurring ones in place, and save the remainder as new.
   * Note: events are matched by name only, so multiple same-named occurrences
   * are paired off arbitrarily from the tail of the list.
   */
  void mergeWithExistingHolidays(Map<String, List<EventDTO>> holidayNameToHolidayEvent, List<EventDTO> existingEvents) {
    for (EventDTO existingEvent : existingEvents) {
      String holidayName = existingEvent.getName();
      if (!holidayNameToHolidayEvent.containsKey(holidayName)) {
        // If a event disappears, delete the event
        eventDAO.delete(existingEvent);
      } else {
        // If an existing event shows up again, overwrite with new time and country code.
        List<EventDTO> eventList = holidayNameToHolidayEvent.get(holidayName);
        EventDTO newEvent = eventList.remove(eventList.size() - 1);

        existingEvent.setStartTime(newEvent.getStartTime());
        existingEvent.setEndTime(newEvent.getEndTime());
        existingEvent.setTargetDimensionMap(newEvent.getTargetDimensionMap());
        eventDAO.update(existingEvent);

        if (eventList.isEmpty()) {
          holidayNameToHolidayEvent.remove(holidayName);
        }
      }
    }

    // Add all remaining new events into the database
    for (List<EventDTO> eventDTOList : holidayNameToHolidayEvent.values()) {
      for (EventDTO eventDTO : eventDTOList) {
        eventDAO.save(eventDTO);
      }
    }
  }

  /**
   * Extract a country code from the event creator's display name.
   * Assumes the calendar display name is of the form "Holidays in <country>"
   * ("Holidays in " is 12 characters) — TODO confirm against the configured
   * calendars.
   */
  private String getCountryCode(Event holiday) {
    String calendarName = holiday.getCreator().getDisplayName();
    if (calendarName != null && calendarName.length() > 12) {
      String countryName = calendarName.substring(12);
      for (Locale locale : Locale.getAvailableLocales()) {
        if (locale.getDisplayCountry().equals(countryName)) {
          return locale.getCountry();
        }
      }
    }
    return NO_COUNTRY_CODE;
  }

  /**
   * Fetch holidays from all calendars in Google Calendar API.
   * A failure in one calendar is logged and skipped so the others still load.
   *
   * @param start Lower bound (inclusive) for an holiday's end time to filter by.
   * @param end Upper bound (exclusive) for an holiday's start time to filter by.
   */
  private List<Event> getAllHolidays(long start, long end) throws Exception {
    List<Event> events = new ArrayList<>();
    for (String calendar : calendarList) {
      try {
        events.addAll(this.getCalendarEvents(calendar, start, end));
      } catch (GoogleJsonResponseException e) {
        LOG.warn("Fetch holiday events failed in calendar {}.", calendar, e);
      }
    }
    return events;
  }

  /** Fetch the events of a single calendar id within [start, end). */
  private List<Event> getCalendarEvents(String calendarId, long start, long end) throws Exception {
    GoogleCredential credential = GoogleCredential.fromStream(new FileInputStream(keyPath)).createScoped(SCOPES);
    Calendar service =
        new Calendar.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential).setApplicationName("thirdeye").build();
    return service.events()
        .list(calendarId)
        .setTimeMin(new DateTime(start))
        .setTimeMax(new DateTime(end))
        .execute()
        .getItems();
  }
}
package org.jabref.gui.groups;

import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

import javafx.beans.binding.Bindings;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.scene.input.Dragboard;
import javafx.scene.paint.Color;

import org.jabref.gui.DragAndDropDataFormats;
import org.jabref.gui.IconTheme;
import org.jabref.gui.StateManager;
import org.jabref.gui.util.BackgroundTask;
import org.jabref.gui.util.BindingsHelper;
import org.jabref.gui.util.TaskExecutor;
import org.jabref.logic.groups.DefaultGroupsFactory;
import org.jabref.logic.layout.format.LatexToUnicodeFormatter;
import org.jabref.model.FieldChange;
import org.jabref.model.database.BibDatabaseContext;
import org.jabref.model.entry.BibEntry;
import org.jabref.model.entry.event.EntryEvent;
import org.jabref.model.groups.AbstractGroup;
import org.jabref.model.groups.AutomaticGroup;
import org.jabref.model.groups.GroupEntryChanger;
import org.jabref.model.groups.GroupTreeNode;
import org.jabref.model.strings.StringUtil;

import com.google.common.eventbus.Subscribe;
import org.fxmisc.easybind.EasyBind;

/**
 * View model wrapping a single {@link GroupTreeNode} for display in the groups tree.
 * Exposes display name, hit count, expansion state, children and drag-and-drop
 * behavior as JavaFX properties/bindings.
 */
public class GroupNodeViewModel {

    // Group name with LaTeX converted to Unicode for display.
    private final String displayName;
    private final boolean isRoot;
    // Child view models; for automatic groups a sorted snapshot, otherwise a
    // live mapping backed by the underlying node's children.
    private final ObservableList<GroupNodeViewModel> children;
    private final BibDatabaseContext databaseContext;
    private final StateManager stateManager;
    private final GroupTreeNode groupNode;
    // Number of entries in the database matched by this group.
    private final SimpleIntegerProperty hits;
    private final SimpleBooleanProperty hasChildren;
    private final SimpleBooleanProperty expandedProperty = new SimpleBooleanProperty();
    private final BooleanBinding anySelectedEntriesMatched;
    private final BooleanBinding allSelectedEntriesMatched;
    private final TaskExecutor taskExecutor;

    /**
     * Creates a view model for the given group node.
     *
     * Side effect: registers {@code this} as a listener on the database so hit
     * counts are refreshed when entries change (see {@link #listen(EntryEvent)}).
     */
    public GroupNodeViewModel(BibDatabaseContext databaseContext, StateManager stateManager, TaskExecutor taskExecutor, GroupTreeNode groupNode) {
        this.databaseContext = Objects.requireNonNull(databaseContext);
        this.taskExecutor = Objects.requireNonNull(taskExecutor);
        this.stateManager = Objects.requireNonNull(stateManager);
        this.groupNode = Objects.requireNonNull(groupNode);

        LatexToUnicodeFormatter formatter = new LatexToUnicodeFormatter();
        displayName = formatter.format(groupNode.getName());
        isRoot = groupNode.isRoot();
        if (groupNode.getGroup() instanceof AutomaticGroup) {
            AutomaticGroup automaticGroup = (AutomaticGroup) groupNode.getGroup();
            // Automatic groups compute their subgroups from the current entries
            // once, sorted case-insensitively by display name.
            children = automaticGroup.createSubgroups(databaseContext.getDatabase().getEntries()).stream()
                    .map(this::toViewModel)
                    .sorted((group1, group2) -> group1.getDisplayName().compareToIgnoreCase(group2.getDisplayName()))
                    .collect(Collectors.toCollection(FXCollections::observableArrayList));
        } else {
            // Live view: stays in sync with the node's children.
            children = BindingsHelper.mapBacked(groupNode.getChildren(), this::toViewModel);
        }
        hasChildren = new SimpleBooleanProperty();
        hasChildren.bind(Bindings.isNotEmpty(children));
        hits = new SimpleIntegerProperty(0);
        calculateNumberOfMatches();
        // Mirror the model's expansion flag and write changes back to it.
        expandedProperty.set(groupNode.getGroup().isExpanded());
        expandedProperty.addListener((observable, oldValue, newValue) -> groupNode.getGroup().setExpanded(newValue));

        // Register listener
        databaseContext.getDatabase().registerListener(this);

        // Track, per selected entry, whether it matches this group.
        ObservableList<Boolean> selectedEntriesMatchStatus = EasyBind.map(stateManager.getSelectedEntries(), groupNode::matches);
        anySelectedEntriesMatched = BindingsHelper.any(selectedEntriesMatchStatus, matched -> matched);
        allSelectedEntriesMatched = BindingsHelper.all(selectedEntriesMatchStatus, matched -> matched);
    }

    /** Convenience constructor: wraps the group in a fresh tree node. */
    public GroupNodeViewModel(BibDatabaseContext databaseContext, StateManager stateManager, TaskExecutor taskExecutor, AbstractGroup group) {
        this(databaseContext, stateManager, taskExecutor, new GroupTreeNode(group));
    }

    /** Creates the view model for the implicit "all entries" root group. */
    static GroupNodeViewModel getAllEntriesGroup(BibDatabaseContext newDatabase, StateManager stateManager, TaskExecutor taskExecutor) {
        return new GroupNodeViewModel(newDatabase, stateManager, taskExecutor, DefaultGroupsFactory.getAllEntriesGroup());
    }

    private GroupNodeViewModel toViewModel(GroupTreeNode child) {
        return new GroupNodeViewModel(databaseContext, stateManager, taskExecutor, child);
    }

    /**
     * Assigns the given entries to this group and returns the resulting field changes.
     */
    public List<FieldChange> addEntriesToGroup(List<BibEntry> entries) {
        // TODO: warn if assignment has undesired side effects (modifies a field != keywords)
        //if (!WarnAssignmentSideEffects.warnAssignmentSideEffects(group, groupSelector.frame))
        //{
        //    return; // user aborted operation
        //}

        return groupNode.addEntriesToGroup(entries);

        // TODO: Store undo
        // if (!undo.isEmpty()) {
        //    groupSelector.concludeAssignment(UndoableChangeEntriesOfGroup.getUndoableEdit(target, undo), target.getNode(), assignedEntries);
    }

    public SimpleBooleanProperty expandedProperty() {
        return expandedProperty;
    }

    public BooleanBinding anySelectedEntriesMatchedProperty() {
        return anySelectedEntriesMatched;
    }

    public BooleanBinding allSelectedEntriesMatchedProperty() {
        return allSelectedEntriesMatched;
    }

    public SimpleBooleanProperty hasChildrenProperty() {
        return hasChildren;
    }

    public String getDisplayName() {
        return displayName;
    }

    public boolean isRoot() {
        return isRoot;
    }

    public String getDescription() {
        return groupNode.getGroup().getDescription().orElse("");
    }

    public SimpleIntegerProperty getHits() {
        return hits;
    }

    // Equality (and hashCode below) is based solely on the wrapped group node.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if ((o == null) || (getClass() != o.getClass())) {
            return false;
        }

        GroupNodeViewModel that = (GroupNodeViewModel) o;

        if (!groupNode.equals(that.groupNode)) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "GroupNodeViewModel{" +
                "displayName='" + displayName + '\'' +
                ", isRoot=" + isRoot +
                ", iconCode='" + getIconCode() + '\'' +
                ", children=" + children +
                ", databaseContext=" + databaseContext +
                ", groupNode=" + groupNode +
                ", hits=" + hits +
                '}';
    }

    @Override
    public int hashCode() {
        return groupNode.hashCode();
    }

    public String getIconCode() {
        return groupNode.getGroup().getIconCode().orElse(IconTheme.JabRefIcon.DEFAULT_GROUP_ICON.getCode());
    }

    public ObservableList<GroupNodeViewModel> getChildren() {
        return children;
    }

    public GroupTreeNode getGroupNode() {
        return groupNode;
    }

    /**
     * Gets invoked if an entry in the current database changes.
     */
    @Subscribe
    public void listen(@SuppressWarnings("unused") EntryEvent entryEvent) {
        calculateNumberOfMatches();
    }

    // Recomputes the hit count on a background task and publishes the result
    // into the hits property.
    private void calculateNumberOfMatches() {
        // We calculate the new hit value
        // We could be more intelligent and try to figure out the new number of hits based on the entry change
        // for example, a previously matched entry gets removed -> hits = hits - 1
        BackgroundTask
                .wrap(() -> groupNode.calculateNumberOfMatches(databaseContext.getDatabase()))
                .onSuccess(hits::setValue)
                .executeWith(taskExecutor);
    }

    public GroupTreeNode addSubgroup(AbstractGroup subgroup) {
        return groupNode.addSubgroup(subgroup);
    }

    void toggleExpansion() {
        expandedProperty().set(!expandedProperty().get());
    }

    // Empty search strings match everything; otherwise case-insensitive substring match.
    boolean isMatchedBy(String searchString) {
        return StringUtil.isBlank(searchString) || StringUtil.containsIgnoreCase(getDisplayName(), searchString);
    }

    public Color getColor() {
        return groupNode.getGroup().getColor().orElse(IconTheme.getDefaultColor());
    }

    public String getPath() {
        return groupNode.getPath();
    }

    public Optional<GroupNodeViewModel> getChildByPath(String pathToSource) {
        return groupNode.getChildByPath(pathToSource).map(this::toViewModel);
    }

    /**
     * Decides if the content stored in the given {@link Dragboard} can be dropped on the given target row.
     * Currently, the following sources are allowed:
     *  - another group (will be added as subgroup on drop)
     *  - entries if the group implements {@link GroupEntryChanger} (will be assigned to group on drop)
     */
    public boolean acceptableDrop(Dragboard dragboard) {
        // TODO: we should also check isNodeDescendant
        boolean canDropOtherGroup = dragboard.hasContent(DragAndDropDataFormats.GROUP);
        boolean canDropEntries = dragboard.hasContent(DragAndDropDataFormats.ENTRIES)
                && groupNode.getGroup() instanceof GroupEntryChanger;
        return canDropOtherGroup || canDropEntries;
    }

    public void moveTo(GroupNodeViewModel target) {
        // TODO: Add undo and display message
        //MoveGroupChange undo = new MoveGroupChange(((GroupTreeNodeViewModel)source.getParent()).getNode(),
        //        source.getNode().getPositionInParent(), target.getNode(), target.getChildCount());

        getGroupNode().moveTo(target.getGroupNode());
        //panel.getUndoManager().addEdit(new UndoableMoveGroup(this.groupsRoot, moveChange));
        //panel.markBaseChanged();
        //frame.output(Localization.lang("Moved group \"%0\".", node.getNode().getGroup().getName()));
    }

    public void moveTo(GroupTreeNode target, int targetIndex) {
        getGroupNode().moveTo(target, targetIndex);
    }

    public Optional<GroupTreeNode> getParent() {
        return groupNode.getParent();
    }

    /**
     * Moves this group relative to the given target, depending on where in the
     * target row the mouse was released (above/below = sibling, center = child).
     */
    public void draggedOn(GroupNodeViewModel target, DroppingMouseLocation mouseLocation) {
        Optional<GroupTreeNode> targetParent = target.getParent();
        if (targetParent.isPresent()) {
            int targetIndex = target.getPositionInParent();

            // In case we want to move an item in the same parent
            // and the item is moved down, we need to adjust the target index
            if (targetParent.equals(getParent())) {
                int sourceIndex = this.getPositionInParent();
                if (sourceIndex < targetIndex) {
                    targetIndex--;
                }
            }

            // Different actions depending on where the user releases the drop in the target row
            // Bottom + top -> insert source row before / after this row
            // Center -> add as child
            switch (mouseLocation) {
                case BOTTOM:
                    this.moveTo(targetParent.get(), targetIndex + 1);
                    break;
                case CENTER:
                    this.moveTo(target);
                    break;
                case TOP:
                    this.moveTo(targetParent.get(), targetIndex);
                    break;
            }
        } else {
            // No parent = root -> just add
            this.moveTo(target);
        }
    }

    private int getPositionInParent() {
        return groupNode.getPositionInParent();
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.template.postfix.settings;

import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.template.impl.LiveTemplateCompletionContributor;
import com.intellij.codeInsight.template.impl.TemplateSettings;
import com.intellij.codeInsight.template.postfix.templates.LanguagePostfixTemplate;
import com.intellij.codeInsight.template.postfix.templates.PostfixTemplate;
import com.intellij.codeInsight.template.postfix.templates.PostfixTemplateProvider;
import com.intellij.lang.LanguageExtensionPoint;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.components.JBCheckBox;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Settings page for postfix templates: a global enable toggle, a completion
 * toggle, an expansion-shortcut selector, and one child configurable per
 * language that registers a {@link PostfixTemplateProvider}.
 */
public class PostfixTemplatesConfigurable extends SearchableConfigurable.Parent.Abstract implements Configurable.Composite, SearchableConfigurable, Configurable.NoScroll {

  // Orders templates alphabetically by key for display.
  public static final Comparator<PostfixTemplate> TEMPLATE_COMPARATOR = new Comparator<PostfixTemplate>() {
    @Override
    public int compare(PostfixTemplate o1, PostfixTemplate o2) {
      return o1.getKey().compareTo(o2.getKey());
    }
  };

  private PostfixTemplatesSettings myTemplatesSettings;
  // NOTE(review): the UI fields below appear to be bound externally (GUI
  // designer form) — they are used in the constructor without being assigned
  // here; confirm against the accompanying .form file.
  private JComponent myPanel;
  private JBCheckBox myCompletionEnabledCheckbox;
  private JBCheckBox myPostfixTemplatesEnabled;
  private ComboBox myShortcutComboBox;

  // Localized labels for the three supported expansion shortcuts.
  private static final String SPACE = CodeInsightBundle.message("template.shortcut.space");
  private static final String TAB = CodeInsightBundle.message("template.shortcut.tab");
  private static final String ENTER = CodeInsightBundle.message("template.shortcut.enter");

  @SuppressWarnings("unchecked")
  public PostfixTemplatesConfigurable() {
    myTemplatesSettings = PostfixTemplatesSettings.getInstance();

    /*
    LanguageExtensionPoint[] extensions =
      new ExtensionPointName<LanguageExtensionPoint>(LanguagePostfixTemplate.EP_NAME).getExtensions();

    templateMultiMap = MultiMap.create();
    for (LanguageExtensionPoint extension : extensions) {
      List<PostfixTemplate> postfixTemplates =
        ContainerUtil.newArrayList(((PostfixTemplateProvider)extension.getInstance()).getTemplates());
      ContainerUtil.sort(postfixTemplates, TEMPLATE_COMPARATOR);
      templateMultiMap.putValues(extension.getKey(), postfixTemplates);
    }
    */

    // Keep dependent controls enabled/disabled in sync with the master toggle.
    myPostfixTemplatesEnabled.addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        updateComponents();
      }
    });
    myShortcutComboBox.addItem(TAB);
    myShortcutComboBox.addItem(SPACE);
    myShortcutComboBox.addItem(ENTER);
  }

  @NotNull
  @Override
  public String getId() {
    return "reference.settingsdialog.IDE.editor.postfix.templates";
  }

  @Nullable
  @Override
  public String getHelpTopic() {
    return getId();
  }

  @Nls
  @Override
  public String getDisplayName() {
    return null;
  }

  @NotNull
  @Override
  public JComponent createComponent() {
    // Anchor the form at the top so it does not stretch vertically.
    JPanel panel = new JPanel(new BorderLayout());
    panel.add(myPanel, BorderLayout.NORTH);
    return panel;
  }

  /** Writes the UI state back into the persistent settings. */
  @Override
  public void apply() throws ConfigurationException {
    myTemplatesSettings.setPostfixTemplatesEnabled(myPostfixTemplatesEnabled.isSelected());
    myTemplatesSettings.setTemplatesCompletionEnabled(myCompletionEnabledCheckbox.isSelected());
    myTemplatesSettings.setShortcut(stringToShortcut((String)myShortcutComboBox.getSelectedItem()));
  }

  /** Re-populates the UI from the persistent settings, discarding edits. */
  @Override
  public void reset() {
    myPostfixTemplatesEnabled.setSelected(myTemplatesSettings.isPostfixTemplatesEnabled());
    myCompletionEnabledCheckbox.setSelected(myTemplatesSettings.isTemplatesCompletionEnabled());
    myShortcutComboBox.setSelectedItem(shortcutToString((char)myTemplatesSettings.getShortcut()));
    updateComponents();
  }

  @Override
  public boolean isModified() {
    return myPostfixTemplatesEnabled.isSelected() != myTemplatesSettings.isPostfixTemplatesEnabled() ||
           myCompletionEnabledCheckbox.isSelected() != myTemplatesSettings.isTemplatesCompletionEnabled() ||
           stringToShortcut((String)myShortcutComboBox.getSelectedItem()) != myTemplatesSettings.getShortcut();
  }

  @Override
  public void disposeUIResources() {
  }

  @Nullable
  @Override
  public Runnable enableSearch(String s) {
    return null;
  }

  /** Builds one child configurable per registered language extension, sorted by display name. */
  @Override
  protected Configurable[] buildConfigurables() {
    LanguageExtensionPoint[] extensions =
      new ExtensionPointName<LanguageExtensionPoint>(LanguagePostfixTemplate.EP_NAME).getExtensions();

    List<Configurable> list = new ArrayList<Configurable>(extensions.length);
    for (LanguageExtensionPoint extensionPoint : extensions) {
      list.add(new PostfixTemplatesChildConfigurable(extensionPoint));
    }
    Collections.sort(list, new Comparator<Configurable>() {
      @Override
      public int compare(Configurable o1, Configurable o2) {
        return StringUtil.compare(o1.getDisplayName(), o2.getDisplayName(), true);
      }
    });
    return list.toArray(new Configurable[list.size()]);
  }

  // Enables/disables dependent controls based on the master toggle; the
  // completion checkbox is hidden entirely when all templates are already
  // shown in completion.
  private void updateComponents() {
    boolean pluginEnabled = myPostfixTemplatesEnabled.isSelected();
    myCompletionEnabledCheckbox.setVisible(!LiveTemplateCompletionContributor.shouldShowAllTemplates());
    myCompletionEnabledCheckbox.setEnabled(pluginEnabled);
    myShortcutComboBox.setEnabled(pluginEnabled);
  }

  // Maps a localized shortcut label back to its TemplateSettings char; TAB is the fallback.
  private static char stringToShortcut(@NotNull String string) {
    if (SPACE.equals(string)) {
      return TemplateSettings.SPACE_CHAR;
    }
    else if (ENTER.equals(string)) {
      return TemplateSettings.ENTER_CHAR;
    }
    return TemplateSettings.TAB_CHAR;
  }

  // Inverse of stringToShortcut.
  private static String shortcutToString(char shortcut) {
    if (shortcut == TemplateSettings.SPACE_CHAR) {
      return SPACE;
    }
    if (shortcut == TemplateSettings.ENTER_CHAR) {
      return ENTER;
    }
    return TAB;
  }

  /** Returns the child configurable backed by the given provider, or null if none matches. */
  @Nullable
  public PostfixTemplatesChildConfigurable findConfigurable(PostfixTemplateProvider postfixTemplateProvider) {
    for (Configurable configurable : getConfigurables()) {
      PostfixTemplatesChildConfigurable childConfigurable = (PostfixTemplatesChildConfigurable)configurable;
      if (childConfigurable.getPostfixTemplateProvider() == postfixTemplateProvider) {
        return childConfigurable;
      }
    }
    return null;
  }
}
package org.visallo.webster;

import org.visallo.webster.parameterProviders.ParameterProviderFactory;
import org.visallo.webster.resultWriters.DefaultResultWriterFactory;
import org.visallo.webster.resultWriters.ResultWriterFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

/**
 * Entry point of the webster routing framework: registers handlers per HTTP
 * method and path on a {@link Router}, holds a small key/value configuration
 * map, and dispatches incoming requests.
 */
public class App {
    private static final Logger ACCESS_LOGGER = LoggerFactory.getLogger(App.class.getName() + ".ACCESS_LOG");
    public static final String WEBSTER_APP_ATTRIBUTE_NAME = "websterApp";
    private static final ResultWriterFactory DEFAULT_RESULT_WRITER_FACTORY = new DefaultResultWriterFactory();
    private Router router;
    private Map<String, Object> config;

    public App(final ServletContext servletContext) {
        router = new Router(servletContext);
        config = new HashMap<>();
    }

    /** Registers handlers for GET requests on the given path. */
    public final void get(String path, Handler... handlers) {
        router.addRoute(Route.Method.GET, path, wrapNonRequestResponseHandlers(handlers));
    }

    /** Instantiates the handler classes reflectively and registers them for GET. */
    @SafeVarargs
    public final void get(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            get(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute get method on path " + path, e);
        }
    }

    /** Registers handlers for POST requests on the given path. */
    public final void post(String path, Handler... handlers) {
        router.addRoute(Route.Method.POST, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void post(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            post(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute post method on path " + path, e);
        }
    }

    /** Registers handlers for PUT requests on the given path. */
    public final void put(String path, Handler... handlers) {
        router.addRoute(Route.Method.PUT, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void put(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            put(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute put method on path " + path, e);
        }
    }

    /** Registers handlers for DELETE requests on the given path. */
    public final void delete(String path, Handler... handlers) {
        router.addRoute(Route.Method.DELETE, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void delete(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            delete(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute delete method on path " + path, e);
        }
    }

    /** Registers handlers for HEAD requests on the given path. */
    public final void head(String path, Handler... handlers) {
        router.addRoute(Route.Method.HEAD, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void head(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            head(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute head method on path " + path, e);
        }
    }

    /** Registers handlers for OPTIONS requests on the given path. */
    public final void options(String path, Handler... handlers) {
        router.addRoute(Route.Method.OPTIONS, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void options(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            options(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute options method on path " + path, e);
        }
    }

    /** Registers handlers for TRACE requests on the given path. */
    public final void trace(String path, Handler... handlers) {
        router.addRoute(Route.Method.TRACE, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void trace(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            trace(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute trace method on path " + path, e);
        }
    }

    /** Registers handlers for CONNECT requests on the given path. */
    public final void connect(String path, Handler... handlers) {
        router.addRoute(Route.Method.CONNECT, path, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void connect(String path, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            connect(path, handlers);
        } catch (Exception e) {
            throw new WebsterException("Could not execute connect method on path " + path, e);
        }
    }

    /** Registers handlers to run when the given exception type escapes a route. */
    public final void onException(Class<? extends Exception> exceptionClass, Handler... handlers) {
        router.addExceptionHandler(exceptionClass, wrapNonRequestResponseHandlers(handlers));
    }

    @SafeVarargs
    public final void onException(Class<? extends Exception> exceptionClass, Class<? extends Handler>... classes) {
        try {
            Handler[] handlers = instantiateHandlers(classes);
            onException(exceptionClass, handlers);
        } catch (Exception e) {
            throw new WebsterException(e);
        }
    }

    /** Returns the configuration value stored under the given name, or null. */
    public Object get(String name) {
        return config.get(name);
    }

    /** Stores a configuration value under the given name. */
    public void set(String name, Object value) {
        config.put(name, value);
    }

    /** Shorthand for {@code set(name, true)}. */
    public void enable(String name) {
        config.put(name, true);
    }

    /** Shorthand for {@code set(name, false)}. */
    public void disable(String name) {
        config.put(name, false);
    }

    /**
     * Returns true only when the named configuration value is a Boolean and is
     * true; missing values and non-Boolean values count as disabled.
     */
    public boolean isEnabled(String name) {
        Object value = config.get(name);
        // instanceof already rejects null, so no separate null check is needed.
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        return false;
    }

    public boolean isDisabled(String name) {
        return !isEnabled(name);
    }

    public Router getRouter() {
        return router;
    }

    /** Retrieves the App instance that {@link #handle} attached to the request. */
    public static App getApp(HttpServletRequest request) {
        return (App) request.getAttribute(WEBSTER_APP_ATTRIBUTE_NAME);
    }

    /**
     * Dispatches a request through the router, logging method, URI and elapsed
     * time when access-log debug logging is enabled.
     */
    public void handle(HttpServletRequest request, HttpServletResponse response) throws Exception {
        long startTime = System.currentTimeMillis();
        try {
            request.setAttribute(WEBSTER_APP_ATTRIBUTE_NAME, this);
            router.route(request, response);
        } finally {
            if (ACCESS_LOGGER.isDebugEnabled()) {
                long endTime = System.currentTimeMillis();
                long timeMs = endTime - startTime;
                ACCESS_LOGGER.debug(request.getMethod() + " " + request.getRequestURI() + " " + timeMs + "ms");
            }
        }
    }

    /**
     * Instantiates each handler class via its no-arg constructor.
     *
     * Uses getDeclaredConstructor().newInstance() instead of the deprecated
     * Class.newInstance(), which propagated checked constructor exceptions
     * unchecked; constructor failures now surface as InvocationTargetException,
     * still covered by the {@code throws Exception} contract.
     */
    protected Handler[] instantiateHandlers(Class<? extends Handler>[] handlerClasses) throws Exception {
        Handler[] handlers = new Handler[handlerClasses.length];
        for (int i = 0; i < handlerClasses.length; i++) {
            handlers[i] = handlerClasses[i].getDeclaredConstructor().newInstance();
        }
        return handlers;
    }

    // Adapts ParameterizedHandler instances to the RequestResponseHandler
    // interface the router expects; any other handler type is a programming error.
    private RequestResponseHandler[] wrapNonRequestResponseHandlers(Handler[] handlers) {
        RequestResponseHandler[] results = new RequestResponseHandler[handlers.length];
        for (int i = 0; i < handlers.length; i++) {
            if (handlers[i] instanceof RequestResponseHandler) {
                results[i] = (RequestResponseHandler) handlers[i];
            } else if (handlers[i] instanceof ParameterizedHandler) {
                results[i] = new RequestResponseHandlerParameterizedHandlerWrapper(
                        this,
                        (ParameterizedHandler) handlers[i]
                );
            } else {
                throw new WebsterException("Unhandled handler type: " + handlers[i].getClass().getName());
            }
        }
        return results;
    }

    public static <T> void registeredParameterProviderFactory(ParameterProviderFactory<T> parameterProviderFactory) {
        RequestResponseHandlerParameterizedHandlerWrapper.registeredParameterProviderFactory(parameterProviderFactory);
    }

    public static <T> void registerParameterValueConverter(
            Class<T> clazz,
            DefaultParameterValueConverter.Converter<T> converter
    ) {
        DefaultParameterValueConverter.registerValueConverter(clazz, converter);
    }

    ResultWriterFactory internalGetResultWriterFactory(Method handleMethod) {
        return getResultWriterFactory(handleMethod);
    }

    /** Extension point: subclasses may choose a result writer per handle method. */
    protected ResultWriterFactory getResultWriterFactory(Method handleMethod) {
        return DEFAULT_RESULT_WRITER_FACTORY;
    }
}
/*
 * Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */
package org.antlr.v4.test.tool;

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;

/**
 * A {@link CharStream} decorator that transparently decodes Java Unicode
 * escapes of the form {@code \}{@code uXXXX} (with any number of repeated
 * {@code u}s, per JLS 3.3) in the underlying stream. Escapes that have been
 * seen are recorded in the three parallel {@code escape*} lists so that
 * lookahead, consume and seek can translate between logical and physical
 * positions.
 *
 * @author Sam Harwell
 */
public class JavaUnicodeInputStream implements CharStream {
    private final CharStream source;
    // Parallel lists: physical index of each decoded escape, the character it
    // decodes to, and how many extra 'u's it contained (its indirection level).
    private final IntegerList escapeIndexes = new IntegerList();
    private final IntegerList escapeCharacters = new IntegerList();
    private final IntegerList escapeIndirectionLevels = new IntegerList();

    // Position within escapeIndexes corresponding to the current stream position.
    private int escapeListIndex;
    // Highest physical index examined so far; escapes are only recorded once.
    private int range;
    // Number of consecutive backslashes immediately before the current position;
    // an escape is only active after an even number of preceding backslashes.
    private int slashCount;

    // Cached source.LA(1) so the common no-backslash path is cheap.
    private int la1;

    public JavaUnicodeInputStream(CharStream source) {
        if (source == null) {
            throw new NullPointerException("source");
        }

        this.source = source;
        this.la1 = source.LA(1);
    }

    @Override
    public int size() {
        return source.size();
    }

    @Override
    public int index() {
        return source.index();
    }

    @Override
    public String getSourceName() {
        return source.getSourceName();
    }

    @Override
    public String getText(Interval interval) {
        return source.getText(interval);
    }

    @Override
    public void consume() {
        if (la1 != '\\') {
            // Plain character: advance one position and reset the backslash run.
            source.consume();
            la1 = source.LA(1);
            range = Math.max(range, source.index());
            slashCount = 0;
            return;
        }

        // make sure the next character has been processed
        this.LA(1);

        if (escapeListIndex >= escapeIndexes.size() || escapeIndexes.get(escapeListIndex) != index()) {
            // Not the start of a recorded escape: consume the backslash itself.
            source.consume();
            slashCount++;
        }
        else {
            // Recorded escape: skip its full physical length (\ u{...u} XXXX).
            int indirectionLevel = escapeIndirectionLevels.get(escapeListIndex);
            for (int i = 0; i < 6 + indirectionLevel; i++) {
                source.consume();
            }

            escapeListIndex++;
            slashCount = 0;
        }

        la1 = source.LA(1);
        assert range >= index();
    }

    @Override
    public int LA(int i) {
        if (i == 1 && la1 != '\\') {
            return la1;
        }

        if (i <= 0) {
            // Backward lookahead: walk recorded escapes backwards, translating
            // the logical offset into a physical one as each escape is crossed.
            int desiredIndex = index() + i;
            for (int j = escapeListIndex - 1; j >= 0; j--) {
                if (escapeIndexes.get(j) + 6 + escapeIndirectionLevels.get(j) > desiredIndex) {
                    desiredIndex -= 5 + escapeIndirectionLevels.get(j);
                }

                if (escapeIndexes.get(j) == desiredIndex) {
                    return escapeCharacters.get(j);
                }
            }

            return source.LA(desiredIndex - index());
        }
        else {
            // Forward lookahead over already-recorded escapes first.
            int desiredIndex = index() + i - 1;
            for (int j = escapeListIndex; j < escapeIndexes.size(); j++) {
                if (escapeIndexes.get(j) == desiredIndex) {
                    return escapeCharacters.get(j);
                }
                else if (escapeIndexes.get(j) < desiredIndex) {
                    desiredIndex += 5 + escapeIndirectionLevels.get(j);
                }
                else {
                    return source.LA(desiredIndex - index() + 1);
                }
            }

            // Beyond the recorded range: decode characters one by one, recording
            // any new escapes encountered along the way.
            int[] currentIndex = { index() };
            int[] slashCountPtr = { slashCount };
            int[] indirectionLevelPtr = { 0 };
            for (int j = 0; j < i; j++) {
                int previousIndex = currentIndex[0];
                int c = readCharAt(currentIndex, slashCountPtr, indirectionLevelPtr);
                if (currentIndex[0] > range) {
                    if (currentIndex[0] - previousIndex > 1) {
                        // The character spanned multiple physical positions, so
                        // it was an escape: remember it.
                        escapeIndexes.add(previousIndex);
                        escapeCharacters.add(c);
                        escapeIndirectionLevels.add(indirectionLevelPtr[0]);
                    }

                    range = currentIndex[0];
                }

                if (j == i - 1) {
                    return c;
                }
            }

            throw new IllegalStateException("shouldn't be reachable");
        }
    }

    @Override
    public int mark() {
        return source.mark();
    }

    @Override
    public void release(int marker) {
        source.release(marker);
    }

    @Override
    public void seek(int index) {
        // Seeking forward past the examined range is not supported because the
        // escape bookkeeping for that region has not been built yet.
        if (index > range) {
            throw new UnsupportedOperationException();
        }

        source.seek(index);
        la1 = source.LA(1);

        // Rebuild the backslash-run count and escape-list cursor for the new position.
        slashCount = 0;
        while (source.LA(-slashCount - 1) == '\\') {
            slashCount++;
        }

        escapeListIndex = escapeIndexes.binarySearch(source.index());
        if (escapeListIndex < 0) {
            escapeListIndex = -escapeListIndex - 1;
        }
    }

    private static boolean isHexDigit(int c) {
        return c >= '0' && c <= '9'
            || c >= 'a' && c <= 'f'
            || c >= 'A' && c <= 'F';
    }

    private static int hexValue(int c) {
        if (c >= '0' && c <= '9') {
            return c - '0';
        }

        if (c >= 'a' && c <= 'f') {
            return c - 'a' + 10;
        }

        if (c >= 'A' && c <= 'F') {
            return c - 'A' + 10;
        }

        throw new IllegalArgumentException("c");
    }

    // Reads one logical character at *nextIndexPtr, decoding a Unicode escape
    // when one starts there and is not disabled by an odd run of preceding
    // backslashes. The three single-element arrays are in/out parameters:
    // physical cursor, backslash-run count, and escape indirection level.
    private int readCharAt(int[] nextIndexPtr, int[] slashCountPtr, int[] indirectionLevelPtr) {
        assert nextIndexPtr != null && nextIndexPtr.length == 1;
        assert slashCountPtr != null && slashCountPtr.length == 1;
        assert indirectionLevelPtr != null && indirectionLevelPtr.length == 1;

        // JLS: a backslash only starts a Unicode escape after an even number of
        // preceding backslashes ("\\u..." is not an escape).
        boolean blockUnicodeEscape = (slashCountPtr[0] % 2) != 0;

        int c0 = source.LA(nextIndexPtr[0] - index() + 1);
        if (c0 == '\\') {
            slashCountPtr[0]++;

            if (!blockUnicodeEscape) {
                int c1 = source.LA(nextIndexPtr[0] - index() + 2);
                if (c1 == 'u') {
                    // Count the run of repeated 'u's (the indirection level).
                    int c2 = source.LA(nextIndexPtr[0] - index() + 3);
                    indirectionLevelPtr[0] = 0;
                    while (c2 == 'u') {
                        indirectionLevelPtr[0]++;
                        c2 = source.LA(nextIndexPtr[0] - index() + 3 + indirectionLevelPtr[0]);
                    }

                    int c3 = source.LA(nextIndexPtr[0] - index() + 4 + indirectionLevelPtr[0]);
                    int c4 = source.LA(nextIndexPtr[0] - index() + 5 + indirectionLevelPtr[0]);
                    int c5 = source.LA(nextIndexPtr[0] - index() + 6 + indirectionLevelPtr[0]);
                    if (isHexDigit(c2) && isHexDigit(c3) && isHexDigit(c4) && isHexDigit(c5)) {
                        int value = hexValue(c2);
                        value = (value << 4) + hexValue(c3);
                        value = (value << 4) + hexValue(c4);
                        value = (value << 4) + hexValue(c5);

                        // Valid escape: advance past its whole physical extent.
                        nextIndexPtr[0] += 6 + indirectionLevelPtr[0];
                        slashCountPtr[0] = 0;
                        return value;
                    }
                }
            }
        }

        // Ordinary character (or a backslash that does not start an escape).
        nextIndexPtr[0]++;
        return c0;
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto

package com.google.recaptchaenterprise.v1;

/**
 *
 *
 * <pre>
 * The create key request message.
 * </pre>
 *
 * Protobuf type {@code google.cloud.recaptchaenterprise.v1.CreateKeyRequest}
 *
 * <p>NOTE(review): this class is machine-generated by protoc from
 * recaptchaenterprise.proto. Do not hand-edit; regenerate from the .proto
 * instead. Message has two fields: {@code parent} (string, field 1) and
 * {@code key} (message, field 2).
 */
public final class CreateKeyRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.CreateKeyRequest)
    CreateKeyRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateKeyRequest.newBuilder() to construct.
  private CreateKeyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: empty parent, unset key.
  private CreateKeyRequest() {
    parent_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateKeyRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags until EOF (tag 0), preserving
  // unrecognized fields in the UnknownFieldSet.
  private CreateKeyRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (parent), wire type 2 (length-delimited string)
            {
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 18: // field 2 (key), wire type 2 (embedded message)
            {
              com.google.recaptchaenterprise.v1.Key.Builder subBuilder = null;
              if (key_ != null) {
                // A repeated occurrence of a singular message field merges
                // into the previously parsed value, per proto3 semantics.
                subBuilder = key_.toBuilder();
              }
              key_ =
                  input.readMessage(
                      com.google.recaptchaenterprise.v1.Key.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(key_);
                key_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_CreateKeyRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_CreateKeyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.recaptchaenterprise.v1.CreateKeyRequest.class,
            com.google.recaptchaenterprise.v1.CreateKeyRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; the accessors below lazily convert
  // and cache the other representation (standard protobuf lazy-string idiom).
  private volatile java.lang.Object parent_;
  /**
   *
   *
   * <pre>
   * Required. The name of the project in which the key will be created, in the
   * format "projects/{project}".
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s; // cache decoded form
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the project in which the key will be created, in the
   * format "projects/{project}".
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b; // cache encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int KEY_FIELD_NUMBER = 2;
  // null means "not set" (proto3 singular message field).
  private com.google.recaptchaenterprise.v1.Key key_;
  /**
   *
   *
   * <pre>
   * Required. Information to create a reCAPTCHA Enterprise key.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the key field is set.
   */
  @java.lang.Override
  public boolean hasKey() {
    return key_ != null;
  }
  /**
   *
   *
   * <pre>
   * Required. Information to create a reCAPTCHA Enterprise key.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The key.
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.Key getKey() {
    return key_ == null ? com.google.recaptchaenterprise.v1.Key.getDefaultInstance() : key_;
  }
  /**
   *
   *
   * <pre>
   * Required. Information to create a reCAPTCHA Enterprise key.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.KeyOrBuilder getKeyOrBuilder() {
    return getKey();
  }

  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Default-valued fields are skipped on the wire, per proto3.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (key_ != null) {
      output.writeMessage(2, getKey());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (key_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getKey());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.recaptchaenterprise.v1.CreateKeyRequest)) {
      return super.equals(obj);
    }
    com.google.recaptchaenterprise.v1.CreateKeyRequest other =
        (com.google.recaptchaenterprise.v1.CreateKeyRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (hasKey() != other.hasKey()) return false;
    if (hasKey()) {
      if (!getKey().equals(other.getKey())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasKey()) {
      hash = (37 * hash) + KEY_FIELD_NUMBER;
      hash = (53 * hash) + getKey().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.recaptchaenterprise.v1.CreateKeyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The create key request message.
   * </pre>
   *
   * Protobuf type {@code google.cloud.recaptchaenterprise.v1.CreateKeyRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.CreateKeyRequest)
      com.google.recaptchaenterprise.v1.CreateKeyRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_CreateKeyRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_CreateKeyRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.recaptchaenterprise.v1.CreateKeyRequest.class,
              com.google.recaptchaenterprise.v1.CreateKeyRequest.Builder.class);
    }

    // Construct using com.google.recaptchaenterprise.v1.CreateKeyRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";

      if (keyBuilder_ == null) {
        key_ = null;
      } else {
        key_ = null;
        keyBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_CreateKeyRequest_descriptor;
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.CreateKeyRequest getDefaultInstanceForType() {
      return com.google.recaptchaenterprise.v1.CreateKeyRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.CreateKeyRequest build() {
      com.google.recaptchaenterprise.v1.CreateKeyRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.CreateKeyRequest buildPartial() {
      com.google.recaptchaenterprise.v1.CreateKeyRequest result =
          new com.google.recaptchaenterprise.v1.CreateKeyRequest(this);
      result.parent_ = parent_;
      // key comes from the nested builder when one has been materialized.
      if (keyBuilder_ == null) {
        result.key_ = key_;
      } else {
        result.key_ = keyBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.recaptchaenterprise.v1.CreateKeyRequest) {
        return mergeFrom((com.google.recaptchaenterprise.v1.CreateKeyRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.recaptchaenterprise.v1.CreateKeyRequest other) {
      if (other == com.google.recaptchaenterprise.v1.CreateKeyRequest.getDefaultInstance())
        return this;
      // Proto3 merge: non-default scalar fields overwrite, message fields merge.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.hasKey()) {
        mergeKey(other.getKey());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.recaptchaenterprise.v1.CreateKeyRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.recaptchaenterprise.v1.CreateKeyRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever parsed successfully, even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Same String/ByteString lazy-caching scheme as the message field.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of the project in which the key will be created, in the
     * format "projects/{project}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the project in which the key will be created, in the
     * format "projects/{project}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the project in which the key will be created, in the
     * format "projects/{project}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      parent_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the project in which the key will be created, in the
     * format "projects/{project}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the project in which the key will be created, in the
     * format "projects/{project}".
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      parent_ = value;
      onChanged();
      return this;
    }

    private com.google.recaptchaenterprise.v1.Key key_;
    // Lazily created; once non-null it owns the key state instead of key_.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.recaptchaenterprise.v1.Key,
            com.google.recaptchaenterprise.v1.Key.Builder,
            com.google.recaptchaenterprise.v1.KeyOrBuilder>
        keyBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the key field is set.
     */
    public boolean hasKey() {
      return keyBuilder_ != null || key_ != null;
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The key.
     */
    public com.google.recaptchaenterprise.v1.Key getKey() {
      if (keyBuilder_ == null) {
        return key_ == null ? com.google.recaptchaenterprise.v1.Key.getDefaultInstance() : key_;
      } else {
        return keyBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKey(com.google.recaptchaenterprise.v1.Key value) {
      if (keyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        key_ = value;
        onChanged();
      } else {
        keyBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKey(com.google.recaptchaenterprise.v1.Key.Builder builderForValue) {
      if (keyBuilder_ == null) {
        key_ = builderForValue.build();
        onChanged();
      } else {
        keyBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeKey(com.google.recaptchaenterprise.v1.Key value) {
      if (keyBuilder_ == null) {
        if (key_ != null) {
          key_ =
              com.google.recaptchaenterprise.v1.Key.newBuilder(key_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          key_ = value;
        }
        onChanged();
      } else {
        keyBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearKey() {
      if (keyBuilder_ == null) {
        key_ = null;
        onChanged();
      } else {
        key_ = null;
        keyBuilder_ = null;
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.recaptchaenterprise.v1.Key.Builder getKeyBuilder() {
      onChanged();
      return getKeyFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.recaptchaenterprise.v1.KeyOrBuilder getKeyOrBuilder() {
      if (keyBuilder_ != null) {
        return keyBuilder_.getMessageOrBuilder();
      } else {
        return key_ == null ? com.google.recaptchaenterprise.v1.Key.getDefaultInstance() : key_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Information to create a reCAPTCHA Enterprise key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.Key key = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.recaptchaenterprise.v1.Key,
            com.google.recaptchaenterprise.v1.Key.Builder,
            com.google.recaptchaenterprise.v1.KeyOrBuilder>
        getKeyFieldBuilder() {
      if (keyBuilder_ == null) {
        keyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.recaptchaenterprise.v1.Key,
                com.google.recaptchaenterprise.v1.Key.Builder,
                com.google.recaptchaenterprise.v1.KeyOrBuilder>(
                getKey(), getParentForChildren(), isClean());
        key_ = null; // ownership transferred to the field builder
      }
      return keyBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.CreateKeyRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.CreateKeyRequest)
  private static final com.google.recaptchaenterprise.v1.CreateKeyRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.CreateKeyRequest();
  }

  public static com.google.recaptchaenterprise.v1.CreateKeyRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateKeyRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateKeyRequest>() {
        @java.lang.Override
        public CreateKeyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new CreateKeyRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<CreateKeyRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateKeyRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.recaptchaenterprise.v1.CreateKeyRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.phonemetra.turbo.lockclock.weather; import android.app.AlarmManager; import android.app.PendingIntent; import android.app.Service; import android.content.Context; import android.content.Intent; import android.location.Criteria; import android.location.Location; import android.location.LocationListener; import android.location.LocationManager; import android.location.LocationProvider; import android.os.AsyncTask; import android.os.Bundle; import android.os.IBinder; import android.os.PowerManager; import android.os.PowerManager.WakeLock; import android.os.SystemClock; import android.text.TextUtils; import com.phonemetra.turbo.lockclock.ClockWidgetProvider; import com.phonemetra.turbo.lockclock.util.Preferences; import com.phonemetra.turbo.lockclock.util.WidgetUtils; //import com.google.android.gms.common.ConnectionResult; //import com.google.android.gms.common.GooglePlayServicesUtil; public class WeatherUpdateService extends Service { public static final String ACTION_FORCE_UPDATE = "com.phonemetra.turbo.lockclock.action.FORCE_WEATHER_UPDATE"; private static final String ACTION_CANCEL_LOCATION_UPDATE = "com.phonemetra.turbo.lockclock.action.CANCEL_LOCATION_UPDATE"; // Broadcast action for end of update public static final String ACTION_UPDATE_FINISHED = "com.phonemetra.turbo.lockclock.action.WEATHER_UPDATE_FINISHED"; public static final String EXTRA_UPDATE_CANCELLED 
= "update_cancelled"; private static final long LOCATION_REQUEST_TIMEOUT = 5L * 60L * 1000L; // request for at most 5 minutes private static final long OUTDATED_LOCATION_THRESHOLD_MILLIS = 10L * 60L * 1000L; // 10 minutes private static final float LOCATION_ACCURACY_THRESHOLD_METERS = 50000; private WeatherUpdateTask mTask; private static final Criteria sLocationCriteria; static { sLocationCriteria = new Criteria(); sLocationCriteria.setPowerRequirement(Criteria.POWER_LOW); sLocationCriteria.setAccuracy(Criteria.ACCURACY_COARSE); sLocationCriteria.setCostAllowed(false); } @Override public int onStartCommand(Intent intent, int flags, int startId) { boolean active = mTask != null && mTask.getStatus() != AsyncTask.Status.FINISHED; if (ACTION_CANCEL_LOCATION_UPDATE.equals(intent.getAction())) { WeatherLocationListener.cancel(this); if (!active) { stopSelf(); } return START_NOT_STICKY; } if (active) { return START_REDELIVER_INTENT; } boolean force = ACTION_FORCE_UPDATE.equals(intent.getAction()); if (!shouldUpdate(force)) { stopSelf(); sendCancelledBroadcast(); return START_NOT_STICKY; } mTask = new WeatherUpdateTask(); mTask.execute(); return START_REDELIVER_INTENT; } private void sendCancelledBroadcast() { Intent finishedIntent = new Intent(ACTION_UPDATE_FINISHED); finishedIntent.putExtra(EXTRA_UPDATE_CANCELLED, true); sendBroadcast(finishedIntent); } @Override public IBinder onBind(Intent intent) { return null; } @Override public void onDestroy() { if (mTask != null && mTask.getStatus() != AsyncTask.Status.FINISHED) { mTask.cancel(true); mTask = null; } } private boolean shouldUpdate(boolean force) { long interval = Preferences.weatherRefreshIntervalInMs(this); if (interval == 0 && !force) { return false; } if (force) { Preferences.setCachedWeatherInfo(this, 0, null); } long now = System.currentTimeMillis(); long lastUpdate = Preferences.lastWeatherUpdateTimestamp(this); long due = lastUpdate + interval; if (lastUpdate != 0 && now < due) { return false; } return 
WidgetUtils.isNetworkAvailable(this); } private class WeatherUpdateTask extends AsyncTask<Void, Void, WeatherInfo> { private WakeLock mWakeLock; private Context mContext; public WeatherUpdateTask() { PowerManager pm = (PowerManager) getSystemService(POWER_SERVICE); mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, ""); mWakeLock.setReferenceCounted(false); mContext = WeatherUpdateService.this; } @Override protected void onPreExecute() { mWakeLock.acquire(); } private Location getCurrentLocation() { LocationManager lm = (LocationManager) getSystemService(Context.LOCATION_SERVICE); Location location = lm.getLastKnownLocation(LocationManager.PASSIVE_PROVIDER); if (location != null && location.getAccuracy() > LOCATION_ACCURACY_THRESHOLD_METERS) { location = null; } // If lastKnownLocation is not present (because none of the apps in the // device has requested the current location to the system yet) or outdated, // then try to get the current location use the provider that best matches the criteria. boolean needsUpdate = location == null; if (location != null) { long delta = System.currentTimeMillis() - location.getTime(); needsUpdate = delta > OUTDATED_LOCATION_THRESHOLD_MILLIS; } if (needsUpdate) { //Getting best location provider String locationProvider = lm.getBestProvider(sLocationCriteria, true); if (TextUtils.isEmpty(locationProvider)) { //No available location providers matching criteria } else if (isGooglePlayServicesAvailable() && locationProvider.equals(LocationManager.GPS_PROVIDER)) { // Since Google Play services is available, // let's conserve battery power and not depend on the device's GPS. // Google Play Services available; Ignoring GPS provider. 
} else { WeatherLocationListener.registerIfNeeded(mContext, locationProvider); } } return location; } private boolean isGooglePlayServicesAvailable() { //int result = GooglePlayServicesUtil.isGooglePlayServicesAvailable(mContext); //return result == ConnectionResult.SUCCESS // || result == ConnectionResult.SERVICE_VERSION_UPDATE_REQUIRED; return false; } @Override protected WeatherInfo doInBackground(Void... params) { WeatherProvider provider = Preferences.weatherProvider(mContext); boolean metric = Preferences.useMetricUnits(mContext); String customLocationId = null, customLocationName = null; if (Preferences.useCustomWeatherLocation(mContext)) { customLocationId = Preferences.customWeatherLocationId(mContext); customLocationName = Preferences.customWeatherLocationCity(mContext); } if (customLocationId != null) { return provider.getWeatherInfo(customLocationId, customLocationName, metric); } Location location = getCurrentLocation(); if (location != null) { WeatherInfo info = provider.getWeatherInfo(location, metric); if (info != null) { return info; } } // work with cached location from last request for now // a listener to update it is already scheduled if possible WeatherInfo cachedInfo = Preferences.getCachedWeatherInfo(mContext); if (cachedInfo != null) { return provider.getWeatherInfo(cachedInfo.getId(), cachedInfo.getCity(), metric); } return null; } @Override protected void onPostExecute(WeatherInfo result) { finish(result); } @Override protected void onCancelled() { finish(null); } private void finish(WeatherInfo result) { if (result != null) { long now = System.currentTimeMillis(); Preferences.setCachedWeatherInfo(mContext, now, result); scheduleUpdate(mContext, Preferences.weatherRefreshIntervalInMs(mContext), false); Intent updateIntent = new Intent(mContext, ClockWidgetProvider.class); sendBroadcast(updateIntent); } else if (isCancelled()) { // cancelled, likely due to lost network - we'll get restarted // when network comes back } else { // failure, 
schedule next download in 30 minutes long interval = 30 * 60 * 1000; scheduleUpdate(mContext, interval, false); } WeatherContentProvider.updateCachedWeatherInfo(mContext, result); Intent finishedIntent = new Intent(ACTION_UPDATE_FINISHED); finishedIntent.putExtra(EXTRA_UPDATE_CANCELLED, result == null); sendBroadcast(finishedIntent); mWakeLock.release(); stopSelf(); } } private static class WeatherLocationListener implements LocationListener { private Context mContext; private PendingIntent mTimeoutIntent; private static WeatherLocationListener sInstance = null; static void registerIfNeeded(Context context, String provider) { synchronized (WeatherLocationListener.class) { if (sInstance == null) { final Context appContext = context.getApplicationContext(); final LocationManager locationManager = (LocationManager) appContext.getSystemService(Context.LOCATION_SERVICE); // Check location provider after set sInstance, so, if the provider is not // supported, we never enter here again. sInstance = new WeatherLocationListener(appContext); // Check whether the provider is supported. // NOTE!!! Actually only WeatherUpdateService class is calling this function // with the NETWORK_PROVIDER, so setting the instance is safe. 
We must // change this if this call receive different providers LocationProvider lp = locationManager.getProvider(provider); if (lp != null) { locationManager.requestSingleUpdate(provider, sInstance, appContext.getMainLooper()); sInstance.setTimeoutAlarm(); } } } } static void cancel(Context context) { synchronized (WeatherLocationListener.class) { if (sInstance != null) { final Context appContext = context.getApplicationContext(); final LocationManager locationManager = (LocationManager) appContext.getSystemService(Context.LOCATION_SERVICE); locationManager.removeUpdates(sInstance); sInstance.cancelTimeoutAlarm(); sInstance = null; } } } private WeatherLocationListener(Context context) { super(); mContext = context; } private void setTimeoutAlarm() { Intent intent = new Intent(mContext, WeatherUpdateService.class); intent.setAction(ACTION_CANCEL_LOCATION_UPDATE); mTimeoutIntent = PendingIntent.getService(mContext, 0, intent, PendingIntent.FLAG_ONE_SHOT | PendingIntent.FLAG_CANCEL_CURRENT); AlarmManager am = (AlarmManager) mContext.getSystemService(ALARM_SERVICE); long elapseTime = SystemClock.elapsedRealtime() + LOCATION_REQUEST_TIMEOUT; am.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, elapseTime, mTimeoutIntent); } private void cancelTimeoutAlarm() { if (mTimeoutIntent != null) { AlarmManager am = (AlarmManager) mContext.getSystemService(ALARM_SERVICE); am.cancel(mTimeoutIntent); mTimeoutIntent = null; } } @Override public void onLocationChanged(Location location) { synchronized (WeatherLocationListener.class) { WeatherUpdateService.scheduleUpdate(mContext, 0, true); cancelTimeoutAlarm(); sInstance = null; } } @Override public void onStatusChanged(String provider, int status, Bundle extras) { if (status == LocationProvider.AVAILABLE) { synchronized (WeatherLocationListener.class) { WeatherUpdateService.scheduleUpdate(mContext, 0, true); cancelTimeoutAlarm(); sInstance = null; } } } @Override public void onProviderEnabled(String provider) { // Not used } @Override 
public void onProviderDisabled(String provider) { // Not used } } private static void scheduleUpdate(Context context, long timeFromNow, boolean force) { AlarmManager am = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE); long due = System.currentTimeMillis() + timeFromNow; am.set(AlarmManager.RTC_WAKEUP, due, getUpdateIntent(context, force)); } public static void scheduleNextUpdate(Context context, boolean force) { long lastUpdate = Preferences.lastWeatherUpdateTimestamp(context); if (lastUpdate == 0 || force) { scheduleUpdate(context, 0, true); } else { long interval = Preferences.weatherRefreshIntervalInMs(context); scheduleUpdate(context, lastUpdate + interval - System.currentTimeMillis(), false); } } public static PendingIntent getUpdateIntent(Context context, boolean force) { Intent i = new Intent(context, WeatherUpdateService.class); if (force) { i.setAction(ACTION_FORCE_UPDATE); } return PendingIntent.getService(context, 0, i, PendingIntent.FLAG_UPDATE_CURRENT); } public static void cancelUpdates(Context context) { AlarmManager am = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE); am.cancel(getUpdateIntent(context, true)); am.cancel(getUpdateIntent(context, false)); WeatherLocationListener.cancel(context); } }