gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * ATimeTest.java
 * 1.0.0  Oct 1, 2014  Leo Hinterlang
 */
package com.fidelis.valface;

import static org.junit.Assert.*;

import java.text.DateFormat;
import java.util.Calendar;
import java.util.Date;

import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

/**
 * Unit tests for {@link com.fidelis.valface.ATime}.
 *
 * @version 1.0.0
 * @author Leo Hinterlang
 */
public class ATimeTest {

    @Rule
    public TestName testName = new TestName();

    // Set true at the end of each test; reported by tearDown so a failed
    // assertion (which skips the assignment) prints "failed ***".
    private boolean passed;

    /**
     * Prints the running test's name, dot-padded to a fixed width.
     *
     * @throws java.lang.Exception on setup failure
     */
    @Before
    public void setUp () throws Exception {
        String test = testName.getMethodName();
        // Use print, not printf: the method name is data, not a format
        // string — a '%' in it would be misread as a format specifier.
        System.out.print("Testing: " + test);
        for (int n = test.length(); n < 30; n++) {
            System.out.print('.');
        }
        passed = false;
    }

    /**
     * Reports pass/fail for the test that just ran.
     *
     * @throws java.lang.Exception on teardown failure
     */
    @After
    public void tearDown () throws Exception {
        System.out.println(passed ? " passed" : " failed ***");
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#getHour()}.
     */
    @Test
    public void testGetHour () {
        ATime time = ATime.of(12, 34);
        assertEquals(12, time.getHour());
        time = ATime.of(0, 44);
        assertEquals(0, time.getHour());
        time = ATime.of(23, 18);
        assertEquals(23, time.getHour());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#getMinute()}.
     */
    @Test
    public void testGetMinute () {
        ATime time = ATime.of(12, 34);
        assertEquals(34, time.getMinute());
        time = ATime.of(0, 44);
        assertEquals(44, time.getMinute());
        time = ATime.of(23, 0);
        assertEquals(0, time.getMinute());
        time = ATime.of(14, 59);
        assertEquals(59, time.getMinute());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#getSecond()}.
     */
    @Test
    public void testGetSecond () {
        ATime time = ATime.of(12, 34, 56);
        assertEquals(56, time.getSecond());
        time = ATime.of(0, 0, 5);
        assertEquals(5, time.getSecond());
        time = ATime.of(1, 2, 0);
        assertEquals(0, time.getSecond());
        // The two-arg factory should default seconds to zero.
        time = ATime.of(17, 30);
        assertEquals(0, time.getSecond());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#getNano()}.
     */
    @Test
    public void testGetNano () {
        ATime time = ATime.of(12, 34, 56, 7890);
        assertEquals(7890, time.getNano());
        time = ATime.of(0, 44, 0, 212313414);
        assertEquals(212313414, time.getNano());
        // Factories without a nano argument should default it to zero.
        time = ATime.of(1, 2, 3);
        assertEquals(0, time.getNano());
        time = ATime.of(7, 23, 14, 0);
        assertEquals(0, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#of(int, int, int, int)}.
     */
    @Test
    public void testOfIntIntIntInt () {
        ATime time = ATime.of(12, 34, 56, 7890);
        assertEquals(12, time.getHour());
        assertEquals(34, time.getMinute());
        assertEquals(56, time.getSecond());
        assertEquals(7890, time.getNano());
        // Maximum legal time of day.
        time = ATime.of(23, 59, 59, 999999999);
        assertEquals(23, time.getHour());
        assertEquals(59, time.getMinute());
        assertEquals(59, time.getSecond());
        assertEquals(999999999, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#of(int, int, int)}.
     */
    @Test
    public void testOfIntIntInt () {
        ATime time = ATime.of(12, 34, 56);
        assertEquals(12, time.getHour());
        assertEquals(34, time.getMinute());
        assertEquals(56, time.getSecond());
        assertEquals(0, time.getNano());
        time = ATime.of(23, 59, 59);
        assertEquals(23, time.getHour());
        assertEquals(59, time.getMinute());
        assertEquals(59, time.getSecond());
        assertEquals(0, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#of(int, int)}.
     */
    @Test
    public void testOfIntInt () {
        ATime time = ATime.of(12, 34);
        assertEquals(12, time.getHour());
        assertEquals(34, time.getMinute());
        assertEquals(0, time.getSecond());
        assertEquals(0, time.getNano());
        time = ATime.of(23, 59);
        assertEquals(23, time.getHour());
        assertEquals(59, time.getMinute());
        assertEquals(0, time.getSecond());
        assertEquals(0, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#ofNanoOfDay(long)}.
     */
    @Test
    public void testOfNanoOfDay () {
        // 14:37:49.987654321 expressed as nanoseconds of day.
        long nanos = (14 * 3600 + 37 * 60 + 49) * 1000000000L + 987654321L;
        ATime time = ATime.ofNanoOfDay(nanos);
        assertEquals(14, time.getHour());
        assertEquals(37, time.getMinute());
        assertEquals(49, time.getSecond());
        assertEquals(987654321, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#ofSecondOfDay(long)}.
     */
    @Test
    public void testOfSecondOfDay () {
        ATime time = ATime.ofSecondOfDay(14 * 3600 + 37 * 60 + 49);
        assertEquals(14, time.getHour());
        assertEquals(37, time.getMinute());
        assertEquals(49, time.getSecond());
        assertEquals(0, time.getNano());
        time = ATime.ofSecondOfDay(23 * 3600 + 59 * 60 + 59);
        assertEquals(23, time.getHour());
        assertEquals(59, time.getMinute());
        assertEquals(59, time.getSecond());
        assertEquals(0, time.getNano());
        // Values past the end of the day must be rejected.
        try {
            time = ATime.ofSecondOfDay(24 * 3600 + 60 * 60 + 60);
            fail("Exception expected");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        // Negative values must be rejected.
        try {
            time = ATime.ofSecondOfDay(-123);
            fail("Exception expected");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#compareTo(ATime)}.
     */
    @Test
    public void testCompareTo () {
        ATime time1 = ATime.of(12, 34, 56);
        ATime time2 = ATime.of(12, 34, 55);
        assertTrue(time1.compareTo(time2) > 0);
        assertTrue(time2.compareTo(time1) < 0);
        time2 = ATime.of(12, 34, 56);
        assertTrue(time1.compareTo(time2) == 0);
        // Ordering must also consider the nano-of-second component.
        time1 = ATime.of(12, 34, 56, 100100100);
        time2 = ATime.of(12, 34, 56, 200200200);
        assertTrue(time1.compareTo(time2) < 0);
        assertTrue(time2.compareTo(time1) > 0);
        time2 = ATime.of(12, 34, 56, 100100100);
        assertTrue(time1.compareTo(time2) == 0);
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#equals(ATime)}.
     */
    @Test
    public void testEquals () {
        ATime time1 = ATime.of(23, 59, 59);
        ATime time2 = ATime.of(23, 59, 59);
        assertTrue(time1.equals(time2));
        time2 = ATime.of(11, 12, 13);
        assertFalse(time1.equals(time2));
        time1 = ATime.of(10, 11, 12, 123456789);
        time2 = ATime.of(10, 11, 12, 123456789);
        assertTrue(time1.equals(time2));
        // Equal down to the second but differing in nanos is not equal.
        time2 = ATime.of(10, 11, 12, 987654321);
        assertFalse(time1.equals(time2));
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#isBefore(ATime)}.
     */
    @Test
    public void testIsBefore () {
        ATime time1 = ATime.of(12, 34, 56, 7890);
        ATime time2 = ATime.of(12, 34, 56, 7891);
        assertTrue(time1.isBefore(time2));
        assertFalse(time2.isBefore(time1));
        // Equal times: neither is before the other.
        time2 = ATime.of(12, 34, 56, 7890);
        assertFalse(time1.isBefore(time2));
        time1 = ATime.of(23, 59, 59, 100100100);
        time2 = ATime.of(10, 11, 13, 100100100);
        assertFalse(time1.isBefore(time2));
        assertTrue(time2.isBefore(time1));
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#isAfter(ATime)}.
     */
    @Test
    public void testIsAfter () {
        ATime time1 = ATime.of(20, 10, 30);
        ATime time2 = ATime.of(20, 10, 15);
        assertTrue(time1.isAfter(time2));
        assertFalse(time2.isAfter(time1));
        // Equal times: neither is after the other.
        time2 = ATime.of(20, 10, 30);
        assertFalse(time1.isAfter(time2));
        assertFalse(time2.isAfter(time1));
        time1 = ATime.of(5, 13, 40, 999);
        time2 = ATime.of(5, 13, 40, 1000);
        assertFalse(time1.isAfter(time2));
        assertTrue(time2.isAfter(time1));
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#withHour(int)}.
     */
    @Test
    public void testWithHour () {
        ATime time = ATime.of(12, 34, 56, 7890);
        // withHour replaces only the hour; all other fields are preserved.
        ATime time2 = time.withHour(20);
        assertEquals(20, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        time2 = time.withHour(0);
        assertEquals(0, time2.getHour());
        try {
            time2 = time.withHour(99);
            fail("Exception expected");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#withMinute(int)}.
     */
    @Test
    public void testWithMinute () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.withMinute(55);
        assertEquals(12, time2.getHour());
        assertEquals(55, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        time2 = time.withMinute(0);
        assertEquals(0, time2.getMinute());
        try {
            time2 = time.withMinute(99);
            fail("Exception expected");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#withSecond(int)}.
     */
    @Test
    public void testWithSecond () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.withSecond(16);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(16, time2.getSecond());
        assertEquals(7890, time2.getNano());
        time2 = time.withSecond(0);
        assertEquals(0, time2.getSecond());
        try {
            time2 = time.withSecond(99);
            fail("Exception expected");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#withNano(int)}.
     */
    @Test
    public void testWithNano () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.withNano(9870);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(9870, time2.getNano());
        time2 = time.withNano(0);
        assertEquals(0, time2.getNano());
        // One past the largest legal nano value (999,999,999).
        try {
            time2 = time.withNano(1000000000);
            fail("Exception expected");
        } catch (IllegalArgumentException ex) {
            // expected
        }
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#plusHours(long)}.
     */
    @Test
    public void testPlusHours () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.plusHours(4);
        assertEquals(16, time2.getHour());
        time2 = time.plusHours(8);
        assertEquals(20, time2.getHour());
        // Additions that cross midnight wrap around modulo 24.
        time2 = time.plusHours(12);
        assertEquals(0, time2.getHour());
        time2 = time.plusHours(16);
        assertEquals(4, time2.getHour());
        time2 = time.plusHours(20);
        assertEquals(8, time2.getHour());
        time2 = time.plusHours(24);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#plusMinutes(long)}.
     */
    @Test
    public void testPlusMinutes () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.plusMinutes(10);
        assertEquals(44, time2.getMinute());
        time2 = time.plusMinutes(20);
        assertEquals(54, time2.getMinute());
        // Carry into the hour: 12:34 + 30m = 13:04.
        time2 = time.plusMinutes(30);
        assertEquals(13, time2.getHour());
        assertEquals(4, time2.getMinute());
        time = time.plusHours(11);
        assertEquals(23, time.getHour());
        // Carry across midnight: 23:34 + 30m = 00:04.
        time2 = time.plusMinutes(30);
        assertEquals(0, time2.getHour());
        assertEquals(4, time2.getMinute());
        time2 = time.plusMinutes(60);
        assertEquals(0, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#plusSeconds(long)}.
     */
    @Test
    public void testPlusSeconds () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.plusSeconds(3);
        assertEquals(59, time2.getSecond());
        // Carry into the minute.
        time2 = time.plusSeconds(4);
        assertEquals(0, time2.getSecond());
        assertEquals(35, time2.getMinute());
        // Carry across midnight; nanos are preserved.
        time = ATime.of(23, 59, 56, 123456789);
        time2 = time.plusSeconds(4);
        assertEquals(0, time2.getHour());
        assertEquals(0, time2.getMinute());
        assertEquals(0, time2.getSecond());
        assertEquals(123456789, time2.getNano());
        // 23:59:56 + 94s wraps past midnight to 00:01:30.
        time2 = time.plusSeconds(94);
        assertEquals(0, time2.getHour());
        assertEquals(1, time2.getMinute());
        assertEquals(30, time2.getSecond());
        assertEquals(123456789, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#plusNanos(long)}.
     */
    @Test
    public void testPlusNanos () {
        ATime time = ATime.of(12, 34, 56, 0);
        ATime time2 = time.plusNanos(999999999);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(999999999, time2.getNano());
        // One nano past the last instant of the day wraps to midnight.
        time = ATime.of(23, 59, 59, 999999999);
        time2 = time.plusNanos(1);
        assertEquals(0, time2.getHour());
        assertEquals(0, time2.getMinute());
        assertEquals(0, time2.getSecond());
        assertEquals(0, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#minusHours(long)}.
     */
    @Test
    public void testMinusHours () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.minusHours(4);
        assertEquals(8, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        // Subtraction past midnight wraps backwards: 12 - 20 -> 16.
        time2 = time.minusHours(20);
        assertEquals(16, time2.getHour());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#minusMinutes(long)}.
     */
    @Test
    public void testMinusMinutes () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.minusMinutes(30);
        assertEquals(12, time2.getHour());
        assertEquals(4, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        time2 = time.minusMinutes(34);
        assertEquals(12, time2.getHour());
        assertEquals(0, time2.getMinute());
        // Borrow from the hour: 12:34 - 35m = 11:59.
        time2 = time.minusMinutes(35);
        assertEquals(11, time2.getHour());
        assertEquals(59, time2.getMinute());
        time2 = time.minusMinutes(12 * 60);
        assertEquals(0, time2.getHour());
        assertEquals(34, time2.getMinute());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#minusSeconds(long)}.
     */
    @Test
    public void testMinusSeconds () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.minusSeconds(30);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(26, time2.getSecond());
        assertEquals(7890, time2.getNano());
        time2 = time.minusSeconds(56);
        assertEquals(34, time2.getMinute());
        assertEquals(0, time2.getSecond());
        // Borrow from the minute: ...:34:56 - 57s = ...:33:59.
        time2 = time.minusSeconds(57);
        assertEquals(33, time2.getMinute());
        assertEquals(59, time2.getSecond());
        time2 = time.minusSeconds(34 * 60);
        assertEquals(0, time2.getMinute());
        assertEquals(56, time2.getSecond());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#minusNanos(long)}.
     */
    @Test
    public void testMinusNanos () {
        ATime time = ATime.of(12, 34, 56, 7890);
        ATime time2 = time.minusNanos(7000);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(890, time2.getNano());
        time2 = time.minusNanos(7890);
        assertEquals(56, time2.getSecond());
        assertEquals(0, time2.getNano());
        // Borrow from the second.
        time2 = time.minusNanos(7891);
        assertEquals(55, time2.getSecond());
        assertEquals(999999999, time2.getNano());
        time2 = time.minusNanos((long) 56 * 1000000000);
        assertEquals(0, time2.getSecond());
        assertEquals(7890, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#toCalendar()}.
     */
    @Test
    public void testToCalendar () {
        ATime time = ATime.of(12, 34, 56, 789000000);
        Calendar cal = time.toCalendar();
        assertEquals(12, cal.get(Calendar.HOUR_OF_DAY));
        assertEquals(34, cal.get(Calendar.MINUTE));
        assertEquals(56, cal.get(Calendar.SECOND));
        // Calendar only has millisecond precision.
        assertEquals(789, cal.get(Calendar.MILLISECOND));
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#toDate()}.
     */
    @Test
    public void testToDate () {
        ATime time = ATime.of(22, 11, 44, 999999999);
        Date date = time.toDate();
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        assertEquals(22, cal.get(Calendar.HOUR_OF_DAY));
        assertEquals(11, cal.get(Calendar.MINUTE));
        assertEquals(44, cal.get(Calendar.SECOND));
        // Nanos are truncated (not rounded) to milliseconds.
        assertEquals(999, cal.get(Calendar.MILLISECOND));
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#toSecondOfDay()}.
     */
    @Test
    public void testToSecondOfDay () {
        // Round-trip through ofSecondOfDay.
        ATime time = ATime.of(12, 34, 56);
        int secondOfDay = time.toSecondOfDay();
        ATime time2 = ATime.ofSecondOfDay(secondOfDay);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(0, time2.getNano());
        // The nano component is not representable and is dropped.
        time = ATime.of(23, 59, 59, 999999999);
        secondOfDay = time.toSecondOfDay();
        time2 = ATime.ofSecondOfDay(secondOfDay);
        assertEquals(23, time2.getHour());
        assertEquals(59, time2.getMinute());
        assertEquals(59, time2.getSecond());
        assertEquals(0, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#toNanoOfDay()}.
     */
    @Test
    public void testToNanoOfDay () {
        // Round-trip through ofNanoOfDay preserves all fields.
        ATime time = ATime.of(12, 34, 56, 7890);
        long nanoOfDay = time.toNanoOfDay();
        ATime time2 = ATime.ofNanoOfDay(nanoOfDay);
        assertEquals(12, time2.getHour());
        assertEquals(34, time2.getMinute());
        assertEquals(56, time2.getSecond());
        assertEquals(7890, time2.getNano());
        time = ATime.of(23, 59, 59, 999999999);
        nanoOfDay = time.toNanoOfDay();
        time2 = ATime.ofNanoOfDay(nanoOfDay);
        assertEquals(23, time2.getHour());
        assertEquals(59, time2.getMinute());
        assertEquals(59, time2.getSecond());
        assertEquals(999999999, time2.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#from(Date)}.
     */
    @Test
    public void testFromDate () {
        Date date = new Date();
        ATime time = ATime.from(date);
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        assertEquals(cal.get(Calendar.HOUR_OF_DAY), time.getHour());
        assertEquals(cal.get(Calendar.MINUTE), time.getMinute());
        assertEquals(cal.get(Calendar.SECOND), time.getSecond());
        assertEquals(cal.get(Calendar.MILLISECOND), time.getNano() / 1000000);
        // Now with a fixed, known time of day.
        cal.set(Calendar.HOUR_OF_DAY, 12);
        cal.set(Calendar.MINUTE, 34);
        cal.set(Calendar.SECOND, 56);
        cal.set(Calendar.MILLISECOND, 789);
        date = cal.getTime();
        time = ATime.from(date);
        assertEquals(12, time.getHour());
        assertEquals(34, time.getMinute());
        assertEquals(56, time.getSecond());
        assertEquals(789000000, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#from(Calendar)}.
     */
    @Test
    public void testFromCalendar () {
        Calendar cal = Calendar.getInstance();
        ATime time = ATime.from(cal);
        assertEquals(cal.get(Calendar.HOUR_OF_DAY), time.getHour());
        assertEquals(cal.get(Calendar.MINUTE), time.getMinute());
        assertEquals(cal.get(Calendar.SECOND), time.getSecond());
        assertEquals(cal.get(Calendar.MILLISECOND), time.getNano() / 1000000);
        // Now with a fixed, known time of day.
        cal.set(Calendar.HOUR_OF_DAY, 12);
        cal.set(Calendar.MINUTE, 34);
        cal.set(Calendar.SECOND, 56);
        cal.set(Calendar.MILLISECOND, 789);
        time = ATime.from(cal);
        assertEquals(12, time.getHour());
        assertEquals(34, time.getMinute());
        assertEquals(56, time.getSecond());
        assertEquals(789000000, time.getNano());
        passed = true;
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#parse(String)}.
     */
    @Test
    public void testParse () {
        // Compact and colon-separated forms.
        doParse("09", 9, 0, 0, 0);
        doParse("0910", 9, 10, 0, 0);
        doParse("091011", 9, 10, 11, 0);
        doParse("091011.123456789", 9, 10, 11, 123456789);
        doParse("23:24", 23, 24, 0, 0);
        doParse("23:24:25", 23, 24, 25, 0);
        doParse("23:59:59.999999999", 23, 59, 59, 999999999);
        // Out-of-range fields and malformed input must be rejected
        // (expected hour of -1 means "expect an exception").
        doParse("24:59:59.999999999", -1, 0, 0, 0);
        doParse("23:60:59.999999999", -1, 0, 0, 0);
        doParse("23:59:60.999999999", -1, 0, 0, 0);
        doParse("23:59:59.9999999991", -1, 0, 0, 0);
        doParse("18:", -1, 0, 0, 0);
        passed = true;
    }

    /**
     * Parses {@code timeString} and checks the resulting fields.
     * A negative expected hour means an IllegalArgumentException is expected.
     *
     * @param timeString the text to parse
     * @param h expected hour, or negative to expect a parse failure
     * @param m expected minute
     * @param s expected second
     * @param n expected nano of second
     */
    private void doParse (String timeString, int h, int m, int s, int n) {
        try {
            ATime time = ATime.parse(timeString);
            if (h < 0) {
                fail("Exception expected");
            }
            assertEquals(h, time.getHour());
            assertEquals(m, time.getMinute());
            assertEquals(s, time.getSecond());
            assertEquals(n, time.getNano());
        } catch (IllegalArgumentException ex) {
            if (h >= 0) {
                fail("Exception not expected");
            }
        }
    }

    /**
     * Test method for {@link com.fidelis.valface.ATime#format(String)}.
     */
    @Test
    public void testFormat () {
        ATime time = ATime.of(20, 4, 6, 123456789);
        String f = time.format("hh:mm:ss nano");
        assertEquals("08:04:06 123456789", f);
        // Text in angle brackets is emitted literally.
        f = time.format("<Hour: >Hour <Minute: >Minute <Second: >Second <Nano: >Nano");
        assertEquals("Hour: 20 Minute: 4 Second: 6 Nano: 123456789", f);
        String fmt = "Hour hour HH hh - Minute minute mm - Second second ss - milli micro nano AMPM";
        f = time.format(fmt);
        assertEquals("20 8 20 08 - 4 04 04 - 6 06 06 - 123 123456 123456789 PM", f);
        f = time.format("hh : mm : ss nn1,nn2,nn3 ampm");
        assertEquals("08 : 04 : 06 123,456,789 pm", f);
        passed = true;
    }
}
// Generated from LittleJQ.g4 by ANTLR 4.5.1
// NOTE(review): ANTLR-generated parser — do not edit by hand. State numbers and
// the serialized ATN below are machine-produced; regenerate from the LittleJQ.g4
// grammar instead of modifying this file.
/**
 * Copyright 2015 Bernd Vogt and others.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sourcepit.antlr4.eclipse.lang.tests.littlej.query;

import java.util.List;

import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.RuntimeMetaData;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.VocabularyImpl;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ParserATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
import org.antlr.v4.runtime.tree.TerminalNode;

@SuppressWarnings({ "all", "warnings", "unchecked", "unused", "cast" })
public class LittleJQParser extends Parser {
   static {
      RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION);
   }

   protected static final DFA[] _decisionToDFA;
   protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache();
   // Token type constants (from the grammar's implicit and named tokens).
   public static final int T__0 = 1, T__1 = 2, T__2 = 3, T__3 = 4, T__4 = 5, StringLiteral = 6, ID = 7, WS = 8;
   // Rule index constants, one per grammar rule.
   public static final int RULE_query = 0, RULE_querySegment = 1, RULE_segmentName = 2, RULE_segmentQuery = 3,
      RULE_segmentQueryType = 4, RULE_segmentQueryExpression = 5;
   public static final String[] ruleNames = { "query", "querySegment", "segmentName", "segmentQuery",
      "segmentQueryType", "segmentQueryExpression" };

   private static final String[] _LITERAL_NAMES = { null, "'/'", "'['", "'@'", "'='", "']'" };
   private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, null, "StringLiteral", "ID", "WS" };
   public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);

   /**
    * @deprecated Use {@link #VOCABULARY} instead.
    */
   @Deprecated
   public static final String[] tokenNames;
   static {
      tokenNames = new String[_SYMBOLIC_NAMES.length];
      for (int i = 0; i < tokenNames.length; i++) {
         tokenNames[i] = VOCABULARY.getLiteralName(i);
         if (tokenNames[i] == null) {
            tokenNames[i] = VOCABULARY.getSymbolicName(i);
         }
         if (tokenNames[i] == null) {
            tokenNames[i] = "<INVALID>";
         }
      }
   }

   @Override
   @Deprecated
   public String[] getTokenNames() {
      return tokenNames;
   }

   @Override
   public Vocabulary getVocabulary() {
      return VOCABULARY;
   }

   @Override
   public String getGrammarFileName() {
      return "LittleJQ.g4";
   }

   @Override
   public String[] getRuleNames() {
      return ruleNames;
   }

   @Override
   public String getSerializedATN() {
      return _serializedATN;
   }

   @Override
   public ATN getATN() {
      return _ATN;
   }

   public LittleJQParser(TokenStream input) {
      super(input);
      _interp = new ParserATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
   }

   // Parse-tree context for the 'query' rule.
   public static class QueryContext extends ParserRuleContext {
      public TerminalNode EOF() {
         return getToken(LittleJQParser.EOF, 0);
      }

      public List<QuerySegmentContext> querySegment() {
         return getRuleContexts(QuerySegmentContext.class);
      }

      public QuerySegmentContext querySegment(int i) {
         return getRuleContext(QuerySegmentContext.class, i);
      }

      public QueryContext(ParserRuleContext parent, int invokingState) {
         super(parent, invokingState);
      }

      @Override
      public int getRuleIndex() {
         return RULE_query;
      }

      @Override
      public void enterRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).enterQuery(this);
      }

      @Override
      public void exitRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).exitQuery(this);
      }

      @Override
      public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
         if (visitor instanceof LittleJQVisitor) return ((LittleJQVisitor<? extends T>) visitor).visitQuery(this);
         else return visitor.visitChildren(this);
      }
   }

   public final QueryContext query() throws RecognitionException {
      QueryContext _localctx = new QueryContext(_ctx, getState());
      enterRule(_localctx, 0, RULE_query);
      int _la;
      try {
         enterOuterAlt(_localctx, 1);
         {
            setState(15);
            _errHandler.sync(this);
            _la = _input.LA(1);
            while (_la == T__0) {
               {
                  {
                     setState(12);
                     querySegment();
                  }
               }
               setState(17);
               _errHandler.sync(this);
               _la = _input.LA(1);
            }
            setState(18);
            match(EOF);
         }
      }
      catch (RecognitionException re) {
         _localctx.exception = re;
         _errHandler.reportError(this, re);
         _errHandler.recover(this, re);
      }
      finally {
         exitRule();
      }
      return _localctx;
   }

   // Parse-tree context for the 'querySegment' rule.
   public static class QuerySegmentContext extends ParserRuleContext {
      public SegmentNameContext segmentName() {
         return getRuleContext(SegmentNameContext.class, 0);
      }

      public SegmentQueryContext segmentQuery() {
         return getRuleContext(SegmentQueryContext.class, 0);
      }

      public QuerySegmentContext(ParserRuleContext parent, int invokingState) {
         super(parent, invokingState);
      }

      @Override
      public int getRuleIndex() {
         return RULE_querySegment;
      }

      @Override
      public void enterRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).enterQuerySegment(this);
      }

      @Override
      public void exitRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).exitQuerySegment(this);
      }

      @Override
      public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
         if (visitor instanceof LittleJQVisitor) return ((LittleJQVisitor<? extends T>) visitor).visitQuerySegment(this);
         else return visitor.visitChildren(this);
      }
   }

   public final QuerySegmentContext querySegment() throws RecognitionException {
      QuerySegmentContext _localctx = new QuerySegmentContext(_ctx, getState());
      enterRule(_localctx, 2, RULE_querySegment);
      int _la;
      try {
         enterOuterAlt(_localctx, 1);
         {
            setState(20);
            match(T__0);
            setState(21);
            segmentName();
            setState(23);
            _la = _input.LA(1);
            if (_la == T__1) {
               {
                  setState(22);
                  segmentQuery();
               }
            }
         }
      }
      catch (RecognitionException re) {
         _localctx.exception = re;
         _errHandler.reportError(this, re);
         _errHandler.recover(this, re);
      }
      finally {
         exitRule();
      }
      return _localctx;
   }

   // Parse-tree context for the 'segmentName' rule.
   public static class SegmentNameContext extends ParserRuleContext {
      public TerminalNode ID() {
         return getToken(LittleJQParser.ID, 0);
      }

      public SegmentNameContext(ParserRuleContext parent, int invokingState) {
         super(parent, invokingState);
      }

      @Override
      public int getRuleIndex() {
         return RULE_segmentName;
      }

      @Override
      public void enterRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).enterSegmentName(this);
      }

      @Override
      public void exitRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).exitSegmentName(this);
      }

      @Override
      public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
         if (visitor instanceof LittleJQVisitor) return ((LittleJQVisitor<? extends T>) visitor).visitSegmentName(this);
         else return visitor.visitChildren(this);
      }
   }

   public final SegmentNameContext segmentName() throws RecognitionException {
      SegmentNameContext _localctx = new SegmentNameContext(_ctx, getState());
      enterRule(_localctx, 4, RULE_segmentName);
      try {
         enterOuterAlt(_localctx, 1);
         {
            setState(25);
            match(ID);
         }
      }
      catch (RecognitionException re) {
         _localctx.exception = re;
         _errHandler.reportError(this, re);
         _errHandler.recover(this, re);
      }
      finally {
         exitRule();
      }
      return _localctx;
   }

   // Parse-tree context for the 'segmentQuery' rule.
   public static class SegmentQueryContext extends ParserRuleContext {
      public SegmentQueryTypeContext segmentQueryType() {
         return getRuleContext(SegmentQueryTypeContext.class, 0);
      }

      public SegmentQueryExpressionContext segmentQueryExpression() {
         return getRuleContext(SegmentQueryExpressionContext.class, 0);
      }

      public SegmentQueryContext(ParserRuleContext parent, int invokingState) {
         super(parent, invokingState);
      }

      @Override
      public int getRuleIndex() {
         return RULE_segmentQuery;
      }

      @Override
      public void enterRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).enterSegmentQuery(this);
      }

      @Override
      public void exitRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).exitSegmentQuery(this);
      }

      @Override
      public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
         if (visitor instanceof LittleJQVisitor) return ((LittleJQVisitor<? extends T>) visitor).visitSegmentQuery(this);
         else return visitor.visitChildren(this);
      }
   }

   public final SegmentQueryContext segmentQuery() throws RecognitionException {
      SegmentQueryContext _localctx = new SegmentQueryContext(_ctx, getState());
      enterRule(_localctx, 6, RULE_segmentQuery);
      try {
         enterOuterAlt(_localctx, 1);
         {
            setState(27);
            match(T__1);
            setState(28);
            match(T__2);
            setState(29);
            segmentQueryType();
            setState(30);
            match(T__3);
            setState(31);
            segmentQueryExpression();
            setState(32);
            match(T__4);
         }
      }
      catch (RecognitionException re) {
         _localctx.exception = re;
         _errHandler.reportError(this, re);
         _errHandler.recover(this, re);
      }
      finally {
         exitRule();
      }
      return _localctx;
   }

   // Parse-tree context for the 'segmentQueryType' rule.
   public static class SegmentQueryTypeContext extends ParserRuleContext {
      public TerminalNode ID() {
         return getToken(LittleJQParser.ID, 0);
      }

      public SegmentQueryTypeContext(ParserRuleContext parent, int invokingState) {
         super(parent, invokingState);
      }

      @Override
      public int getRuleIndex() {
         return RULE_segmentQueryType;
      }

      @Override
      public void enterRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).enterSegmentQueryType(this);
      }

      @Override
      public void exitRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).exitSegmentQueryType(this);
      }

      @Override
      public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
         if (visitor instanceof LittleJQVisitor) return ((LittleJQVisitor<? extends T>) visitor).visitSegmentQueryType(this);
         else return visitor.visitChildren(this);
      }
   }

   public final SegmentQueryTypeContext segmentQueryType() throws RecognitionException {
      SegmentQueryTypeContext _localctx = new SegmentQueryTypeContext(_ctx, getState());
      enterRule(_localctx, 8, RULE_segmentQueryType);
      try {
         enterOuterAlt(_localctx, 1);
         {
            setState(34);
            match(ID);
         }
      }
      catch (RecognitionException re) {
         _localctx.exception = re;
         _errHandler.reportError(this, re);
         _errHandler.recover(this, re);
      }
      finally {
         exitRule();
      }
      return _localctx;
   }

   // Parse-tree context for the 'segmentQueryExpression' rule.
   public static class SegmentQueryExpressionContext extends ParserRuleContext {
      public TerminalNode StringLiteral() {
         return getToken(LittleJQParser.StringLiteral, 0);
      }

      public SegmentQueryExpressionContext(ParserRuleContext parent, int invokingState) {
         super(parent, invokingState);
      }

      @Override
      public int getRuleIndex() {
         return RULE_segmentQueryExpression;
      }

      @Override
      public void enterRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).enterSegmentQueryExpression(this);
      }

      @Override
      public void exitRule(ParseTreeListener listener) {
         if (listener instanceof LittleJQListener) ((LittleJQListener) listener).exitSegmentQueryExpression(this);
      }

      @Override
      public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
         if (visitor instanceof LittleJQVisitor) return ((LittleJQVisitor<? extends T>) visitor).visitSegmentQueryExpression(this);
         else return visitor.visitChildren(this);
      }
   }

   public final SegmentQueryExpressionContext segmentQueryExpression() throws RecognitionException {
      SegmentQueryExpressionContext _localctx = new SegmentQueryExpressionContext(_ctx, getState());
      enterRule(_localctx, 10, RULE_segmentQueryExpression);
      try {
         enterOuterAlt(_localctx, 1);
         {
            setState(36);
            match(StringLiteral);
         }
      }
      catch (RecognitionException re) {
         _localctx.exception = re;
         _errHandler.reportError(this, re);
         _errHandler.recover(this, re);
      }
      finally {
         exitRule();
      }
      return _localctx;
   }

   // Machine-serialized ATN — regenerated by ANTLR; never edit manually.
   public static final String _serializedATN = "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\n)\4\2\t\2\4\3\t" +
      "\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\3\2\7\2\20\n\2\f\2\16\2\23\13\2\3\2" +
      "\3\2\3\3\3\3\3\3\5\3\32\n\3\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3" +
      "\6\3\7\3\7\3\7\2\2\b\2\4\6\b\n\f\2\2$\2\21\3\2\2\2\4\26\3\2\2\2\6\33\3" +
      "\2\2\2\b\35\3\2\2\2\n$\3\2\2\2\f&\3\2\2\2\16\20\5\4\3\2\17\16\3\2\2\2" +
      "\20\23\3\2\2\2\21\17\3\2\2\2\21\22\3\2\2\2\22\24\3\2\2\2\23\21\3\2\2\2" +
      "\24\25\7\2\2\3\25\3\3\2\2\2\26\27\7\3\2\2\27\31\5\6\4\2\30\32\5\b\5\2" +
      "\31\30\3\2\2\2\31\32\3\2\2\2\32\5\3\2\2\2\33\34\7\t\2\2\34\7\3\2\2\2\35" +
      "\36\7\4\2\2\36\37\7\5\2\2\37 \5\n\6\2 !\7\6\2\2!\"\5\f\7\2\"#\7\7\2\2" +
      "#\t\3\2\2\2$%\7\t\2\2%\13\3\2\2\2&\'\7\b\2\2\'\r\3\2\2\2\4\21\31";
   public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray());
   static {
      _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
      for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
         _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
      }
   }
}
/* * Copyright (C) 2011 Scripture Software * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * * Project: BibleQuote-for-Android * File: LibraryActivity.java * * Created by Vladimir Yakushev at 10/2017 * E-mail: [email protected] * WWW: http://www.scripturesoftware.org */ package com.BibleQuote.presentation.ui.library; import android.content.Context; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.GridView; import android.widget.ListView; import android.widget.SimpleAdapter; import android.widget.Toast; import androidx.annotation.NonNull; import com.BibleQuote.R; import com.BibleQuote.async.task.AsyncOpenModule; import com.BibleQuote.async.task.AsyncRefreshModules; import com.BibleQuote.async.task.LoadModuleFromFile; import com.BibleQuote.di.component.ActivityComponent; import com.BibleQuote.domain.controller.ILibraryController; import com.BibleQuote.domain.entity.BaseModule; import com.BibleQuote.domain.entity.BibleReference; import com.BibleQuote.domain.entity.Book; import 
com.BibleQuote.domain.exceptions.BookDefinitionException;
import com.BibleQuote.domain.exceptions.BookNotFoundException;
import com.BibleQuote.domain.exceptions.BooksDefinitionException;
import com.BibleQuote.domain.exceptions.ExceptionHelper;
import com.BibleQuote.domain.exceptions.OpenModuleException;
import com.BibleQuote.entity.ItemList;
import com.BibleQuote.managers.Librarian;
import com.BibleQuote.presentation.dialogs.NotifyDialog;
import com.BibleQuote.presentation.ui.base.AsyncTaskActivity;
import com.BibleQuote.utils.Task;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.OnItemClick;

import ru.churchtools.deskbible.data.library.LibraryContext;

/**
 * Library browser screen. Walks the user through three selection stages —
 * module, then book, then chapter — and returns the resulting OSIS link
 * ("module.book.chapter") to the caller via {@code setResult}. Also supports
 * refreshing the module list and importing a module from a local ZIP file.
 */
public class LibraryActivity extends AsyncTaskActivity {

    /** Request code for the ACTION_GET_CONTENT picker used to import a module ZIP. */
    private static final int ACTION_CODE_GET_FILE = 1;
    // The three selection stages of this screen; viewMode always holds one of these.
    private static final int MODULE_VIEW = 1, BOOK_VIEW = 2, CHAPTER_VIEW = 3;
    private static final String TAG = LibraryActivity.class.getSimpleName();

    @BindView(R.id.books) ListView booksList;
    @BindView(R.id.btnBook) Button btnBook;
    @BindView(R.id.btnChapter) Button btnChapter;
    @BindView(R.id.btnModule) Button btnModule;
    @BindView(R.id.chapterChoose) GridView chapterList;
    @BindView(R.id.modules) ListView modulesList;

    @Inject Librarian librarian;
    @Inject LibraryContext mLibraryContext;
    @Inject ILibraryController mILibraryController;

    // Current selection; Librarian.EMPTY_OBJ marks "nothing selected yet".
    private String bookID = Librarian.EMPTY_OBJ;
    private ArrayList<ItemList> books = new ArrayList<>();
    private String chapter = Librarian.EMPTY_OBJ;
    private List<String> chapters = new ArrayList<>();
    private String messageRefresh;
    private String moduleID = Librarian.EMPTY_OBJ;
    // Last selected list positions, used to restore scroll/selection per stage.
    private int modulePos, bookPos, chapterPos;
    private ArrayList<ItemList> modules = new ArrayList<>();
    private int viewMode = 1; // 1 == MODULE_VIEW: start at module selection by default

    /** Builds an intent for launching this activity. */
    public static Intent createIntent(@NonNull Context context) {
        return new Intent(context, LibraryActivity.class);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_library);
        ButterKnife.bind(this);
        messageRefresh = getResources().getString(R.string.messageRefresh);
        // If a valid reference is already open, jump straight to chapter selection.
        BibleReference osisLink = librarian.getCurrentOSISLink();
        if (librarian.isOSISLinkValid(osisLink)) {
            moduleID = osisLink.getModuleID();
            bookID = osisLink.getBookID();
            chapter = String.valueOf(osisLink.getChapter());
            updateView(CHAPTER_VIEW);
        } else {
            updateView(MODULE_VIEW);
        }
        setButtonText();
    }

    @Override
    protected void inject(ActivityComponent component) {
        component.inject(this);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater infl = getMenuInflater();
        infl.inflate(R.menu.menu_library, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle item selection
        switch (item.getItemId()) {
            case R.id.action_bar_refresh:
                // Rescan the on-device library in the background.
                mAsyncManager.setupTask(new AsyncRefreshModules(messageRefresh, false), this);
                return true;
            case R.id.menu_library_add:
                choiceModuleFromFile();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    protected void onPostResume() {
        super.onPostResume();
        // Re-render the stage we were on (adapters may be stale after pause).
        updateView(viewMode);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == ACTION_CODE_GET_FILE) {
            if (resultCode == RESULT_OK) {
                getModuleFromFile(data.getData());
            }
        } else {
            Log.e(TAG, "Unknown request code: " + requestCode);
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    /** Dispatches background-task completion to the matching handler. */
    @Override
    public void onTaskComplete(Task task) {
        if (task == null || task.isCancelled()) {
            return;
        }
        if (task instanceof AsyncOpenModule) {
            onOpenModuleComplete((AsyncOpenModule) task);
        } else if (task instanceof LoadModuleFromFile) {
            onLoadModuleComplete((LoadModuleFromFile) task);
        } else {
            // e.g. AsyncRefreshModules: fall back to re-showing the module list.
            updateView(MODULE_VIEW);
        }
    }

    @Override
    public Context getContext() {
        return this;
    }

    /** A book was tapped: advance to chapter selection; auto-open single-chapter books. */
    @OnItemClick(R.id.books)
    void onClickBookItem(int position) {
        bookPos = position;
        // NOTE(review): literal "ID" here vs ItemList.ID elsewhere — presumably the
        // same key; verify against ItemList and unify.
        bookID = books.get(bookPos).get("ID");
        chapterPos = 0;
        updateView(CHAPTER_VIEW);
        setButtonText();
        if (chapters.size() == 1) {
            chapter = chapters.get(0);
            readChapter();
        }
    }

    /** A chapter was tapped: record it and return the result to the caller. */
    @OnItemClick(R.id.chapterChoose)
    void onClickChapterItem(int position) {
        chapterPos = position;
        chapter = chapters.get(position);
        setButtonText();
        readChapter();
    }

    /** A module was tapped: open it asynchronously, then advance to book selection. */
    @OnItemClick(R.id.modules)
    void onClickModuleItem(int position2) {
        modules = librarian.getModulesList();
        if (modules.size() <= position2) {
            // List changed under us; re-render instead of indexing out of bounds.
            updateView(MODULE_VIEW);
            return;
        }
        modulePos = position2;
        moduleID = modules.get(modulePos).get(ItemList.ID);
        bookPos = 0;
        chapterPos = 0;
        String message = getResources().getString(R.string.messageLoadBooks);
        BibleReference currentOSISLink = librarian.getCurrentOSISLink();
        // Re-target the current reference at the newly chosen module.
        BibleReference osisLink1 = new BibleReference(
                currentOSISLink.getModuleDatasource(), null, moduleID,
                currentOSISLink.getBookID(), currentOSISLink.getChapter(),
                currentOSISLink.getFromVerse());
        mAsyncManager.setupTask(new AsyncOpenModule(message, false, osisLink1), this);
    }

    /** The three stage buttons simply switch back to their stage. */
    @OnClick({R.id.btnBook, R.id.btnChapter, R.id.btnModule})
    void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.btnBook:
                onClickBook();
                break;
            case R.id.btnChapter:
                onClickChapter();
                break;
            case R.id.btnModule:
                onClickModule();
                break;
        }
    }

    /** Launches a system file picker for a module ZIP, if any app can handle it. */
    private void choiceModuleFromFile() {
        final Intent target = new Intent(Intent.ACTION_GET_CONTENT)
                .setType("application/zip")
                .addCategory(Intent.CATEGORY_OPENABLE);
        if (target.resolveActivity(getPackageManager()) != null) {
            startActivityForResult(target, ACTION_CODE_GET_FILE);
        } else {
            Toast.makeText(this, R.string.exception_add_module_from_file, Toast.LENGTH_LONG).show();
        }
    }

    /** Builds the adapter for the book list; on failure leaves {@code books} empty. */
    private SimpleAdapter getBookAdapter() {
        books = new ArrayList<>();
        if (!librarian.getModulesList().isEmpty()) {
            try {
                books = librarian.getModuleBooksList(moduleID);
            } catch (OpenModuleException e) {
                ExceptionHelper.onOpenModuleException(e, this, TAG);
            } catch (BooksDefinitionException e) {
                ExceptionHelper.onBooksDefinitionException(e, this, TAG);
            } catch (BookDefinitionException e) {
                ExceptionHelper.onBookDefinitionException(e, this, TAG);
            }
        }
        return new SimpleAdapter(this, books,
                R.layout.item_list,
                new String[]{ItemList.ID, ItemList.Name},
                new int[]{R.id.id, R.id.name});
    }

    /** Builds the adapter for the chapter grid; keeps the old list on failure. */
    private ArrayAdapter<String> getChapterAdapter() {
        try {
            chapters = librarian.getChaptersList(moduleID, bookID);
        } catch (BookNotFoundException e) {
            ExceptionHelper.onBookNotFoundException(e, this, TAG);
        } catch (OpenModuleException e) {
            ExceptionHelper.onOpenModuleException(e, this, TAG);
        }
        return new ArrayAdapter<>(this, R.layout.chapter_item, R.id.chapter, chapters);
    }

    /** Builds the adapter for the module list from the librarian's current view. */
    private SimpleAdapter getModuleAdapter() {
        modules = librarian.getModulesList();
        return new SimpleAdapter(this, modules,
                R.layout.item_list,
                new String[]{ItemList.ID, ItemList.Name},
                new int[]{R.id.id, R.id.name});
    }

    /** Copies and registers a module from the picked file in the background. */
    private void getModuleFromFile(Uri uri) {
        mAsyncManager.setupTask(new LoadModuleFromFile(
                this, getString(R.string.copy_module_from_file), uri,
                mILibraryController, mLibraryContext), this);
    }

    private void onClickBook() {
        if (bookID.equals(Librarian.EMPTY_OBJ)) {
            return; // no book selected yet — nothing to show
        }
        updateView(BOOK_VIEW);
    }

    private void onClickChapter() {
        if (chapter.equals(Librarian.EMPTY_OBJ)) {
            return; // no chapter selected yet — nothing to show
        }
        updateView(CHAPTER_VIEW);
    }

    private void onClickModule() {
        if (moduleID.equals(Librarian.EMPTY_OBJ)) {
            return; // no module selected yet — nothing to show
        }
        updateView(MODULE_VIEW);
    }

    /** Maps a module-import status code to a user-facing result (refresh or error dialog). */
    private void onLoadModuleComplete(LoadModuleFromFile task) {
        LoadModuleFromFile.StatusCode statusCode = task.getStatusCode();
        String errorMessage;
        switch (statusCode) {
            case Success:
                updateView(MODULE_VIEW);
                return;
            case FileNotExist:
                errorMessage = getString(R.string.file_not_exist);
                break;
            case FileNotSupported:
                errorMessage = getString(R.string.file_not_supported);
                break;
            case MoveFailed:
                errorMessage = getString(R.string.file_not_moved);
                break;
            case LibraryNotFound:
                // NOTE(review): reuses file_not_moved — looks like a copy-paste;
                // confirm whether a dedicated "library not found" string exists.
                errorMessage = getString(R.string.file_not_moved);
                break;
            default:
                errorMessage = getString(R.string.err_load_module_unknown);
        }
        new NotifyDialog(errorMessage, this).show();
    }

    /** On successful module open, picks a valid book and advances to book selection. */
    private void onOpenModuleComplete(AsyncOpenModule task) {
        Exception e = task.getException();
        if (e == null) {
            BaseModule module = task.getModule();
            moduleID = module.getID();
            Map<String, Book> books = module.getBooks();
            // If the previously selected book is absent in this module, take the first one.
            if (books != null && books.size() != 0 && !books.containsKey(bookID)) {
                Iterator<String> iterator = books.keySet().iterator();
                bookID = iterator.next();
            }
            setButtonText();
            updateView(BOOK_VIEW);
        } else {
            if (e instanceof OpenModuleException) {
                ExceptionHelper.onOpenModuleException((OpenModuleException) e, this, TAG);
            } else if (e instanceof BooksDefinitionException) {
                ExceptionHelper.onBooksDefinitionException((BooksDefinitionException) e, this, TAG);
            } else if (e instanceof BookDefinitionException) {
                ExceptionHelper.onBookDefinitionException((BookDefinitionException) e, this, TAG);
            }
            updateView(MODULE_VIEW);
        }
    }

    /** Returns the selected "module.book.chapter" link to the caller and closes. */
    private void readChapter() {
        setResult(RESULT_OK, new Intent()
                .putExtra("linkOSIS", String.format("%s.%s.%s", moduleID, bookID, chapter)));
        finish();
    }

    /**
     * Refreshes the three stage buttons from the current selection, validating it
     * against the library and resetting whatever no longer resolves.
     */
    private void setButtonText() {
        String bookShortName = Librarian.EMPTY_OBJ;
        if (!moduleID.equals(Librarian.EMPTY_OBJ) && !bookID.equals(Librarian.EMPTY_OBJ)) {
            try {
                bookShortName = librarian.getBookShortName(moduleID, bookID);
                List<String> chList = librarian.getChaptersList(moduleID, bookID);
                if (!chList.isEmpty()) {
                    // Keep the current chapter if it still exists, else fall back to the first.
                    chapter = chList.contains(chapter) ? chapter : chList.get(0);
                } else {
                    chapter = Librarian.EMPTY_OBJ;
                }
            } catch (OpenModuleException e) {
                ExceptionHelper.onOpenModuleException(e, this, TAG);
                moduleID = Librarian.EMPTY_OBJ;
                bookID = Librarian.EMPTY_OBJ;
                chapter = Librarian.EMPTY_OBJ;
            } catch (BookNotFoundException e) {
                ExceptionHelper.onBookNotFoundException(e, this, TAG);
                bookID = Librarian.EMPTY_OBJ;
                chapter = Librarian.EMPTY_OBJ;
            }
        }
        btnModule.setText(moduleID);
        btnBook.setText(bookShortName);
        btnChapter.setText(chapter);
    }

    /** Switches the screen to the given stage: toggles buttons/lists and renders it. */
    private void updateView(int viewMode) {
        this.viewMode = viewMode;
        // The button for the active stage is disabled; the other two navigate back.
        btnModule.setEnabled(viewMode != MODULE_VIEW);
        btnBook.setEnabled(viewMode != BOOK_VIEW);
        btnChapter.setEnabled(viewMode != CHAPTER_VIEW);
        modulesList.setVisibility(viewMode == MODULE_VIEW ? View.VISIBLE : View.GONE);
        booksList.setVisibility(viewMode == BOOK_VIEW ? View.VISIBLE : View.GONE);
        chapterList.setVisibility(viewMode == CHAPTER_VIEW ? View.VISIBLE : View.GONE);
        switch (viewMode) {
            case MODULE_VIEW:
                viewModeModule();
                break;
            case BOOK_VIEW:
                viewModeBook();
                break;
            case CHAPTER_VIEW:
                viewModeChapter();
                break;
            default:
                break;
        }
    }

    /** Populates the book list and scrolls to the currently selected book. */
    private void viewModeBook() {
        booksList.setAdapter(getBookAdapter());
        ItemList itemBook;
        try {
            itemBook = new ItemList(bookID, librarian.getBookFullName(moduleID, bookID));
            bookPos = books.indexOf(itemBook);
            if (bookPos >= 0) {
                booksList.setSelection(bookPos);
            }
        } catch (OpenModuleException e) {
            ExceptionHelper.onOpenModuleException(e, this, TAG);
        }
    }

    /** Populates the chapter grid and scrolls to the currently selected chapter. */
    private void viewModeChapter() {
        chapterList.setAdapter(getChapterAdapter());
        chapterPos = chapters.indexOf(chapter);
        if (chapterPos >= 0) {
            chapterList.setSelection(chapterPos);
        }
    }

    /** Populates the module list and scrolls to the currently selected module. */
    private void viewModeModule() {
        modulesList.setAdapter(getModuleAdapter());
        modulePos = modules.indexOf(new ItemList(moduleID, librarian.getModuleFullName()));
        if (modulePos >= 0) {
            modulesList.setSelection(modulePos);
        }
    }
}
/**
 * This document is a part of the source code and related artifacts
 * for CollectionSpace, an open source collections management system
 * for museums and related institutions:
 * http://www.collectionspace.org
 * http://wiki.collectionspace.org
 * Copyright 2009 Regents of the University of California
 * Licensed under the Educational Community License (ECL), Version 2.0.
 * You may not use this file except in compliance with this License.
 * You may obtain a copy of the ECL 2.0 License at
 * https://source.collectionspace.org/collection-space/LICENSE.txt
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.collectionspace.services.contact.nuxeo;

import java.util.Map;

import javax.ws.rs.core.UriInfo;

import org.collectionspace.services.client.AuthorityClient;
import org.collectionspace.services.config.service.ObjectPartType;
import org.collectionspace.services.contact.ContactJAXBSchema;
import org.collectionspace.services.common.document.DocumentWrapper;
import org.collectionspace.services.nuxeo.client.java.NuxeoDocumentModelHandler;
import org.collectionspace.services.contact.ContactsCommon;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The Class ContactDocumentModelHandler.
 *
 * Nuxeo document-model handler for Contact records. On create it stamps the
 * parent-authority linkage onto the document, and on both create and update
 * it recomputes the contact's display name from its primary email, telephone
 * number, and address.
 */
public class ContactDocumentModelHandler extends NuxeoDocumentModelHandler<ContactsCommon> {

    private final Logger logger = LoggerFactory.getLogger(ContactDocumentModelHandler.class);

    private static final String COMMON_PART_LABEL = "contacts_common";

    // CSID of the parent authority this contact belongs to.
    private String inAuthority;
    // CSID of the authority item this contact belongs to.
    private String inItem;

    /**
     * Gets the in authority.
     *
     * @return the in authority
     */
    public String getInAuthority() {
        return inAuthority;
    }

    /**
     * Sets the in authority.
     *
     * @param inAuthority the new in authority
     */
    public void setInAuthority(String inAuthority) {
        this.inAuthority = inAuthority;
    }

    /**
     * Gets the in item.
     *
     * @return the in item
     */
    public String getInItem() {
        return inItem;
    }

    /**
     * Sets the in item.
     *
     * @param inItem the new in item
     */
    public void setInItem(String inItem) {
        this.inItem = inItem;
    }

    /* (non-Javadoc)
     * @see org.collectionspace.services.nuxeo.client.java.DocumentModelHandler#handleCreate(org.collectionspace.services.common.document.DocumentWrapper)
     */
    @Override
    public void handleCreate(DocumentWrapper<DocumentModel> wrapDoc) throws Exception {
        // first fill all the parts of the document
        super.handleCreate(wrapDoc);
        // Then stamp the parent linkage and compute the display name.
        handleInAuthority(wrapDoc.getWrappedObject());
        handleDisplayNames(wrapDoc.getWrappedObject());
    }

    /* (non-Javadoc)
     * @see org.collectionspace.services.nuxeo.client.java.DocumentModelHandler#handleUpdate(org.collectionspace.services.common.document.DocumentWrapper)
     */
    @Override
    public void handleUpdate(DocumentWrapper<DocumentModel> wrapDoc) throws Exception {
        // Parent linkage is read-only on update (see filterReadOnlyPropertiesForPart),
        // so only the display name needs recomputing here.
        super.handleUpdate(wrapDoc);
        handleDisplayNames(wrapDoc.getWrappedObject());
    }

    /**
     * Check the logic around the parent pointer. Note that we only need do this on
     * create, since we have logic to make this read-only on update.
     *
     * @param docModel the contact document being created
     *
     * @throws Exception the exception
     */
    private void handleInAuthority(DocumentModel docModel) throws Exception {
        String commonPartLabel = getServiceContext().getCommonPartLabel("contacts");
        docModel.setProperty(commonPartLabel, ContactJAXBSchema.IN_AUTHORITY, inAuthority);
        docModel.setProperty(commonPartLabel, ContactJAXBSchema.IN_ITEM, inItem);
    }

    /**
     * Recomputes and stores the contact's display name from the primary values of
     * its email, telephone number, and address groups.
     *
     * @param docModel the contact document being created or updated
     * @throws Exception on property access failure
     */
    private void handleDisplayNames(DocumentModel docModel) throws Exception {
        String commonPartLabel = getServiceContext().getCommonPartLabel("contacts");
        String email = getStringValueInPrimaryRepeatingComplexProperty(
                docModel, commonPartLabel,
                ContactJAXBSchema.EMAIL_GROUP_LIST, ContactJAXBSchema.EMAIL);
        String telephoneNumber = getStringValueInPrimaryRepeatingComplexProperty(
                docModel, commonPartLabel,
                ContactJAXBSchema.TELEPHONE_NUMBER_GROUP_LIST, ContactJAXBSchema.TELEPHONE_NUMBER);
        String addressPlace1 = getStringValueInPrimaryRepeatingComplexProperty(
                docModel, commonPartLabel,
                ContactJAXBSchema.ADDRESS_GROUP_LIST, ContactJAXBSchema.ADDRESS_PLACE_1);
        String displayName = prepareDefaultDisplayName(email, telephoneNumber, addressPlace1);
        docModel.setProperty(commonPartLabel, ContactJAXBSchema.DISPLAY_NAME, displayName);
    }

    /**
     * Produces a default displayName from the contact's primary email, telephone
     * number, and first address line.
     * @see OrgAuthorityClientUtils.prepareDefaultDisplayName() which
     * duplicates this logic, until we define a service-general utils package
     * that is neither client nor service specific.
* @param shortName * @param foundingPlace * @return * @throws Exception */ private static String prepareDefaultDisplayName(String email, String telephoneNumber, String addressPlace1) throws Exception { final int MAX_DISPLAY_NAME_LENGTH = 30; StringBuilder newStr = new StringBuilder(""); final String sep = " "; boolean firstAdded = false; if (!(email == null || email.isEmpty())) { newStr.append(email); firstAdded = true; } if (!(telephoneNumber == null || telephoneNumber.isEmpty())) { if (newStr.length() <= MAX_DISPLAY_NAME_LENGTH) { if (firstAdded) { newStr.append(sep); } else { firstAdded = true; } newStr.append(telephoneNumber); } } if (!(addressPlace1 == null || addressPlace1.isEmpty())) { if (newStr.length() <= MAX_DISPLAY_NAME_LENGTH) { if (firstAdded) { newStr.append(sep); } newStr.append(addressPlace1); } } String displayName = newStr.toString(); if (displayName.length() > MAX_DISPLAY_NAME_LENGTH) { return displayName.substring(0, MAX_DISPLAY_NAME_LENGTH) + "..."; } else { return displayName; } } @Override public String getUri(DocumentModel docModel) { String uri = ""; UriInfo ui = getServiceContext().getUriInfo(); if (ui != null) { uri = '/' + getAuthorityPathComponent(ui) + '/' + inAuthority + '/' + AuthorityClient.ITEMS + '/' + inItem + getServiceContextPath() + getCsid(docModel); // uri = "/" + ui.getPath() + "/" + getCsid(docModel); } else { uri = super.getUri(docModel); } return uri; } // Assumes the initial path component in the URI, following the base URI, // identifies the relevant authority resource private String getAuthorityPathComponent(UriInfo ui) { return ui.getPathSegments().get(0).toString(); } /** * Filters out ContactJAXBSchema.IN_AUTHORITY, and IN_ITEM, to ensure that * the parent links remains untouched. * Also remove the display name, as this is always computed. 
 * @param objectProps the properties parsed from the update payload
 * @param partMeta metadata for the object to fill
 */
@Override
public void filterReadOnlyPropertiesForPart(
        Map<String, Object> objectProps, ObjectPartType partMeta) {
    super.filterReadOnlyPropertiesForPart(objectProps, partMeta);
    // Parent linkage is fixed at create time and must not be client-writable.
    objectProps.remove(ContactJAXBSchema.IN_AUTHORITY);
    objectProps.remove(ContactJAXBSchema.IN_ITEM);
    // URI is server-assigned and DISPLAY_NAME is always recomputed by this
    // handler, so both are read-only as well.
    objectProps.remove(ContactJAXBSchema.URI);
    objectProps.remove(ContactJAXBSchema.DISPLAY_NAME);
}
}
package mmlib4j.representation.tree.tos;

import java.util.ArrayList;
import java.util.HashSet;

import mmlib4j.datastruct.Queue;
import mmlib4j.images.GrayScaleImage;
import mmlib4j.images.impl.ImageFactory;
import mmlib4j.representation.tree.InfoPrunedTree;
import mmlib4j.representation.tree.MorphologicalTreeFiltering;
import mmlib4j.representation.tree.attribute.Attribute;
import mmlib4j.representation.tree.attribute.ComputerAttributeBasedPerimeterExternal;
import mmlib4j.representation.tree.attribute.ComputerBasicAttribute;
import mmlib4j.representation.tree.attribute.ComputerCentralMomentAttribute;
import mmlib4j.representation.tree.attribute.ComputerDistanceTransform;
import mmlib4j.representation.tree.attribute.ComputerExtinctionValueTreeOfShapes;
import mmlib4j.representation.tree.attribute.ComputerExtinctionValueTreeOfShapes.ExtinctionValueNode;
import mmlib4j.utils.Utils;

/**
 * MMLib4J - Mathematical Morphology Library for Java
 * @author Wonder Alexandre Luz Alves
 *
 * Tree-of-shapes with connected attribute filtering: node attributes are
 * computed lazily on demand, and filtered images are produced by pruning
 * nodes whose attribute values fall below a threshold (or by extinction
 * values), without modifying the underlying tree.
 */
public class ConnectedFilteringByTreeOfShape extends TreeOfShape implements MorphologicalTreeFiltering{

    // Lazy-computation guards: each attribute family is computed at most once per tree.
    private boolean hasComputerBasicAttribute = false;
    private boolean hasComputerAttributeBasedPerimeterExternal = false;
    private boolean hasComputerCentralMomentAttribute = false;
    private boolean hasComputerPatternEulerAttribute = false;
    private boolean hasComputerDistanceTransform = false;
    // Cached distance transform, built once by computerDistanceTransform().
    private ComputerDistanceTransform dt = null;

    public ConnectedFilteringByTreeOfShape(GrayScaleImage img){
        super(img, -1, -1);
        computerBasicAttribute();
    }

    protected ConnectedFilteringByTreeOfShape(BuilderTreeOfShapeByUnionFind build){
        super(build);
    }

    public ConnectedFilteringByTreeOfShape(GrayScaleImage img, int xInfinito, int yInfinito){
        super(img, xInfinito, yInfinito);
        computerBasicAttribute();
    }

    /** Returns the set of all nodes of this tree. */
    public HashSet<NodeToS> getListNodes(){
        return listNode;
    }

    /** Computes (once) and returns the distance transform for this tree. */
    public ComputerDistanceTransform computerDistanceTransform(){
        if(!hasComputerDistanceTransform){
            long ti = System.currentTimeMillis();
            dt = new ComputerDistanceTransform(numNode, getRoot(), imgInput);
            hasComputerDistanceTransform = true;
            if(Utils.debug){
                long tf = System.currentTimeMillis();
                System.out.println("Tempo de execucao [computer distance transform] "+ ((tf - ti) /1000.0) + "s");
            }
        }
        return dt;
    }

    /** Attaches the NUM_HOLES (Euler pattern) attribute to every node, once. */
    public void computerPatternEulerAttribute(){
        if(!hasComputerPatternEulerAttribute){
            long ti = System.currentTimeMillis();
            for(NodeToS node: getListNodes()){
                node.addAttribute(Attribute.NUM_HOLES, new Attribute(Attribute.NUM_HOLES, node.getNumHoles()));
            }
            hasComputerPatternEulerAttribute = true;
            if(Utils.debug){
                long tf = System.currentTimeMillis();
                System.out.println("Tempo de execucao [attribute euler] "+ ((tf - ti) /1000.0) + "s");
            }
        }
    }

    /** Attaches central-moment-based attributes to every node, once. */
    public void computerCentralMomentAttribute(){
        if(!hasComputerCentralMomentAttribute){
            new ComputerCentralMomentAttribute(numNode, getRoot(), imgInput.getWidth()).addAttributeInNodesToS(getListNodes());
            hasComputerCentralMomentAttribute = true;
        }
    }

    /** Attaches the basic attributes (area, level, bounding box, ...) to every node, once. */
    public void computerBasicAttribute(){
        if(!hasComputerBasicAttribute){
            long ti = System.currentTimeMillis();
            new ComputerBasicAttribute(numNode, getRoot(), imgInput).addAttributeInNodesToS(getListNodes());
            hasComputerBasicAttribute = true;
            if(Utils.debug){
                long tf = System.currentTimeMillis();
                System.out.println("Tempo de execucao [basic attribute] "+ ((tf - ti) /1000.0) + "s");
            }
        }
    }

    /** Attaches external-perimeter-based attributes to every node, once. */
    public void computerAttributeBasedPerimeterExternal(){
        if(!hasComputerAttributeBasedPerimeterExternal){
            long ti = System.currentTimeMillis();
            new ComputerAttributeBasedPerimeterExternal(numNode, getRoot(), getInputImage()).addAttributeInNodesToS(getListNodes());
            hasComputerAttributeBasedPerimeterExternal = true;
            if(Utils.debug){
                long tf = System.currentTimeMillis();
                System.out.println("Tempo de execucao [external perimeter] "+ ((tf - ti) /1000.0) + "s");
            }
        }
    }

    /**
     * Ensures the attribute family that provides {@code attr} has been computed,
     * dispatching to the matching lazy computer above.
     */
    public void loadAttribute(int attr){
        switch(attr){
            case Attribute.ALTITUDE:
            case Attribute.AREA:
            case Attribute.VOLUME:
            case Attribute.WIDTH:
            case Attribute.HEIGHT:
            case Attribute.PERIMETER:
case Attribute.LEVEL:
            case Attribute.RECTANGULARITY:
            case Attribute.RATIO_WIDTH_HEIGHT:
                computerBasicAttribute();
                break;
            case Attribute.MOMENT_CENTRAL_02:
            case Attribute.MOMENT_CENTRAL_20:
            case Attribute.MOMENT_CENTRAL_11:
            case Attribute.VARIANCE_LEVEL:
            case Attribute.LEVEL_MEAN:
            case Attribute.MOMENT_COMPACTNESS:
            case Attribute.MOMENT_ECCENTRICITY:
            case Attribute.MOMENT_ELONGATION:
            case Attribute.MOMENT_LENGTH_MAJOR_AXES:
            case Attribute.MOMENT_LENGTH_MINOR_AXES:
            case Attribute.MOMENT_ORIENTATION:
            case Attribute.MOMENT_ASPECT_RATIO:
                computerCentralMomentAttribute();
                break;
            case Attribute.PERIMETER_EXTERNAL:
            case Attribute.CIRCULARITY:
            case Attribute.COMPACTNESS:
            case Attribute.ELONGATION:
                computerAttributeBasedPerimeterExternal();
                break;
            case Attribute.NUM_HOLES:
                computerPatternEulerAttribute();
                break;
        }
    }

    /** Returns the attribute value of {@code type} for {@code node}, computing it lazily if needed. */
    private double getAttribute(NodeToS node, int type){
        loadAttribute(type);
        return node.getAttributeValue(type);
    }

    /**
     * Reconstructs a gray-scale image from a pruned tree: pruned subtrees are
     * painted with their surviving parent's level.
     */
    public GrayScaleImage reconstruction(InfoPrunedTree prunedTree){
        GrayScaleImage imgOut = ImageFactory.createGrayScaleImage(getInputImage());
        Queue<InfoPrunedTree.NodePrunedTree> fifo = new Queue<InfoPrunedTree.NodePrunedTree>();
        fifo.enqueue( prunedTree.getRoot() );
        while(!fifo.isEmpty()){
            InfoPrunedTree.NodePrunedTree node_ = fifo.dequeue();
            NodeToS node = (NodeToS) node_.getInfo();
            for(NodeToS son: node.getChildren()){
                if(prunedTree.wasPruned(son)){
                    // Pruned child: flatten its whole component to the parent's level.
                    for(int p: son.getPixelsOfCC()){
                        imgOut.setPixel(p, node.getLevel());
                    }
                }
            }
            for(int p: node.getCanonicalPixels()){
                imgOut.setPixel(p, node.getLevel());
            }
            for(InfoPrunedTree.NodePrunedTree son: node_.getChildren()){
                fifo.enqueue( son );
            }
        }
        return imgOut;
    }

    /**
     * Builds a pruned-tree view keeping only the nodes whose attribute value
     * exceeds {@code attributeValue}; the tree itself is not modified.
     */
    public InfoPrunedTree getPrunedTree(double attributeValue, int type, int typePruning){
        long ti = System.currentTimeMillis();
        InfoPrunedTree prunedTree = new InfoPrunedTree(this, getRoot(), getNumNode(), type, attributeValue);
        for(NodeToS no: getListNodes()){
            if(! (getAttribute(no, type) <= attributeValue) ){ // prune condition
                prunedTree.addNodeNotPruned(no);
            }
        }
        System.out.println("Tempo de execucao [Tree of shapes - filtering by pruning] "+ ((System.currentTimeMillis() - ti) /1000.0) + "s");
        return prunedTree;
    }

    /**
     * Creates a filtered image by pruning: every node whose attribute value is
     * at most {@code attributeValue} is painted with its parent's level.
     * Note: the original tree structure is left untouched.
     * @param attributeValue attribute threshold
     * @param type attribute id
     * @return the filtered image
     */
    public GrayScaleImage filteringByPruning(double attributeValue, int type){
        long ti = System.currentTimeMillis();
        // NOTE(review): stray second ';' kept from the original (harmless empty statement).
        GrayScaleImage imgOut = ImageFactory.createGrayScaleImage(imgInput);;
        Queue<NodeToS> fifo = new Queue<NodeToS>();
        fifo.enqueue(this.root);
        while(!fifo.isEmpty()){
            NodeToS no = fifo.dequeue();
            //double bb = no.getArea() / ((double) no.getWidthNode() * no.getHeightNode());
            if(getAttribute(no, type) <= attributeValue){// && bb > 0.85){ // ){ // prune
                // Propagate the parent's level down to the whole pruned subtree.
                int levelPropagation = no.parent == null ? no.level : no.parent.level;
                Queue<NodeToS> fifoPruning = new Queue<NodeToS>();
                fifoPruning.enqueue(no);
                while(!fifoPruning.isEmpty()){
                    NodeToS nodePruning = fifoPruning.dequeue();
                    for(NodeToS song: nodePruning.children){
                        fifoPruning.enqueue(song);
                    }
                    for(Integer p: nodePruning.getCanonicalPixels())
                        imgOut.setPixel(p, levelPropagation);
                }
            }
            else{
                // Kept node: paint its own pixels and keep descending.
                for(Integer p: no.getCanonicalPixels()){
                    imgOut.setPixel(p, no.level);
                }
                if(no.children != null){
                    for(NodeToS son: no.children){
                        fifo.enqueue(son);
                    }
                }
            }
        }
        if(Utils.debug)
            // NOTE(review): message is missing the closing ']' — cosmetic only.
            System.out.println("Tempo de execucao [tree of shapes - pruning - "+ ((System.currentTimeMillis() - ti) /1000.0) + "s");
        return imgOut;//paintEdges(imgOut);
    }

    /**
     * Creates a filtered image.
 * Note: producing the filtered image does not modify the original tree structure.
     * @param attributeValue attribute threshold
     * @param type attribute id
     * @param typePruning pruning strategy (plain pruning or extinction values)
     * @return the filtered image
     */
    public GrayScaleImage filtering(double attributeValue, int type, int typePruning){
        if(typePruning == MorphologicalTreeFiltering.EXTINCTION_VALUE)
            return filteringExtinctionValue(attributeValue, type);
        else
            return filteringByPruning(attributeValue, type);
    }

    // Cached extinction-value computation; rebuilt when the attribute type changes.
    ArrayList<ExtinctionValueNode> extincaoPorNode;
    ComputerExtinctionValueTreeOfShapes extinctionValue;

    /**
     * Creates a filtered image using extinction values: the image is first fully
     * reconstructed, then every node whose extinction value is at most
     * {@code attributeValue} has its subtree flattened to the node's level.
     */
    public GrayScaleImage filteringExtinctionValue(double attributeValue, int type){
        loadAttribute(type);
        long ti = System.currentTimeMillis();
        if(extinctionValue == null){
            extinctionValue = new ComputerExtinctionValueTreeOfShapes(this);
            extincaoPorNode = extinctionValue.getExtinctionValueCut(attributeValue, type);
        }
        if(extinctionValue.getType() != type)
            extincaoPorNode = extinctionValue.getExtinctionValueCut(attributeValue, type);
        // NOTE(review): stray second ';' kept from the original (harmless empty statement).
        GrayScaleImage imgOut = ImageFactory.createGrayScaleImage(imgInput);;
        Queue<NodeToS> fifo = new Queue<NodeToS>();
        fifo.enqueue(getRoot());
        // Full reconstruction of the input from the tree.
        while(!fifo.isEmpty()){
            NodeToS no = fifo.dequeue();
            for(Integer p: no.getCanonicalPixels()){
                imgOut.setPixel(p, no.level);
            }
            if(no.children != null){
                for(NodeToS son: no.children){
                    fifo.enqueue(son);
                }
            }
        }
        // Flatten the subtrees of nodes whose extinction value falls below the threshold.
        for(int k=extincaoPorNode.size()-1; k >= 0 ; k--){
            NodeToS no = extincaoPorNode.get(k).node;
            if(extincaoPorNode.get(k).extinctionValue <= attributeValue){ // prune
                // Propagate this node's level down to its whole subtree.
                int levelPropagation = no.level;
                Queue<NodeToS> fifoPruning = new Queue<NodeToS>();
                fifoPruning.enqueue(no);
                while(!fifoPruning.isEmpty()){
                    NodeToS nodePruning = fifoPruning.dequeue();
                    if(nodePruning.children != null){
                        for(NodeToS song: nodePruning.children){
                            fifoPruning.enqueue(song);
                        }
                    }
                    for(Integer p: nodePruning.getCanonicalPixels()){
                        imgOut.setPixel(p, levelPropagation);
                    }
                }
            }
        }
        if(Utils.debug){
            long tf = System.currentTimeMillis();
            System.out.println("Tempo de execucao [tree of shape - extinction value - direct] "+ ((tf - ti) /1000.0) + "s");
        }
        return imgOut;
    }
}
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.setup; import static java.awt.GridBagConstraints.BOTH; import static java.awt.GridBagConstraints.CENTER; import static java.awt.GridBagConstraints.HORIZONTAL; import static java.awt.GridBagConstraints.NONE; import static java.awt.GridBagConstraints.NORTH; import static java.awt.GridBagConstraints.SOUTH; import static java.awt.GridBagConstraints.SOUTHEAST; import static java.awt.GridBagConstraints.WEST; import java.awt.Color; import java.awt.Cursor; import java.awt.Desktop; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import javax.swing.BorderFactory; import javax.swing.JDialog; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JSeparator; import javax.swing.JTextField; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import javax.swing.border.EmptyBorder; import com.badlogic.gdx.setup.GdxSetupUI.SetupButton; import 
com.badlogic.gdx.setup.GdxSetupUI.SetupCheckBox;

/**
 * Modal "Advanced Settings" dialog for the gdx-setup UI.
 *
 * <p>Lets the user configure a Maven mirror URL, offline mode and Kotlin
 * support. The current values are snapshotted when the dialog is shown so
 * that Cancel can restore them.
 */
public class SettingsDialog extends JDialog {

    private JPanel contentPane;
    private SetupButton buttonOK;
    private SetupButton buttonCancel;
    private JLabel linkText;
    private JPanel content;
    private JPanel bottomPanel;
    private JPanel buttonPanel;
    private JTextField mavenTextField;
    SetupCheckBox offlineBox;
    SetupCheckBox kotlinBox;

    // State captured by takeSnapshot() and put back by onCancel().
    private String mavenSnapshot;
    private boolean offlineSnapshot;
    private boolean kotlinSnapshot;

    public SettingsDialog (final SetupCheckBox gwtCheckBox) {
        contentPane = new JPanel(new GridBagLayout());
        setContentPane(contentPane);
        setModal(true);
        getRootPane().setDefaultButton(buttonOK);

        uiLayout(gwtCheckBox);
        uiStyle();

        buttonOK.addActionListener(new ActionListener() {
            public void actionPerformed (ActionEvent e) {
                if (offlineBox.isSelected()) {
                    // Offline builds fail unless all dependencies are already cached, so warn first.
                    int value = JOptionPane.showConfirmDialog(null,
                        "You have selected offline mode. This requires you to have your dependencies already in your maven/gradle cache.\n\nThe setup will fail if you do not have the correct dependencies already.\n\nDo you want to continue?",
                        "Warning!", JOptionPane.YES_NO_OPTION);
                    if (value == JOptionPane.YES_OPTION) {
                        onOK();
                    }
                } else {
                    onOK();
                }
            }
        });

        buttonCancel.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed (ActionEvent e) {
                onCancel();
            }
        });

        // The link label opens the wiki page about using Gradle without IDE integration.
        linkText.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
        linkText.addMouseListener(new MouseAdapter() {
            public void mouseClicked (MouseEvent e) {
                if (e.getClickCount() > 0) {
                    if (Desktop.isDesktopSupported()) {
                        Desktop desktop = Desktop.getDesktop();
                        try {
                            URI uri = new URI(
                                "https://github.com/libgdx/libgdx/wiki/Improving-workflow-with-Gradle#how-to-remove-gradle-ide-integration-from-your-project");
                            desktop.browse(uri);
                        } catch (IOException | URISyntaxException ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            }
        });

        setTitle("Advanced Settings");
        setSize(600, 300);
        setLocationRelativeTo(null);
    }

    /** Builds the component tree and the GridBag layout of the dialog. */
    private void uiLayout (final SetupCheckBox gwtCheckBox) {
        content = new JPanel(new GridBagLayout());
        content.setBorder(BorderFactory.createEmptyBorder(20, 20, 20, 20));
        bottomPanel = new JPanel(new GridBagLayout());
        buttonPanel = new JPanel(new GridBagLayout());
        buttonPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));

        buttonOK = new SetupButton("Save");
        buttonCancel = new SetupButton("Cancel");
        buttonPanel.add(buttonOK, new GridBagConstraints(0, 0, 1, 1, 0, 0, CENTER, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        buttonPanel.add(buttonCancel, new GridBagConstraints(1, 0, 1, 1, 0, 0, CENTER, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));

        contentPane.add(content, new GridBagConstraints(0, 0, 1, 1, 1, 1, NORTH, BOTH, new Insets(0, 0, 0, 0), 0, 0));

        // Two-column header: settings on the left, their descriptions on the right.
        JLabel settings = new JLabel("Settings");
        JLabel description = new JLabel("Description");
        settings.setForeground(new Color(255, 255, 255));
        description.setForeground(new Color(255, 255, 255));
        settings.setHorizontalAlignment(JLabel.CENTER);
        description.setHorizontalAlignment(JLabel.CENTER);
        content.add(settings, new GridBagConstraints(0, 0, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        content.add(description, new GridBagConstraints(3, 0, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));

        JLabel mavenLabel = new JLabel("Maven Mirror Url");
        JLabel mavenDesc = new JLabel("Replaces Maven Central with this repository");
        mavenTextField = new JTextField(15);
        mavenTextField.setMinimumSize(mavenTextField.getPreferredSize());
        mavenLabel.setForeground(new Color(170, 170, 170));
        mavenDesc.setForeground(new Color(170, 170, 170));

        JLabel offlineLabel = new JLabel("Offline Mode");
        JLabel offlineDesc = new JLabel("Don't force download dependencies");
        JLabel kotlinLabel = new JLabel("Use Kotlin");
        JLabel kotlinDesc = new JLabel("Use Kotlin as the main language.");

        offlineBox = new SetupCheckBox();
        offlineLabel.setForeground(new Color(170, 170, 170));
        offlineDesc.setForeground(new Color(170, 170, 170));
        offlineBox.setBackground(new Color(36, 36, 36));

        kotlinBox = new SetupCheckBox();
        // Kotlin and the HTML (GWT) backend are mutually exclusive: selecting one
        // offers to (or silently does) deselect the other.
        kotlinBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed (ActionEvent e) {
                final String message = "Using Kotlin with the HTML backend is not supported. Do you want to disable the HTML backend?";
                if (kotlinBox.isSelected() && gwtCheckBox.isSelected()
                    && JOptionPane.showConfirmDialog(kotlinBox, message, "Warning!", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) {
                    gwtCheckBox.setSelected(false);
                } else if (gwtCheckBox.isSelected()) {
                    kotlinBox.setSelected(false);
                }
            }
        });
        kotlinLabel.setForeground(new Color(170, 170, 170));
        kotlinDesc.setForeground(new Color(170, 170, 170));

        JSeparator separator = new JSeparator();
        separator.setForeground(new Color(85, 85, 85));
        separator.setBackground(new Color(85, 85, 85));

        content.add(separator, new GridBagConstraints(0, 1, 4, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        content.add(mavenLabel, new GridBagConstraints(0, 2, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        content.add(mavenTextField, new GridBagConstraints(1, 2, 2, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 15, 0, 0), 0, 0));
        content.add(mavenDesc, new GridBagConstraints(3, 2, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 15, 0, 0), 0, 0));
        content.add(offlineLabel, new GridBagConstraints(0, 3, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        content.add(offlineBox, new GridBagConstraints(1, 3, 2, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 15, 0, 0), 0, 0));
        content.add(offlineDesc, new GridBagConstraints(3, 3, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 15, 0, 0), 0, 0));
        content.add(kotlinLabel, new GridBagConstraints(0, 4, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        content.add(kotlinBox, new GridBagConstraints(1, 4, 2, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 15, 0, 0), 0, 0));
        content.add(kotlinDesc, new GridBagConstraints(3, 4, 1, 1, 1, 1, NORTH, HORIZONTAL, new Insets(0, 15, 0, 0), 0, 0));

        String text = "<p style=\"font-size:10\">Click for more info on using Gradle without IDE integration</p>";
        linkText = new JLabel("<html>" + text + "</html>");

        bottomPanel.add(linkText, new GridBagConstraints(0, 0, 1, 1, 1, 1, WEST, NONE, new Insets(0, 10, 0, 0), 0, 0));
        bottomPanel.add(buttonPanel, new GridBagConstraints(3, 0, 1, 1, 1, 1, SOUTHEAST, NONE, new Insets(0, 0, 0, 0), 0, 0));
        contentPane.add(bottomPanel, new GridBagConstraints(0, 1, 4, 1, 1, 1, SOUTH, HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    }

    /** Applies the dark color scheme used by the rest of the setup UI. */
    private void uiStyle () {
        content.setBackground(new Color(36, 36, 36));
        content.setForeground(new Color(255, 255, 255));
        bottomPanel.setBackground(new Color(36, 36, 36));
        bottomPanel.setForeground(new Color(255, 255, 255));
        buttonPanel.setBackground(new Color(36, 36, 36));
        buttonPanel.setForeground(new Color(255, 255, 255));
        linkText.setForeground(new Color(20, 150, 20));
        contentPane.setBackground(new Color(36, 36, 36));

        Border line = BorderFactory.createLineBorder(new Color(80, 80, 80));
        Border empty = new EmptyBorder(4, 4, 4, 4);
        CompoundBorder border = new CompoundBorder(line, empty);
        mavenTextField.setBorder(border);
        mavenTextField.setCaretColor(new Color(255, 255, 255));
        mavenTextField.setBackground(new Color(46, 46, 46));
        mavenTextField.setForeground(new Color(255, 255, 255));
    }

    /**
     * Shows the (modal) dialog. A snapshot of the current settings is taken
     * first so a later Cancel can restore them; after the dialog is closed,
     * Kotlin is forced off whenever the HTML backend is still selected.
     */
    public void showDialog (SetupCheckBox gwtCheckBox) {
        takeSnapshot();
        setVisible(true);
        if (gwtCheckBox.isSelected()) {
            kotlinBox.setSelected(false);
            kotlinSnapshot = false;
        }
    }

    /** @return the Gradle command-line arguments implied by these settings. */
    public List<String> getGradleArgs () {
        List<String> list = new ArrayList<String>();
        list.add("--no-daemon");
        if (offlineBox.isSelected()) {
            list.add("--offline");
        }
        return list;
    }

    /** Commits the Maven mirror choice to {@link DependencyBank} and hides the dialog. */
    void onOK () {
        if (mavenTextField.getText().isEmpty()) {
            DependencyBank.mavenCentral = "mavenCentral()";
        } else {
            DependencyBank.mavenCentral = "maven { url \"" + mavenTextField.getText() + "\" }";
        }
        setVisible(false);
    }

    /** Hides the dialog and restores the settings captured by {@link #takeSnapshot()}. */
    void onCancel () {
        setVisible(false);
        restore();
    }

    private void takeSnapshot () {
        mavenSnapshot = mavenTextField.getText();
        offlineSnapshot = offlineBox.isSelected();
        kotlinSnapshot = kotlinBox.isSelected();
    }

    private void restore () {
        mavenTextField.setText(mavenSnapshot);
        offlineBox.setSelected(offlineSnapshot);
        kotlinBox.setSelected(kotlinSnapshot);
    }
}
package abi38_0_0.expo.modules.notifications.notifications.channels; import android.app.NotificationChannel; import android.content.Context; import android.graphics.Color; import android.media.AudioAttributes; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.Settings; import abi38_0_0.org.unimodules.core.ExportedModule; import abi38_0_0.org.unimodules.core.Promise; import abi38_0_0.org.unimodules.core.arguments.ReadableArguments; import abi38_0_0.org.unimodules.core.interfaces.ExpoMethod; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import androidx.core.app.NotificationManagerCompat; import abi38_0_0.expo.modules.notifications.notifications.SoundResolver; import expo.modules.notifications.notifications.enums.AudioContentType; import expo.modules.notifications.notifications.enums.AudioUsage; import expo.modules.notifications.notifications.enums.NotificationImportance; import expo.modules.notifications.notifications.enums.NotificationVisibility; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.AUDIO_ATTRIBUTES_CONTENT_TYPE_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.AUDIO_ATTRIBUTES_FLAGS_ENFORCE_AUDIBILITY_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.AUDIO_ATTRIBUTES_FLAGS_HW_AV_SYNC_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.AUDIO_ATTRIBUTES_FLAGS_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.AUDIO_ATTRIBUTES_USAGE_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.BYPASS_DND_KEY; import static 
abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.DESCRIPTION_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.ENABLE_LIGHTS_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.ENABLE_VIBRATE_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.GROUP_ID_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.IMPORTANCE_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.LIGHT_COLOR_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.LOCKSCREEN_VISIBILITY_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.NAME_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.SHOW_BADGE_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.SOUND_AUDIO_ATTRIBUTES_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.SOUND_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.VIBRATION_PATTERN_KEY; import static abi38_0_0.expo.modules.notifications.notifications.channels.NotificationChannelSerializer.toBundle; /** * An exported module responsible for exposing methods for managing notification channels. 
*/ public class NotificationChannelManagerModule extends ExportedModule { private final static String EXPORTED_NAME = "ExpoNotificationChannelManager"; private final NotificationManagerCompat mNotificationManager; private SoundResolver mSoundResolver; public NotificationChannelManagerModule(Context context) { super(context); mSoundResolver = new SoundResolver(context); mNotificationManager = NotificationManagerCompat.from(context); } @Override public String getName() { return EXPORTED_NAME; } @ExpoMethod public void getNotificationChannelAsync(String channelId, Promise promise) { if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) { promise.resolve(null); return; } promise.resolve(toBundle(mNotificationManager.getNotificationChannel(channelId))); } @ExpoMethod public void getNotificationChannelsAsync(Promise promise) { if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) { promise.resolve(Collections.EMPTY_LIST); return; } List<NotificationChannel> existingChannels = mNotificationManager.getNotificationChannels(); List<Bundle> serializedChannels = new ArrayList<>(existingChannels.size()); for (NotificationChannel channel : existingChannels) { serializedChannels.add(toBundle(channel)); } promise.resolve(serializedChannels); } @ExpoMethod public void setNotificationChannelAsync(String channelId, ReadableArguments channelOptions, Promise promise) { if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) { promise.resolve(null); return; } NotificationChannel channel = new NotificationChannel(channelId, getNameFromOptions(channelOptions), getImportanceFromOptions(channelOptions)); configureChannelWithOptions(channel, channelOptions); mNotificationManager.createNotificationChannel(channel); promise.resolve(toBundle(channel)); } @ExpoMethod public void deleteNotificationChannelAsync(String channelId, Promise promise) { if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) { promise.resolve(null); 
return; } mNotificationManager.deleteNotificationChannel(channelId); promise.resolve(null); } // Processing options protected CharSequence getNameFromOptions(ReadableArguments channelOptions) { return channelOptions.getString(NAME_KEY); } @RequiresApi(api = Build.VERSION_CODES.N) protected int getImportanceFromOptions(ReadableArguments channelOptions) { int enumValue = channelOptions.getInt(IMPORTANCE_KEY, NotificationImportance.DEFAULT.getEnumValue()); NotificationImportance importance = Objects.requireNonNull(NotificationImportance.fromEnumValue(enumValue)); return importance.getNativeValue(); } @RequiresApi(api = Build.VERSION_CODES.O) protected void configureChannelWithOptions(Object maybeChannel, ReadableArguments args) { // We cannot use NotificationChannel in the signature of the method // since it's a class available only on newer OSes and the adapter iterates // through all the methods and triggers the NoClassDefFoundError. if (!(maybeChannel instanceof NotificationChannel)) { return; } NotificationChannel channel = (NotificationChannel) maybeChannel; if (args.containsKey(NAME_KEY)) { channel.setName(getNameFromOptions(args)); } if (args.containsKey(IMPORTANCE_KEY)) { channel.setImportance(getImportanceFromOptions(args)); } if (args.containsKey(BYPASS_DND_KEY)) { channel.setBypassDnd(args.getBoolean(BYPASS_DND_KEY)); } if (args.containsKey(DESCRIPTION_KEY)) { channel.setDescription(args.getString(DESCRIPTION_KEY)); } if (args.containsKey(LIGHT_COLOR_KEY)) { channel.setLightColor(Color.parseColor(args.getString(LIGHT_COLOR_KEY))); } if (args.containsKey(GROUP_ID_KEY)) { channel.setGroup(args.getString(GROUP_ID_KEY)); } if (args.containsKey(LOCKSCREEN_VISIBILITY_KEY)) { NotificationVisibility visibility = NotificationVisibility.fromEnumValue(args.getInt(LOCKSCREEN_VISIBILITY_KEY)); if (visibility != null) { channel.setLockscreenVisibility(visibility.getNativeValue()); } } if (args.containsKey(SHOW_BADGE_KEY)) { 
channel.setShowBadge(args.getBoolean(SHOW_BADGE_KEY)); } if (args.containsKey(SOUND_KEY) || args.containsKey(SOUND_AUDIO_ATTRIBUTES_KEY)) { Uri soundUri = createSoundUriFromArguments(args); AudioAttributes soundAttributes = createAttributesFromArguments(args.getArguments(SOUND_AUDIO_ATTRIBUTES_KEY)); channel.setSound(soundUri, soundAttributes); } if (args.containsKey(VIBRATION_PATTERN_KEY)) { channel.setVibrationPattern(createVibrationPatternFromList(args.getList(VIBRATION_PATTERN_KEY))); } if (args.containsKey(ENABLE_LIGHTS_KEY)) { channel.enableLights(args.getBoolean(ENABLE_LIGHTS_KEY)); } if (args.containsKey(ENABLE_VIBRATE_KEY)) { channel.enableVibration(args.getBoolean(ENABLE_VIBRATE_KEY)); } } @Nullable protected AudioAttributes createAttributesFromArguments(@Nullable ReadableArguments args) { if (args == null) { return null; } AudioAttributes.Builder attributesBuilder = new AudioAttributes.Builder(); if (args.containsKey(AUDIO_ATTRIBUTES_USAGE_KEY)) { attributesBuilder.setUsage(AudioUsage.fromEnumValue(args.getInt(AUDIO_ATTRIBUTES_USAGE_KEY)).getNativeValue()); } if (args.containsKey(AUDIO_ATTRIBUTES_CONTENT_TYPE_KEY)) { attributesBuilder.setContentType(AudioContentType.fromEnumValue(args.getInt(AUDIO_ATTRIBUTES_CONTENT_TYPE_KEY)).getNativeValue()); } if (args.containsKey(AUDIO_ATTRIBUTES_FLAGS_KEY)) { int flags = 0; ReadableArguments flagsArgs = args.getArguments(AUDIO_ATTRIBUTES_FLAGS_KEY); if (flagsArgs.getBoolean(AUDIO_ATTRIBUTES_FLAGS_ENFORCE_AUDIBILITY_KEY)) { flags |= AudioAttributes.FLAG_AUDIBILITY_ENFORCED; } if (flagsArgs.getBoolean(AUDIO_ATTRIBUTES_FLAGS_HW_AV_SYNC_KEY)) { flags |= AudioAttributes.FLAG_HW_AV_SYNC; } attributesBuilder.setFlags(flags); } return attributesBuilder.build(); } @Nullable protected Uri createSoundUriFromArguments(ReadableArguments args) { // The default is... the default sound. 
if (!args.containsKey(SOUND_KEY)) { return Settings.System.DEFAULT_NOTIFICATION_URI; } // "null" means "no sound" String filename = args.getString(SOUND_KEY); if (filename == null) { return null; } // Otherwise it should be a sound filename return mSoundResolver.resolve(filename); } @Nullable protected long[] createVibrationPatternFromList(@Nullable List patternRequest) throws InvalidVibrationPatternException { if (patternRequest == null) { return null; } long[] pattern = new long[patternRequest.size()]; for (int i = 0; i < patternRequest.size(); i++) { if (patternRequest.get(i) instanceof Number) { pattern[i] = ((Number) patternRequest.get(i)).longValue(); } else { throw new InvalidVibrationPatternException(i, patternRequest.get(i)); } } return pattern; } }
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.flex.forks.batik.bridge; import java.awt.Font; import java.awt.font.FontRenderContext; import java.text.AttributedCharacterIterator; import java.text.CharacterIterator; import java.text.StringCharacterIterator; import java.util.StringTokenizer; import java.util.List; import java.util.ArrayList; import org.apache.flex.forks.batik.css.engine.SVGCSSEngine; import org.apache.flex.forks.batik.css.engine.value.Value; import org.apache.flex.forks.batik.dom.util.XMLSupport; import org.apache.flex.forks.batik.gvt.font.GVTFont; import org.apache.flex.forks.batik.gvt.font.GVTFontFace; import org.apache.flex.forks.batik.gvt.font.GVTGlyphVector; import org.apache.flex.forks.batik.gvt.font.GVTLineMetrics; import org.apache.flex.forks.batik.gvt.font.Glyph; import org.apache.flex.forks.batik.gvt.font.Kern; import org.apache.flex.forks.batik.gvt.font.KerningTable; import org.apache.flex.forks.batik.gvt.font.SVGGVTGlyphVector; import org.apache.flex.forks.batik.gvt.text.GVTAttributedCharacterIterator; import org.apache.flex.forks.batik.gvt.text.TextPaintInfo; import org.apache.flex.forks.batik.util.SVGConstants; import org.w3c.dom.Element; /** * Represents an SVG font. 
* * @author <a href="mailto:[email protected]">Bella Robinson</a> * @version $Id: SVGGVTFont.java 489226 2006-12-21 00:05:36Z cam $ */ public final class SVGGVTFont implements GVTFont, SVGConstants { public static final AttributedCharacterIterator.Attribute PAINT_INFO = GVTAttributedCharacterIterator.TextAttribute.PAINT_INFO; private float fontSize; private GVTFontFace fontFace; private String[] glyphUnicodes; private String[] glyphNames; private String[] glyphLangs; private String[] glyphOrientations; private String[] glyphForms; private Element[] glyphElements; private Element[] hkernElements; private Element[] vkernElements; private BridgeContext ctx; private Element textElement; private Element missingGlyphElement; private KerningTable hKerningTable; private KerningTable vKerningTable; private String language; private String orientation; private float scale; private GVTLineMetrics lineMetrics=null; /** * Constructs a new SVGGVTFont of the specified size. * * @param fontSize The size of the font to create. * @param fontFace The font face that describes the font. * @param glyphUnicodes An array containing the unicode values for * all the glyphs this font can display. * @param glyphNames An array containing the names of all the * glyphs this font can display. * @param ctx The bridge context. * @param glyphElements An array containing the children glyph * elements of the SVG font. * @param missingGlyphElement The missing glyph element for this * font. * @param hkernElements An array containing all hkern elements for * this font. * @param vkernElements An array containing all vkern elements for * this font. * @param textElement The text element that contains the text to * be rendered using this font. 
*/ public SVGGVTFont(float fontSize, GVTFontFace fontFace, String[] glyphUnicodes, String[] glyphNames, String[] glyphLangs, String[] glyphOrientations, String[] glyphForms, BridgeContext ctx, Element[] glyphElements, Element missingGlyphElement, Element[] hkernElements, Element[] vkernElements, Element textElement) { this.fontFace = fontFace; this.fontSize = fontSize; this.glyphUnicodes = glyphUnicodes; this.glyphNames = glyphNames; this.glyphLangs = glyphLangs; this.glyphOrientations = glyphOrientations; this.glyphForms = glyphForms; this.ctx = ctx; this.glyphElements = glyphElements; this.missingGlyphElement = missingGlyphElement; this.hkernElements = hkernElements; this.vkernElements = vkernElements; this.scale = fontSize/fontFace.getUnitsPerEm(); this.textElement = textElement; this.language = XMLSupport.getXMLLang(textElement); Value v = CSSUtilities.getComputedStyle (textElement, SVGCSSEngine.WRITING_MODE_INDEX); if (v.getStringValue().startsWith(CSS_TB_VALUE)) { // top to bottom, so set orientation to "v" this.orientation = SVG_V_VALUE; } else { this.orientation = SVG_H_VALUE; } createKerningTables(); } /** * Creates the kerning tables for this font. Two tables are created, * horizontal and vertical. If there are not children vkern or hkern * elements these tables will be empty. 
*/ private void createKerningTables() { Kern[] hEntries = new Kern[hkernElements.length]; for (int i = 0; i < hkernElements.length; i++) { Element hkernElement = hkernElements[i]; SVGHKernElementBridge hkernBridge = (SVGHKernElementBridge)ctx.getBridge(hkernElement); Kern hkern = hkernBridge.createKern(ctx, hkernElement, this); hEntries[i] = hkern; } hKerningTable = new KerningTable(hEntries); Kern[] vEntries = new Kern[vkernElements.length]; for (int i = 0; i < vkernElements.length; i++) { Element vkernElement = vkernElements[i]; SVGVKernElementBridge vkernBridge = (SVGVKernElementBridge)ctx.getBridge(vkernElement); Kern vkern = vkernBridge.createKern(ctx, vkernElement, this); vEntries[i] = vkern; } vKerningTable = new KerningTable(vEntries); } /** * Returns the horizontal kerning value for the specified glyph pair. * This will be zero if there is no explicit horizontal kerning value * for this particular glyph pair. * * @param glyphCode1 The id of the first glyph. * @param glyphCode2 The id of the second glyph. * * @return The horizontal kerning value. */ public float getHKern(int glyphCode1, int glyphCode2) { if (glyphCode1 < 0 || glyphCode1 >= glyphUnicodes.length || glyphCode2 < 0 || glyphCode2 >= glyphUnicodes.length) { return 0f; } float ret; ret = hKerningTable.getKerningValue(glyphCode1, glyphCode2, glyphUnicodes[glyphCode1], glyphUnicodes[glyphCode2]); return ret*scale; } /** * Returns the vertical kerning value for the specified glyph pair. * This will be zero if there is no explicit vertical kerning value for * for this particular glyph pair. * * @param glyphCode1 The id of the first glyph. * @param glyphCode2 The id of the second glyph. * * @return The vertical kerning value. 
*/ public float getVKern(int glyphCode1, int glyphCode2) { if (glyphCode1 < 0 || glyphCode1 >= glyphUnicodes.length || glyphCode2 < 0 || glyphCode2 >= glyphUnicodes.length) { return 0f; } float ret; ret = vKerningTable.getKerningValue(glyphCode1, glyphCode2, glyphUnicodes[glyphCode1], glyphUnicodes[glyphCode2]); return ret*scale; } /** * Returns an array of glyph codes (unique ids) of the glyphs with the * specified name (there may be more than one). * * @param name The name of the glyph. * * @return An array of matching glyph codes. This may be empty. */ public int[] getGlyphCodesForName(String name) { List glyphCodes = new ArrayList(); for (int i = 0; i < glyphNames.length; i++) { if (glyphNames[i] != null && glyphNames[i].equals(name)) { glyphCodes.add(new Integer(i)); } } int[] glyphCodeArray = new int[glyphCodes.size()]; for (int i = 0; i < glyphCodes.size(); i++) { glyphCodeArray[i] = ((Integer)glyphCodes.get(i)).intValue(); } return glyphCodeArray; } /** * Returns an array of glyph codes (unique ids) of the glyphs with the * specified unicode value (there may be more than one). * * @param unicode The unicode value of the glyph. * * @return An array of matching glyph codes. This may be empty. */ public int[] getGlyphCodesForUnicode(String unicode) { List glyphCodes = new ArrayList(); for (int i = 0; i < glyphUnicodes.length; i++) { if (glyphUnicodes[i] != null && glyphUnicodes[i].equals(unicode)) { glyphCodes.add(new Integer(i)); } } int[] glyphCodeArray = new int[glyphCodes.size()]; for (int i = 0; i < glyphCodes.size(); i++) { glyphCodeArray[i] = ((Integer)glyphCodes.get(i)).intValue(); } return glyphCodeArray; } /** * Returns true if the glyph language matches the language of the * text node to be rendered by this font. This will be the case * if one of the languages in glyphLang matches exactly with the * xml:lang attibute of the text node, or if the xml:lang * attribute exactly equals a prefix of one glyph languages. 
* * @param glyphLang A comma separated list of languages that are associated * with a glyph. * * @return Whether or not the glyph language matches the language of the * text node. */ private boolean languageMatches(String glyphLang) { if (glyphLang == null || glyphLang.length() == 0) { return true; // will match all languages } StringTokenizer st = new StringTokenizer(glyphLang, ","); while (st.hasMoreTokens()) { String s = st.nextToken(); if (s.equals(language) || (s.startsWith(language) && s.length() > language.length() && s.charAt(language.length()) == '-')) { return true; } } return false; } /** * Returns true if the glyph orientation matches the orientation of the * text node to be rendered by this font. * * @param glyphOrientation The glyph orientation attribute value. Will be * "h", "v" or empty. * * @return Whether or not the glyph orientation matches the text to be * rendered by this font object. */ private boolean orientationMatches(String glyphOrientation) { if (glyphOrientation == null || glyphOrientation.length() == 0) { return true; } return glyphOrientation.equals(orientation); } /** * Returns true if the glyph form matches that of the current character in * the aci. * * @param glyphUnicode The unicode value of the glyph. * @param glyphForm The arabic-form glyph attribute. * @param aci The aci containing the character to check. * @param currentIndex The index of the character to check. 
*/ private boolean formMatches(String glyphUnicode, String glyphForm, AttributedCharacterIterator aci, int currentIndex) { if (aci == null || glyphForm == null || glyphForm.length() == 0) { // there aren't any attributes attached to the text // or the glyph doesn't have an arabic form return true; } char c = aci.setIndex(currentIndex); Integer form = (Integer)aci.getAttribute (GVTAttributedCharacterIterator.TextAttribute.ARABIC_FORM); if (form == null || form.equals (GVTAttributedCharacterIterator.TextAttribute.ARABIC_NONE)) { // the glyph has an arabic form and the current character // form is "none" so don't match return false; } // see if c is the start of an arabic ligature if (glyphUnicode.length() > 1) { boolean matched = true; for (int j = 1; j < glyphUnicode.length(); j++) { c = aci.next(); if (glyphUnicode.charAt(j) != c) { matched = false; break; } } // reset the aci aci.setIndex(currentIndex); if (matched) { // ligature matches, now check that the arabic forms are ok aci.setIndex(currentIndex + glyphUnicode.length() - 1); Integer lastForm = (Integer)aci.getAttribute( GVTAttributedCharacterIterator.TextAttribute.ARABIC_FORM); // reset the aci again aci.setIndex(currentIndex); if (form != null && lastForm != null) { if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_TERMINAL) && lastForm.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_INITIAL)) { // return true if the glyph form is isolated return glyphForm.equals (SVGConstants.SVG_ISOLATED_VALUE); } else if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_TERMINAL)) { // return true if the glyph form is terminal return glyphForm.equals (SVGConstants.SVG_TERMINAL_VALUE); } else if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_MEDIAL) && lastForm.equals(GVTAttributedCharacterIterator. 
TextAttribute.ARABIC_MEDIAL)) { // return true if the glyph form is medial return glyphForm.equals(SVGConstants.SVG_MEDIAL_VALUE); } // should test for other combos as well here } } } if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_ISOLATED)) { return glyphForm.equals(SVGConstants.SVG_ISOLATED_VALUE); } if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_TERMINAL)) { return glyphForm.equals(SVGConstants.SVG_TERMINAL_VALUE); } if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_INITIAL)) { return glyphForm.equals(SVGConstants.SVG_INITIAL_VALUE); } if (form.equals(GVTAttributedCharacterIterator. TextAttribute.ARABIC_MEDIAL)) { return glyphForm.equals(SVGConstants.SVG_MEDIAL_VALUE); } return false; } /** * Indicates whether or not the specified glyph can be displayed by this * font. * * @param name The name of the glyph to check. * * @return true if the glyph can be displayed. */ public boolean canDisplayGivenName(String name) { for (int i = 0; i < glyphNames.length; i++) { if (glyphNames[i] != null && glyphNames[i].equals(name) && languageMatches(glyphLangs[i]) && orientationMatches(glyphOrientations[i])) { return true; } } return false; } /** * Indicates whether or not the specified character can be * displayed by this font. * * @param c The character to check. * * @return true if the character can be displayed. */ public boolean canDisplay(char c) { for (int i = 0; i < glyphUnicodes.length; i++) { if (glyphUnicodes[i].indexOf(c) != -1 && languageMatches(glyphLangs[i]) && orientationMatches(glyphOrientations[i])) { return true; } } return false; } /** * Checks whether this Font can display the characters in the * specified character array starting at start and ending at limit. * * @param text An array containing the characters to check. * @param start The index of the first character to check. * @param limit The index of the last character to check. 
     *
     * @return The index of the first character it can't display or -1 if
     * it can display the whole string.
     */
    public int canDisplayUpTo(char[] text, int start, int limit) {
        // delegate to the iterator-based implementation
        StringCharacterIterator sci =
            new StringCharacterIterator(new String(text));
        return canDisplayUpTo(sci, start, limit);
    }

    /**
     * Checks whether this Font can display the characters in the
     * specified character iterator starting at start and ending at limit.
     *
     * @param iter The iterator containing the characters to check.
     * @param start The index of the first character to check.
     * @param limit The index of the last character to check.
     *
     * @return The index of the first character it can't display or -1 if
     * it can display the whole string.
     */
    public int canDisplayUpTo(CharacterIterator iter, int start, int limit) {
        // Attributes (e.g. arabic form) are only available when the caller
        // hands us an AttributedCharacterIterator.
        AttributedCharacterIterator aci = null;
        if (iter instanceof AttributedCharacterIterator) {
            aci = (AttributedCharacterIterator)iter;
        }
        char c = iter.setIndex(start);
        int currentIndex = start;

        while (c != CharacterIterator.DONE && currentIndex < limit) {
            boolean foundMatchingGlyph = false;
            for (int i = 0; i < glyphUnicodes.length; i++) {
                // indexOf(c) == 0: the glyph's unicode string must START with
                // the current character
                if (glyphUnicodes[i].indexOf(c) == 0
                    && languageMatches(glyphLangs[i])
                    && orientationMatches(glyphOrientations[i])
                    && formMatches(glyphUnicodes[i], glyphForms[i], aci,
                                   currentIndex)) {
                    // found a possible match

                    if (glyphUnicodes[i].length() == 1) {
                        // not a ligature
                        foundMatchingGlyph = true;
                        break;
                    } else {
                        // glyphCodes[i] is a ligature so try and
                        // match the rest of the glyphCode chars
                        boolean matched = true;
                        for (int j = 1; j < glyphUnicodes[i].length(); j++) {
                            c = iter.next();
                            if (glyphUnicodes[i].charAt(j) != c) {
                                matched = false;
                                break;
                            }
                        }
                        if (matched) {
                            // found a matching ligature!
                            foundMatchingGlyph = true;
                            break;
                        } else {
                            // did not match ligature, keep looking
                            // for another glyph; rewind to where we were
                            c = iter.setIndex(currentIndex);
                        }
                    }
                }
            }
            if (!foundMatchingGlyph) {
                return currentIndex;
            }
            // on a ligature match the iterator already sits on its last
            // character, so next() moves past the whole ligature
            c = iter.next();
            currentIndex = iter.getIndex();
        }
        return -1;
    }

    /**
     * Checks whether or not this font can display the characters in the
     * specified String.
     *
     * @param str The string containing the characters to check.
     *
     * @return The index of the first character it can't display or -1 if
     * it can display the whole string.
     */
    public int canDisplayUpTo(String str) {
        StringCharacterIterator sci = new StringCharacterIterator(str);
        return canDisplayUpTo(sci, 0, str.length());
    }

    /**
     * Returns a new GVTGlyphVector object for the specified array of
     * characters.
     *
     * @param frc The current font render context.
     * @param chars The array of chars that the glyph vector will represent.
     *
     * @return The new glyph vector.
     */
    public GVTGlyphVector createGlyphVector(FontRenderContext frc,
                                            char[] chars) {
        // delegate to the iterator-based implementation
        StringCharacterIterator sci =
            new StringCharacterIterator(new String(chars));
        return createGlyphVector(frc, sci);
    }

    /**
     * Returns a new GVTGlyphVector object for the characters in the
     * specified character iterator.
     *
     * @param frc The current font render context.
     * @param ci The character iterator that the glyph vector will represent.
     *
     * @return The new glyph vector.
     */
    public GVTGlyphVector createGlyphVector(FontRenderContext frc,
                                            CharacterIterator ci) {
        // Builds one Glyph per matched character (or ligature); characters
        // with no matching glyph get the font's missing-glyph element.
        AttributedCharacterIterator aci = null;
        if (ci instanceof AttributedCharacterIterator) {
            aci = (AttributedCharacterIterator)ci;
        }

        List glyphs = new ArrayList();
        char c = ci.first();
        while (c != CharacterIterator.DONE) {
            boolean foundMatchingGlyph = false;
            for (int i = 0; i < glyphUnicodes.length; i++) {
                // glyph must start with c and satisfy lang/orientation/form
                if (glyphUnicodes[i].indexOf(c) == 0
                    && languageMatches(glyphLangs[i])
                    && orientationMatches(glyphOrientations[i])
                    && formMatches(glyphUnicodes[i], glyphForms[i], aci,
                                   ci.getIndex())) {
                    // found a possible match

                    if (glyphUnicodes[i].length() == 1) {
                        // not a ligature
                        Element glyphElement = glyphElements[i];
                        SVGGlyphElementBridge glyphBridge =
                            (SVGGlyphElementBridge)ctx.getBridge(glyphElement);
                        TextPaintInfo tpi = null;
                        if (aci != null) {
                            tpi = (TextPaintInfo)aci.getAttribute(PAINT_INFO);
                        }
                        Glyph glyph = glyphBridge.createGlyph
                            (ctx, glyphElement, textElement, i,
                             fontSize, fontFace, tpi);
                        glyphs.add(glyph);
                        foundMatchingGlyph = true;
                        break;
                    } else {
                        // glyphCodes[i] is a ligature so try and
                        // match the rest of the glyphCode chars
                        int current = ci.getIndex();
                        boolean matched = true;
                        for (int j = 1; j < glyphUnicodes[i].length(); j++) {
                            c = ci.next();
                            if (glyphUnicodes[i].charAt(j) != c) {
                                matched = false;
                                break;
                            }
                        }
                        if (matched) {
                            // found a matching ligature!
                            Element glyphElement = glyphElements[i];
                            SVGGlyphElementBridge glyphBridge =
                                (SVGGlyphElementBridge)ctx.getBridge
                                (glyphElement);
                            TextPaintInfo tpi = null;
                            if (aci != null) {
                                // paint info is taken at the END of the
                                // ligature
                                aci.setIndex(ci.getIndex());
                                tpi = (TextPaintInfo)aci.getAttribute
                                    (PAINT_INFO);
                            }
                            Glyph glyph = glyphBridge.createGlyph
                                (ctx, glyphElement, textElement, i,
                                 fontSize, fontFace, tpi);
                            glyphs.add(glyph);
                            foundMatchingGlyph = true;
                            break;
                        } else {
                            // did not match ligature, keep looking
                            // for another glyph; rewind the iterator
                            c = ci.setIndex(current);
                        }
                    }
                }
            }
            if (!foundMatchingGlyph) {
                // add the missing glyph
                SVGGlyphElementBridge glyphBridge =
                    (SVGGlyphElementBridge)ctx.getBridge(missingGlyphElement);
                TextPaintInfo tpi = null;
                if (aci != null) {
                    aci.setIndex(ci.getIndex());
                    tpi = (TextPaintInfo)aci.getAttribute(PAINT_INFO);
                }
                // glyph code -1 marks the missing glyph
                Glyph glyph = glyphBridge.createGlyph
                    (ctx, missingGlyphElement, textElement, -1,
                     fontSize, fontFace, tpi);
                glyphs.add(glyph);
            }
            c = ci.next();
        }

        // turn the vector of glyphs into an array;
        int numGlyphs = glyphs.size();

        Glyph[] glyphArray = (Glyph[])glyphs.toArray( new Glyph[numGlyphs] );

        // return a new SVGGVTGlyphVector
        return new SVGGVTGlyphVector(this, glyphArray, frc);
    }

    /**
     * Returns a new GVTGlyphVector object for the glyphs in the
     * the glyph code array.
     *
     * @param frc The current font render context.
     * @param glyphCodes An array containin the ids of the glyphs that
     * the glyph vector will represent.
     *
     * @return The new glyph vector.
     */
    public GVTGlyphVector createGlyphVector(FontRenderContext frc,
                                            int[] glyphCodes,
                                            CharacterIterator ci) {
        // construct a string from the glyphCodes
        int nGlyphs = glyphCodes.length;
        StringBuffer workBuff = new StringBuffer( nGlyphs );
        for (int i = 0; i < nGlyphs; i++) {
            workBuff.append( glyphUnicodes[glyphCodes[i]] );
        }
        StringCharacterIterator sci =
            new StringCharacterIterator( workBuff.toString() );
        return createGlyphVector(frc, sci);
    }

    /**
     * Returns a new GVTGlyphVector object for the specified String.
     *
     * @param frc The current font render context.
     * @param str The string that the glyph vector will represent.
     *
     * @return The new glyph vector.
     */
    public GVTGlyphVector createGlyphVector(FontRenderContext frc,
                                            String str) {
        StringCharacterIterator sci = new StringCharacterIterator(str);
        return createGlyphVector(frc, sci);
    }

    /**
     * Creates a new GVTFont object by replicating this font object and
     * applying a new size to it.
     *
     * @param size The size of the new font.
     *
     * @return The new font object.
     */
    public GVTFont deriveFont(float size) {
        // everything except the size is shared with this instance
        return new SVGGVTFont(size, fontFace, glyphUnicodes, glyphNames,
                              glyphLangs, glyphOrientations, glyphForms, ctx,
                              glyphElements, missingGlyphElement,
                              hkernElements, vkernElements, textElement);
    }

    /**
     * Returns the family name of this font as declared by its font face.
     *
     * @return The font family name.
     */
    public String getFamilyName() {
        return fontFace.getFamilyName();
    }

    /**
     * Builds (and caches) the line metrics for this font, scaled from
     * font-face units to the current font size.
     *
     * @param beginIndex The index of the first character.
     * @param limit The limit of characters.
     *
     * @return The cached or newly created GVTLineMetrics object.
     */
    protected GVTLineMetrics getLineMetrics(int beginIndex, int limit) {
        // NOTE(review): the cached instance bakes in limit-beginIndex from
        // the FIRST call; later calls with a different range get the stale
        // numChars value — confirm whether callers depend on it.
        if (lineMetrics != null)
            return lineMetrics;

        // scale factor from font-face design units to the requested size
        float fontHeight = fontFace.getUnitsPerEm();
        float scale = fontSize/fontHeight;

        float ascent = fontFace.getAscent() * scale;
        float descent = fontFace.getDescent() * scale;

        float[] baselineOffsets = new float[3];
        baselineOffsets[Font.ROMAN_BASELINE]   = 0;
        baselineOffsets[Font.CENTER_BASELINE]  = (ascent+descent)/2-ascent;
        baselineOffsets[Font.HANGING_BASELINE] = -ascent;

        // negative scale: strikethrough/overline positions are flipped
        float stOffset = fontFace.getStrikethroughPosition() * -scale;
        float stThickness = fontFace.getStrikethroughThickness() * scale;
        float ulOffset = fontFace.getUnderlinePosition() * scale;
        float ulThickness = fontFace.getUnderlineThickness() * scale;
        float olOffset = fontFace.getOverlinePosition() * -scale;
        float olThickness = fontFace.getOverlineThickness() * scale;

        lineMetrics = new GVTLineMetrics
            (ascent, Font.ROMAN_BASELINE, baselineOffsets, descent,
             fontHeight, fontHeight, limit-beginIndex,
             stOffset, stThickness, ulOffset, ulThickness,
             olOffset, olThickness);
        return lineMetrics;
    }

    /**
     * Returns the line metrics for the specified text.
     *
     * @param chars The character array containing the text.
     * @param beginIndex The index of the first character.
     * @param limit The limit of characters.
     * @param frc The current font render context.
     *
     * @return The new GVTLineMetrics object.
     */
    public GVTLineMetrics getLineMetrics(char[] chars, int beginIndex,
                                         int limit, FontRenderContext frc) {
        // the character content does not affect the metrics
        return getLineMetrics(beginIndex, limit);
    }

    /**
     * Returns the line metrics for the specified text.
     *
     * @param ci The character iterator containing the text.
     * @param beginIndex The index of the first character.
     * @param limit The limit of characters.
     * @param frc The current font render context.
     *
     * @return The new GVTLineMetrics object.
     */
    public GVTLineMetrics getLineMetrics(CharacterIterator ci, int beginIndex,
                                         int limit, FontRenderContext frc) {
        return getLineMetrics(beginIndex, limit);
    }

    /**
     * Returns the line metrics for the specified text.
     *
     * @param str The string containing the text.
     * @param frc The current font render context.
     *
     * @return The new GVTLineMetrics object.
     */
    public GVTLineMetrics getLineMetrics(String str, FontRenderContext frc) {
        StringCharacterIterator sci = new StringCharacterIterator(str);
        return getLineMetrics(sci, 0, str.length(), frc);
    }

    /**
     * Returns the line metrics for the specified text.
     *
     * @param str The string containing the text.
     * @param beginIndex The index of the first character.
     * @param limit The limit of characters.
     * @param frc The current font render context.
     *
     * @return The new GVTLineMetrics object.
     */
    public GVTLineMetrics getLineMetrics(String str, int beginIndex, int limit,
                                         FontRenderContext frc) {
        StringCharacterIterator sci = new StringCharacterIterator(str);
        return getLineMetrics(sci, beginIndex, limit, frc);
    }

    /**
     * Returns the size of this font.
     *
     * @return The font size.
     */
    public float getSize() {
        return fontSize;
    }

    /**
     * Returns a string representation of this font.
     * This is for debugging purposes only.
     *
     * @return A string representation of this font.
*/ public String toString() { return fontFace.getFamilyName() + " " + fontFace.getFontWeight() + " " + fontFace.getFontStyle(); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search.geo;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

import java.io.IOException;

/**
 * A Lucene filter that matches documents whose geo point lies within a
 * distance range [lowerVal, upperVal] of a fixed source point, optionally
 * pre-filtered by a bounding box ("memory" or "indexed" optimization).
 */
public class GeoDistanceRangeFilter extends Filter {

    // source point of the distance calculation
    private final double lat;
    private final double lon;

    private final double inclusiveLowerPoint; // in meters
    private final double inclusiveUpperPoint; // in meters

    private final GeoDistance geoDistance;
    private final GeoDistance.FixedSourceDistance fixedSourceDistance;
    // not final: reassigned to ALWAYS_INSTANCE when an indexed bbox filter
    // takes over the bounding check
    private GeoDistance.DistanceBoundingCheck distanceBoundingCheck;
    private final Filter boundingBoxFilter;

    private final IndexGeoPointFieldData indexFieldData;

    /**
     * @param point        source point the distances are measured from
     * @param lowerVal     lower distance bound, or null for unbounded
     * @param upperVal     upper distance bound, or null for unbounded
     * @param includeLower whether the lower bound itself matches
     * @param includeUpper whether the upper bound itself matches
     * @param geoDistance  distance computation strategy
     * @param mapper       field mapper (used for the indexed bbox filter)
     * @param indexFieldData field data supplying per-document geo points
     * @param optimizeBbox "memory", "indexed", "none" or null
     */
    public GeoDistanceRangeFilter(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance,
                                  GeoPointFieldMapper mapper, IndexGeoPointFieldData indexFieldData,
                                  String optimizeBbox) {
        this.lat = point.lat();
        this.lon = point.lon();
        this.geoDistance = geoDistance;
        this.indexFieldData = indexFieldData;

        this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, DistanceUnit.DEFAULT);

        if (lowerVal != null) {
            // sortable-long trick: +1 on the sortable form yields the next
            // representable double, turning an exclusive bound inclusive
            double f = lowerVal.doubleValue();
            long i = NumericUtils.doubleToSortableLong(f);
            inclusiveLowerPoint = NumericUtils.sortableLongToDouble(includeLower ? i : (i + 1L));
        } else {
            inclusiveLowerPoint = Double.NEGATIVE_INFINITY;
        }
        if (upperVal != null) {
            // mirror of the lower bound: -1 steps to the previous double
            double f = upperVal.doubleValue();
            long i = NumericUtils.doubleToSortableLong(f);
            inclusiveUpperPoint = NumericUtils.sortableLongToDouble(includeUpper ? i : (i - 1L));
        } else {
            inclusiveUpperPoint = Double.POSITIVE_INFINITY;
            // we disable bounding box in this case, since the upper point is all and we create bounding box up to the
            // upper point it will effectively include all
            // TODO we can create a bounding box up to from and "not" it
            optimizeBbox = null;
        }

        if (optimizeBbox != null && !"none".equals(optimizeBbox)) {
            distanceBoundingCheck = GeoDistance.distanceBoundingCheck(lat, lon, inclusiveUpperPoint, DistanceUnit.DEFAULT);
            if ("memory".equals(optimizeBbox)) {
                // bbox checked per document in matchDoc, no extra filter
                boundingBoxFilter = null;
            } else if ("indexed".equals(optimizeBbox)) {
                boundingBoxFilter = IndexedGeoBoundingBoxFilter.create(distanceBoundingCheck.topLeft(), distanceBoundingCheck.bottomRight(), mapper);
                distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
            } else {
                throw new ElasticsearchIllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported");
            }
        } else {
            distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE;
            boundingBoxFilter = null;
        }
    }

    public double lat() {
        return lat;
    }

    public double lon() {
        return lon;
    }

    public GeoDistance geoDistance() {
        return geoDistance;
    }

    @Override
    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptedDocs) throws IOException {
        DocIdSet boundingBoxDocSet = null;
        if (boundingBoxFilter != null) {
            boundingBoxDocSet = boundingBoxFilter.getDocIdSet(context, acceptedDocs);
            if (DocIdSets.isEmpty(boundingBoxDocSet)) {
                // null means "no documents" to Lucene
                return null;
            }
        }
        GeoPointValues values = indexFieldData.load(context).getGeoPointValues();
        GeoDistanceRangeDocSet distDocSet = new GeoDistanceRangeDocSet(context.reader().maxDoc(), acceptedDocs, values, fixedSourceDistance, distanceBoundingCheck, inclusiveLowerPoint, inclusiveUpperPoint);
        if (boundingBoxDocSet == null) {
            return distDocSet;
        } else {
            // intersect the bbox pre-filter with the exact distance check
            return new AndDocIdSet(new DocIdSet[]{boundingBoxDocSet, distDocSet});
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        GeoDistanceRangeFilter filter = (GeoDistanceRangeFilter) o;

        if (Double.compare(filter.inclusiveLowerPoint, inclusiveLowerPoint) != 0) return false;
        if (Double.compare(filter.inclusiveUpperPoint, inclusiveUpperPoint) != 0) return false;
        if (Double.compare(filter.lat, lat) != 0) return false;
        if (Double.compare(filter.lon, lon) != 0) return false;
        if (!indexFieldData.getFieldNames().indexName().equals(filter.indexFieldData.getFieldNames().indexName()))
            return false;
        if (geoDistance != filter.geoDistance) return false;

        return true;
    }

    @Override
    public String toString() {
        return "GeoDistanceRangeFilter(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
    }

    @Override
    public int hashCode() {
        // must hash exactly the fields compared in equals()
        int result;
        long temp;
        temp = lat != +0.0d ? Double.doubleToLongBits(lat) : 0L;
        result = (int) (temp ^ (temp >>> 32));
        temp = lon != +0.0d ? Double.doubleToLongBits(lon) : 0L;
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        temp = inclusiveLowerPoint != +0.0d ? Double.doubleToLongBits(inclusiveLowerPoint) : 0L;
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        temp = inclusiveUpperPoint != +0.0d ? Double.doubleToLongBits(inclusiveUpperPoint) : 0L;
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0);
        result = 31 * result + indexFieldData.getFieldNames().indexName().hashCode();
        return result;
    }

    /**
     * Per-segment doc set that keeps documents whose geo point distance
     * from the fixed source falls within the inclusive range.
     */
    public static class GeoDistanceRangeDocSet extends MatchDocIdSet {

        private final GeoPointValues values;
        private final GeoDistance.FixedSourceDistance fixedSourceDistance;
        private final GeoDistance.DistanceBoundingCheck distanceBoundingCheck;
        // same value (and unit) as the outer filter's inclusiveLowerPoint,
        // which is documented above as meters (DistanceUnit.DEFAULT)
        private final double inclusiveLowerPoint;
        private final double inclusiveUpperPoint; // same unit as above

        public GeoDistanceRangeDocSet(int maxDoc, @Nullable Bits acceptDocs, GeoPointValues values, GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistance.DistanceBoundingCheck distanceBoundingCheck,
                                      double inclusiveLowerPoint, double inclusiveUpperPoint) {
            super(maxDoc, acceptDocs);
            this.values = values;
            this.fixedSourceDistance = fixedSourceDistance;
            this.distanceBoundingCheck = distanceBoundingCheck;
            this.inclusiveLowerPoint = inclusiveLowerPoint;
            this.inclusiveUpperPoint = inclusiveUpperPoint;
        }

        @Override
        public boolean isCacheable() {
            return true;
        }

        @Override
        protected boolean matchDoc(int doc) {
            // multi-valued field: any value within range matches
            final int length = values.setDocument(doc);
            for (int i = 0; i < length; i++) {
                GeoPoint point = values.nextValue();
                // cheap bbox rejection before the exact distance computation
                if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
                    double d = fixedSourceDistance.calculate(point.lat(), point.lon());
                    if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
                        return true;
                    }
                }
            }
            return false;
        }
    }
}
package org.laladev.moneyjinn.server.controller.moneyflow; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.laladev.moneyjinn.core.error.ErrorCode; import org.laladev.moneyjinn.core.rest.model.ErrorResponse; import org.laladev.moneyjinn.core.rest.model.moneyflow.SearchMoneyflowsRequest; import org.laladev.moneyjinn.core.rest.model.moneyflow.SearchMoneyflowsResponse; import org.laladev.moneyjinn.core.rest.model.moneyflow.transport.MoneyflowSearchParamsTransport; import org.laladev.moneyjinn.core.rest.model.transport.ContractpartnerTransport; import org.laladev.moneyjinn.core.rest.model.transport.MoneyflowTransport; import org.laladev.moneyjinn.core.rest.model.transport.PostingAccountTransport; import org.laladev.moneyjinn.core.rest.model.transport.ValidationItemTransport; import org.laladev.moneyjinn.server.builder.ContractpartnerTransportBuilder; import org.laladev.moneyjinn.server.builder.DateUtil; import org.laladev.moneyjinn.server.builder.MoneyflowSplitEntryTransportBuilder; import org.laladev.moneyjinn.server.builder.MoneyflowTransportBuilder; import org.laladev.moneyjinn.server.builder.PostingAccountTransportBuilder; import org.laladev.moneyjinn.server.builder.UserTransportBuilder; import org.laladev.moneyjinn.server.controller.AbstractControllerTest; import org.springframework.http.HttpMethod; import org.springframework.test.context.jdbc.Sql; public class SearchMoneyflowsTest extends AbstractControllerTest { private static final Short SHORT_1 = (short) 1; private final HttpMethod method = HttpMethod.PUT; private String userName; private String userPassword; @BeforeEach public void setUp() { this.userName = UserTransportBuilder.USER1_NAME; this.userPassword = UserTransportBuilder.USER1_PASSWORD; } @Override protected String getUsername() { return 
this.userName; } @Override protected String getPassword() { return this.userPassword; } @Override protected String getUsecase() { return super.getUsecaseFromTestClassName(this.getClass()); } private SearchMoneyflowsResponse getDefaultResponse() { final SearchMoneyflowsResponse expected = new SearchMoneyflowsResponse(); final List<PostingAccountTransport> postingAccountTransports = new ArrayList<>(); postingAccountTransports.add(new PostingAccountTransportBuilder().forPostingAccount1().build()); postingAccountTransports.add(new PostingAccountTransportBuilder().forPostingAccount2().build()); postingAccountTransports.add(new PostingAccountTransportBuilder().forPostingAccount3().build()); expected.setPostingAccountTransports(postingAccountTransports); final List<ContractpartnerTransport> contractpartnerTransports = new ArrayList<>(); contractpartnerTransports.add(new ContractpartnerTransportBuilder().forContractpartner1().build()); contractpartnerTransports.add(new ContractpartnerTransportBuilder().forContractpartner2().build()); contractpartnerTransports.add(new ContractpartnerTransportBuilder().forContractpartner3().build()); contractpartnerTransports.add(new ContractpartnerTransportBuilder().forContractpartner4().build()); expected.setContractpartnerTransports(contractpartnerTransports); return expected; } private void assertEquals(final SearchMoneyflowsResponse expected, final SearchMoneyflowsResponse actual) { if (expected.getMoneyflowTransports() != null) { Collections.sort(expected.getMoneyflowTransports(), new MoneyflowTransportComparator()); } if (actual.getMoneyflowTransports() != null) { Collections.sort(actual.getMoneyflowTransports(), new MoneyflowTransportComparator()); } Assertions.assertEquals(expected, actual); } private class MoneyflowTransportComparator implements Comparator<MoneyflowTransport> { @Override public int compare(final MoneyflowTransport o1, final MoneyflowTransport o2) { if (o1 == null) { if (o2 == null) { return 0; } return 
Integer.MIN_VALUE; } else { if (o2 == null) { return Integer.MAX_VALUE; } int result = 0; if (o1.getId() == null) { if (o2.getId() != null) { return Integer.MIN_VALUE; } } else { result = o1.getId().compareTo(o2.getId()); } if (result != 0) { return result; } if (o1.getAmount() == null) { if (o2.getAmount() != null) { return Integer.MIN_VALUE; } } else { result = o1.getAmount().compareTo(o2.getAmount()); } if (result != 0) { return result; } if (o1.getComment() == null) { if (o2.getComment() != null) { return Integer.MIN_VALUE; } } else { result = o1.getComment().compareTo(o2.getComment()); } return result; } } } @Test public void test_searchString_successfull() throws Exception { final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport(); transport.setSearchString("ENERATED"); request.setMoneyflowSearchParamsTransport(transport); final SearchMoneyflowsResponse expected = this.getDefaultResponse(); this.fillYearlySearchGenerated(expected); final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false, SearchMoneyflowsResponse.class); this.assertEquals(expected, actual); } @Test public void test_searchStringDateRange_successfull() throws Exception { final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport(); transport.setSearchString("ENERATED"); transport.setStartDate(DateUtil.getGmtDate("2009-05-01")); transport.setEndDate(DateUtil.getGmtDate("2009-11-10")); request.setMoneyflowSearchParamsTransport(transport); final SearchMoneyflowsResponse expected = this.getDefaultResponse(); final List<MoneyflowTransport> moneyflowTransports = new ArrayList<>(); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow6().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow7().build()); 
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow8().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow9().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow10().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow11().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow12().build());
		expected.setMoneyflowTransports(moneyflowTransports);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_caseSensitive_noMatches() throws Exception {
		// with case sensitivity on, "ENERATED" must not match lowercase data
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("ENERATED");
		transport.setFeatureCaseSensitive(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		// no moneyflows expected — only the default skeleton
		final SearchMoneyflowsResponse expected = this.getDefaultResponse();

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_caseSensitive_successfull() throws Exception {
		// matching case: case-sensitive search still finds the flows
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("enerated");
		transport.setFeatureCaseSensitive(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		final SearchMoneyflowsResponse expected = this.getDefaultResponse();
		this.fillYearlySearchGenerated(expected);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_regexp_successfull() throws Exception {
		// character-class regexp equivalent to the plain substring search
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("[E][N][E][R][A][T][E][D]");
		transport.setFeatureRegexp(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		final SearchMoneyflowsResponse expected = this.getDefaultResponse();
		this.fillYearlySearchGenerated(expected);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_regexpCaseSensitive_noMatches() throws Exception {
		// regexp + case sensitivity: uppercase pattern must not match
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("[E][N][E][R][A][T][E][D]");
		transport.setFeatureRegexp(SHORT_1);
		transport.setFeatureCaseSensitive(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		// no moneyflows expected — only the default skeleton
		final SearchMoneyflowsResponse expected = this.getDefaultResponse();

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_regexpCaseSensitive_successfull() throws Exception {
		// regexp + case sensitivity with matching case finds the flows
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("[e][n][e][r][a][t][e][d]");
		transport.setFeatureRegexp(SHORT_1);
		transport.setFeatureCaseSensitive(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		final SearchMoneyflowsResponse expected = this.getDefaultResponse();
		this.fillYearlySearchGenerated(expected);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_equals_successfull() throws Exception {
		// exact-match mode with the full comment text matches
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("GENERATED");
		transport.setFeatureEqual(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		final SearchMoneyflowsResponse expected = this.getDefaultResponse();
		this.fillYearlySearchGenerated(expected);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_equals_noMatches() throws Exception {
		// exact-match mode with only a substring must not match
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("ENERATED");
		transport.setFeatureEqual(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		// no moneyflows expected — only the default skeleton
		final SearchMoneyflowsResponse expected = this.getDefaultResponse();

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_equalsCaseSensitive_noMatches() throws Exception {
		// exact match + case sensitivity: wrong case must not match
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("GENERATED");
		transport.setFeatureEqual(SHORT_1);
		transport.setFeatureCaseSensitive(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		// no moneyflows expected — only the default skeleton
		final SearchMoneyflowsResponse expected = this.getDefaultResponse();

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_equalsCaseSensitive_successfull() throws Exception {
		// exact match + case sensitivity with correct case matches
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("generated");
		transport.setFeatureEqual(SHORT_1);
		transport.setFeatureCaseSensitive(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		final SearchMoneyflowsResponse expected = this.getDefaultResponse();
		this.fillYearlySearchGenerated(expected);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
	public void test_onlyMinusAmounts_successfull() throws Exception {
		// restricting to negative amounts keeps only the even flows 4..18
		// plus flow 19
		final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest();
		final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport();
		transport.setSearchString("generated");
		transport.setFeatureOnlyMinusAmounts(SHORT_1);
		request.setMoneyflowSearchParamsTransport(transport);

		final SearchMoneyflowsResponse expected = this.getDefaultResponse();
		final List<MoneyflowTransport> moneyflowTransports = new ArrayList<>();
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow4().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow6().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow8().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow10().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow12().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow14().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow16().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow18().build());
		moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow19().build());
		expected.setMoneyflowTransports(moneyflowTransports);

		final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false,
				SearchMoneyflowsResponse.class);
		this.assertEquals(expected, actual);
	}

	@Test
public void test_postingAccount_successfull() throws Exception { final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport(); transport.setPostingAccountId(PostingAccountTransportBuilder.POSTING_ACCOUNT1_ID); request.setMoneyflowSearchParamsTransport(transport); final SearchMoneyflowsResponse expected = this.getDefaultResponse(); final List<MoneyflowTransport> moneyflowTransports = new ArrayList<>(); final MoneyflowTransport transport1a = new MoneyflowTransportBuilder().forMoneyflow1().build(); transport1a.setComment(MoneyflowSplitEntryTransportBuilder.MONEYFLOW_SPLIT_ENTRY1_COMMENT); transport1a.setAmount(MoneyflowSplitEntryTransportBuilder.MONEYFLOW_SPLIT_ENTRY1_AMOUNT); transport1a.setPostingaccountid(PostingAccountTransportBuilder.POSTING_ACCOUNT1_ID); transport1a.setPostingaccountname(PostingAccountTransportBuilder.POSTING_ACCOUNT1_NAME); moneyflowTransports.add(transport1a); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow2().build()); expected.setMoneyflowTransports(moneyflowTransports); final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false, SearchMoneyflowsResponse.class); this.assertEquals(expected, actual); } @Test public void test_contractpartner_successfull() throws Exception { final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport(); transport.setContractpartnerId(ContractpartnerTransportBuilder.CONTRACTPARTNER1_ID); request.setMoneyflowSearchParamsTransport(transport); final SearchMoneyflowsResponse expected = this.getDefaultResponse(); final List<MoneyflowTransport> moneyflowTransports = new ArrayList<>(); final MoneyflowTransport transport1a = new MoneyflowTransportBuilder().forMoneyflow1().build(); transport1a.setComment(MoneyflowSplitEntryTransportBuilder.MONEYFLOW_SPLIT_ENTRY1_COMMENT); 
transport1a.setAmount(MoneyflowSplitEntryTransportBuilder.MONEYFLOW_SPLIT_ENTRY1_AMOUNT); transport1a.setPostingaccountid(PostingAccountTransportBuilder.POSTING_ACCOUNT1_ID); transport1a.setPostingaccountname(PostingAccountTransportBuilder.POSTING_ACCOUNT1_NAME); final MoneyflowTransport transport1b = new MoneyflowTransportBuilder().forMoneyflow1().build(); transport1b.setComment(MoneyflowSplitEntryTransportBuilder.MONEYFLOW_SPLIT_ENTRY2_COMMENT); transport1b.setAmount(MoneyflowSplitEntryTransportBuilder.MONEYFLOW_SPLIT_ENTRY2_AMOUNT); transport1b.setPostingaccountid(PostingAccountTransportBuilder.POSTING_ACCOUNT2_ID); transport1b.setPostingaccountname(PostingAccountTransportBuilder.POSTING_ACCOUNT2_NAME); moneyflowTransports.add(transport1a); moneyflowTransports.add(transport1b); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow2().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow3().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow4().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow5().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow6().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow7().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow8().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow9().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow10().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow11().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow12().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow13().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow14().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow15().build()); moneyflowTransports.add(new 
MoneyflowTransportBuilder().forMoneyflow16().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow17().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow18().build()); expected.setMoneyflowTransports(moneyflowTransports); final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false, SearchMoneyflowsResponse.class); this.assertEquals(expected, actual); } @Test public void test_searchStringContractpartnerPostingAccount_successfull() throws Exception { final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport(); transport.setPostingAccountId(PostingAccountTransportBuilder.POSTING_ACCOUNT1_ID); transport.setContractpartnerId(ContractpartnerTransportBuilder.CONTRACTPARTNER1_ID); transport.setSearchString("generated"); request.setMoneyflowSearchParamsTransport(transport); final SearchMoneyflowsResponse expected = this.getDefaultResponse(); final List<MoneyflowTransport> moneyflowTransports = new ArrayList<>(); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow2().build()); expected.setMoneyflowTransports(moneyflowTransports); final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false, SearchMoneyflowsResponse.class); this.assertEquals(expected, actual); } private void fillYearlySearchGenerated(final SearchMoneyflowsResponse expected) { final List<MoneyflowTransport> moneyflowTransports = new ArrayList<>(); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow2().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow3().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow4().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow5().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow6().build()); 
moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow7().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow8().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow9().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow10().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow11().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow12().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow13().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow14().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow15().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow16().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow17().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow18().build()); moneyflowTransports.add(new MoneyflowTransportBuilder().forMoneyflow19().build()); expected.setMoneyflowTransports(moneyflowTransports); } @Test public void test_AuthorizationRequired_Error() throws Exception { this.userName = null; this.userPassword = null; final ErrorResponse actual = super.callUsecaseWithoutContent("", this.method, false, ErrorResponse.class); Assertions.assertEquals(super.accessDeniedErrorResponse(), actual); } @Test public void test_noTransport_ErrorResponse() throws Exception { this.userName = UserTransportBuilder.ADMIN_NAME; this.userPassword = UserTransportBuilder.ADMIN_PASSWORD; final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final SearchMoneyflowsResponse expected = new SearchMoneyflowsResponse(); final List<PostingAccountTransport> postingAccountTransports = new ArrayList<>(); postingAccountTransports.add(new PostingAccountTransportBuilder().forPostingAccount1().build()); postingAccountTransports.add(new 
PostingAccountTransportBuilder().forPostingAccount2().build()); postingAccountTransports.add(new PostingAccountTransportBuilder().forPostingAccount3().build()); expected.setPostingAccountTransports(postingAccountTransports); final List<ContractpartnerTransport> contractpartnerTransports = new ArrayList<>(); contractpartnerTransports.add(new ContractpartnerTransportBuilder().forContractpartner5().build()); expected.setContractpartnerTransports(contractpartnerTransports); final ValidationItemTransport validationItemTransport1 = new ValidationItemTransport(); validationItemTransport1.setError(ErrorCode.NO_SEARCH_CRITERIA_ENTERED.getErrorCode()); final List<ValidationItemTransport> validationItemTransports = Arrays.asList(validationItemTransport1); expected.setValidationItemTransports(validationItemTransports); expected.setResult(Boolean.FALSE); final SearchMoneyflowsResponse actual = super.callUsecaseWithContent("", this.method, request, false, SearchMoneyflowsResponse.class); Assertions.assertEquals(expected, actual); } @Test @Sql("classpath:h2defaults.sql") public void test_emptyDatabase_noException() throws Exception { this.userName = UserTransportBuilder.ADMIN_NAME; this.userPassword = UserTransportBuilder.ADMIN_PASSWORD; final SearchMoneyflowsRequest request = new SearchMoneyflowsRequest(); final MoneyflowSearchParamsTransport transport = new MoneyflowSearchParamsTransport(); transport.setSearchString("hugo"); request.setMoneyflowSearchParamsTransport(transport); super.callUsecaseWithContent("", this.method, request, false, SearchMoneyflowsResponse.class); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app.launcher;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.ShuffleHandler;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy;
import org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy.ContainerManagementProtocolProxyData;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * This class is responsible for launching of containers.
 *
 * Events arriving via {@link #handle} are queued and drained by a dedicated
 * event-handling thread, which dispatches each event to a dynamically-sized
 * thread pool; one {@link EventProcessor} per event performs the remote
 * start/stop call against the NodeManager.
 */
public class ContainerLauncherImpl extends AbstractService implements
    ContainerLauncher {

  static final Log LOG = LogFactory.getLog(ContainerLauncherImpl.class);

  // Tracks per-container launch/kill state; entries are removed once a
  // container is completely done (see removeContainerIfDone).
  private ConcurrentHashMap<ContainerId, Container> containers =
    new ConcurrentHashMap<ContainerId, Container>();
  private final AppContext context;
  protected ThreadPoolExecutor launcherPool;
  protected static final int INITIAL_POOL_SIZE = 10;
  private int limitOnPoolSize;
  private Thread eventHandlingThread;
  protected BlockingQueue<ContainerLauncherEvent> eventQueue =
      new LinkedBlockingQueue<ContainerLauncherEvent>();
  private final AtomicBoolean stopped;
  private ContainerManagementProtocolProxy cmProxy;

  /**
   * Returns the tracked Container for this event, creating and registering a
   * fresh one atomically if none exists yet (putIfAbsent guards the race).
   */
  private Container getContainer(ContainerLauncherEvent event) {
    ContainerId id = event.getContainerID();
    Container c = containers.get(id);
    if (c == null) {
      c = new Container(event.getTaskAttemptID(), event.getContainerID(),
          event.getContainerMgrAddress());
      Container old = containers.putIfAbsent(id, c);
      if (old != null) {
        c = old;
      }
    }
    return c;
  }

  /** Drops the bookkeeping entry once the container reached DONE or FAILED. */
  private void removeContainerIfDone(ContainerId id) {
    Container c = containers.get(id);
    if (c != null && c.isCompletelyDone()) {
      containers.remove(id);
    }
  }

  private static enum ContainerState {
    PREP, FAILED, RUNNING, DONE, KILLED_BEFORE_LAUNCH
  }

  /**
   * Per-container state machine. All transitions are synchronized so a
   * concurrent launch() and kill() on the same container serialize.
   */
  private class Container {
    private ContainerState state;
    // store enough information to be able to cleanup the container
    private TaskAttemptId taskAttemptID;
    private ContainerId containerID;
    final private String containerMgrAddress;

    public Container(TaskAttemptId taId, ContainerId containerID,
        String containerMgrAddress) {
      this.state = ContainerState.PREP;
      this.taskAttemptID = taId;
      this.containerMgrAddress = containerMgrAddress;
      this.containerID = containerID;
    }

    public synchronized boolean isCompletelyDone() {
      return state == ContainerState.DONE || state == ContainerState.FAILED;
    }

    /**
     * Starts the container on the remote NodeManager, extracts the shuffle
     * port from the service metadata, and signals the task attempt
     * (ASSIGNED -> RUNNING). Any failure moves the state to FAILED and sends
     * a launch-failed message instead.
     */
    @SuppressWarnings("unchecked")
    public synchronized void launch(ContainerRemoteLaunchEvent event) {
      LOG.info("Launching " + taskAttemptID);
      if (this.state == ContainerState.KILLED_BEFORE_LAUNCH) {
        state = ContainerState.DONE;
        sendContainerLaunchFailedMsg(taskAttemptID,
            "Container was killed before it was launched");
        return;
      }

      ContainerManagementProtocolProxyData proxy = null;
      try {
        proxy = getCMProxy(containerMgrAddress, containerID);

        // Construct the actual Container
        ContainerLaunchContext containerLaunchContext =
          event.getContainerLaunchContext();

        // FIX: the original concatenation lacked separators and produced
        // garbled output like "...task onhost:port fromattempt_...".
        LOG.info("launch remote task on " + this.containerMgrAddress + " from "
            + this.taskAttemptID.toString());

        // Now launch the actual container
        StartContainerRequest startRequest = StartContainerRequest.newInstance(
            containerLaunchContext, event.getContainerToken());
        List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
        list.add(startRequest);
        StartContainersRequest requestList = StartContainersRequest.newInstance(list);
        StartContainersResponse response =
            proxy.getContainerManagementProtocol().startContainers(requestList);
        if (response.getFailedRequests() != null
            && response.getFailedRequests().containsKey(containerID)) {
          throw response.getFailedRequests().get(containerID).deSerialize();
        }
        ByteBuffer portInfo = response.getAllServicesMetaData().get(
            ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID);
        int port = -1;
        if (portInfo != null) {
          port = ShuffleHandler.deserializeMetaData(portInfo);
        }
        LOG.info("Shuffle port returned by ContainerManager for "
            + taskAttemptID + " : " + port);

        if (port < 0) {
          this.state = ContainerState.FAILED;
          throw new IllegalStateException("Invalid shuffle port number "
              + port + " returned for " + taskAttemptID);
        }

        // after launching, send launched event to task attempt to move
        // it from ASSIGNED to RUNNING state
        context.getEventHandler().handle(
            new TaskAttemptContainerLaunchedEvent(taskAttemptID, port));
        this.state = ContainerState.RUNNING;
      } catch (Throwable t) {
        String message = "Container launch failed for " + containerID + " : "
            + StringUtils.stringifyException(t);
        this.state = ContainerState.FAILED;
        sendContainerLaunchFailedMsg(taskAttemptID, message);
      } finally {
        if (proxy != null) {
          cmProxy.mayBeCloseProxy(proxy);
        }
      }
    }

    /**
     * Kills the container: if it was never launched it is simply marked
     * KILLED_BEFORE_LAUNCH; otherwise a remote stop is attempted (cleanup
     * failures are logged and reported as diagnostics, not rethrown). In all
     * cases a TA_CONTAINER_CLEANED event is sent to the task attempt.
     */
    @SuppressWarnings("unchecked")
    public synchronized void kill() {
      if (this.state == ContainerState.PREP) {
        // FIX: original message was "...container" + "KILLED BEFORE LAUNCH"
        // with no separator between the fragments.
        LOG.info("final state for killed container: KILLED_BEFORE_LAUNCH");
        this.state = ContainerState.KILLED_BEFORE_LAUNCH;
      } else if (!isCompletelyDone()) {
        LOG.info("KILLING " + taskAttemptID);

        ContainerManagementProtocolProxyData proxy = null;
        try {
          proxy = getCMProxy(this.containerMgrAddress, this.containerID);

          // kill the remote container if already launched
          List<ContainerId> ids = new ArrayList<ContainerId>();
          ids.add(this.containerID);
          StopContainersRequest request = StopContainersRequest.newInstance(ids);
          StopContainersResponse response =
              proxy.getContainerManagementProtocol().stopContainers(request);
          if (response.getFailedRequests() != null
              && response.getFailedRequests().containsKey(this.containerID)) {
            throw response.getFailedRequests().get(this.containerID)
                .deSerialize();
          }
        } catch (Throwable t) {
          // ignore the cleanup failure
          String message = "cleanup failed for container "
              + this.containerID + " : " + StringUtils.stringifyException(t);
          context.getEventHandler()
              .handle(
                  new TaskAttemptDiagnosticsUpdateEvent(this.taskAttemptID,
                      message));
          LOG.warn(message);
        } finally {
          if (proxy != null) {
            cmProxy.mayBeCloseProxy(proxy);
          }
        }
        // FIX: same missing-separator problem as above.
        LOG.info("final state for killed container: DONE");
        this.state = ContainerState.DONE;
      }
      // after killing, send killed event to task attempt
      context.getEventHandler().handle(
          new TaskAttemptEvent(this.taskAttemptID,
              TaskAttemptEventType.TA_CONTAINER_CLEANED));
    }
  }

  public ContainerLauncherImpl(AppContext context) {
    super(ContainerLauncherImpl.class.getName());
    this.context = context;
    this.stopped = new AtomicBoolean(false);
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    this.limitOnPoolSize = conf.getInt(
        MRJobConfig.MR_AM_CONTAINERLAUNCHER_THREAD_COUNT_LIMIT,
        MRJobConfig.DEFAULT_MR_AM_CONTAINERLAUNCHER_THREAD_COUNT_LIMIT);
    LOG.info("Upper limit on the thread pool size is " + this.limitOnPoolSize);
    super.serviceInit(conf);
    cmProxy = new ContainerManagementProtocolProxy(conf);
  }

  @Override  // was missing; serviceInit/serviceStop follow the same pattern
  protected void serviceStart() throws Exception {
    ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
        "ContainerLauncher #%d").setDaemon(true).build();

    // Start with a default core-pool size of 10 and change it dynamically.
    launcherPool = new ThreadPoolExecutor(INITIAL_POOL_SIZE,
        Integer.MAX_VALUE, 1, TimeUnit.HOURS,
        new LinkedBlockingQueue<Runnable>(), tf);

    // Drains eventQueue, grows the pool toward limitOnPoolSize as the set of
    // distinct NodeManagers seen grows, and hands each event to the pool.
    eventHandlingThread = new Thread() {
      @Override
      public void run() {
        ContainerLauncherEvent event = null;
        Set<String> allNodes = new HashSet<String>();

        while (!stopped.get() && !Thread.currentThread().isInterrupted()) {
          try {
            event = eventQueue.take();
          } catch (InterruptedException e) {
            if (!stopped.get()) {
              LOG.error("Returning, interrupted : " + e);
            }
            return;
          }
          allNodes.add(event.getContainerMgrAddress());

          int poolSize = launcherPool.getCorePoolSize();

          // See if we need up the pool size only if haven't reached the
          // maximum limit yet.
          if (poolSize != limitOnPoolSize) {
            // nodes where containers will run at *this* point of time. This is
            // *not* the cluster size and doesn't need to be.
            int numNodes = allNodes.size();
            int idealPoolSize = Math.min(limitOnPoolSize, numNodes);

            if (poolSize < idealPoolSize) {
              // Bump up the pool size to idealPoolSize+INITIAL_POOL_SIZE, the
              // later is just a buffer so we are not always increasing the
              // pool-size
              int newPoolSize = Math.min(limitOnPoolSize, idealPoolSize
                  + INITIAL_POOL_SIZE);
              LOG.info("Setting ContainerLauncher pool size to " + newPoolSize
                  + " as number-of-nodes to talk to is " + numNodes);
              launcherPool.setCorePoolSize(newPoolSize);
            }
          }

          // the events from the queue are handled in parallel
          // using a thread pool
          launcherPool.execute(createEventProcessor(event));

          // TODO: Group launching of multiple containers to a single
          // NodeManager into a single connection
        }
      }
    };
    eventHandlingThread.setName("ContainerLauncher Event Handler");
    eventHandlingThread.start();
    super.serviceStart();
  }

  /** Best-effort kill of every container still tracked at shutdown. */
  private void shutdownAllContainers() {
    for (Container ct : this.containers.values()) {
      if (ct != null) {
        ct.kill();
      }
    }
  }

  @Override  // was missing; serviceInit already carried the annotation
  protected void serviceStop() throws Exception {
    if (stopped.getAndSet(true)) {
      // return if already stopped
      return;
    }
    // shutdown any containers that might be left running
    shutdownAllContainers();
    if (eventHandlingThread != null) {
      eventHandlingThread.interrupt();
    }
    if (launcherPool != null) {
      launcherPool.shutdownNow();
    }
    super.serviceStop();
  }

  protected EventProcessor createEventProcessor(ContainerLauncherEvent event) {
    return new EventProcessor(event);
  }

  /**
   * Setup and start the container on remote nodemanager.
   */
  class EventProcessor implements Runnable {
    private ContainerLauncherEvent event;

    EventProcessor(ContainerLauncherEvent event) {
      this.event = event;
    }

    @Override
    public void run() {
      LOG.info("Processing the event " + event.toString());

      // Load ContainerManager tokens before creating a connection.
      // TODO: Do it only once per NodeManager.
      ContainerId containerID = event.getContainerID();

      Container c = getContainer(event);
      switch (event.getType()) {
      case CONTAINER_REMOTE_LAUNCH:
        ContainerRemoteLaunchEvent launchEvent
            = (ContainerRemoteLaunchEvent) event;
        c.launch(launchEvent);
        break;

      case CONTAINER_REMOTE_CLEANUP:
        c.kill();
        break;
      }
      removeContainerIfDone(containerID);
    }
  }

  /**
   * Logs the failure, pushes the message as task-attempt diagnostics, and
   * signals TA_CONTAINER_LAUNCH_FAILED.
   */
  @SuppressWarnings("unchecked")
  void sendContainerLaunchFailedMsg(TaskAttemptId taskAttemptID,
      String message) {
    LOG.error(message);
    context.getEventHandler().handle(
        new TaskAttemptDiagnosticsUpdateEvent(taskAttemptID, message));
    context.getEventHandler().handle(
        new TaskAttemptEvent(taskAttemptID,
            TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED));
  }

  @Override
  public void handle(ContainerLauncherEvent event) {
    try {
      eventQueue.put(event);
    } catch (InterruptedException e) {
      // NOTE(review): the interrupt status is not restored before wrapping;
      // upstream Hadoop behaves the same way.
      throw new YarnRuntimeException(e);
    }
  }

  public ContainerManagementProtocolProxy.ContainerManagementProtocolProxyData
      getCMProxy(String containerMgrBindAddr, ContainerId containerId)
          throws IOException {
    return cmProxy.getProxy(containerMgrBindAddr, containerId);
  }
}
/* Copyright (c) 2017 lib4j
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * You should have received a copy of The MIT License (MIT) along with this
 * program. If not, see <http://opensource.org/licenses/MIT/>.
 */

package org.lib4j.util;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.IdentityHashMap;

/**
 * An observable list that is partitioned into type-keyed sub-lists
 * ({@link PartitionList}), while also maintaining a single flat "super" list
 * of all elements. Mutations made through either view are propagated to the
 * other via the afterAdd/beforeRemove/beforeSet hooks inherited from
 * ObservableList (declared elsewhere in this package).
 *
 * Index bookkeeping: the outer {@code indexes} list maps each super-list
 * position to the element's position in its sub-list, and {@code subLists}
 * maps each super-list position to the owning sub-list; each sub-list's
 * {@code indexes} maps sub positions back to super positions. The statement
 * order in the hook methods below is load-bearing — indexes are adjusted
 * incrementally around each structural change.
 */
public abstract class PartitionedList<E,T> extends ObservableList<E> implements Cloneable {
  /**
   * The per-type view of the partitioned list. Adds/removes/sets on this view
   * are mirrored into the enclosing super list.
   */
  public class PartitionList<P extends E> extends ObservableList<P> implements Cloneable {
    // The partition key this sub-list represents.
    protected T type;
    // For each sub-list position, the corresponding index in the super list.
    protected ArrayList<Integer> indexes;

    protected PartitionList(final T type) {
      super(new ArrayList<P>());
      this.type = type;
      this.indexes = new ArrayList<>();
    }

    /** Returns the partition key of this sub-list. */
    public T getType() {
      return this.type;
    }

    private ArrayList<Integer> getIndexes() {
      return this.indexes;
    }

    // "Unsafe" mutators bypass the observable hooks by writing to the
    // backing source list directly — used when the change originated on the
    // other view and must not be re-propagated.
    private void addUnsafe(final E e) {
      super.source.add(e);
    }

    private void addUnsafe(final int index, final E e) {
      super.source.add(index, e);
    }

    @SuppressWarnings("unchecked")
    private E setUnsafe(final int index, final E e) {
      return (E)super.source.set(index, e);
    }

    @SuppressWarnings("unchecked")
    private E removeUnsafe(final int index) {
      // Also discards this position's super-index mapping.
      this.indexes.remove(index);
      return (E)super.source.remove(index);
    }

    /**
     * Hook: after an element was added to this sub-list, mirror it into the
     * super list. Appends go at the end of the super list; insertions splice
     * into the super position derived from the neighboring sub entries and
     * then shift all affected indexes (in every other sub-list) by one.
     */
    @Override
    @SuppressWarnings("unchecked")
    protected void afterAdd(final int index, final P e, final RuntimeException exception) {
      if (index == size() - 1) {
        // Appended to this sub-list -> append to the super list.
        final PartitionedList<E,T> superList = PartitionedList.this;
        superList.indexes.add(index);
        superList.subLists.add(this);
        superList.addUnsafe(e);
        indexes.add(superList.size() - 1);
      }
      else {
        // Inserted mid-sub-list: the super position follows the previous
        // sibling (or precedes the displaced one at index when index == 0).
        final int superIndex = index > 0 ? indexes.get(index - 1) + 1 : indexes.get(index);
        final PartitionList<P> subList = this;
        final PartitionedList<E,T> superList = PartitionedList.this;
        superList.indexes.add(superIndex, index);
        superList.subLists.add(superIndex, subList);
        superList.addUnsafe(superIndex, e);
        // Shift super->sub indexes for entries of THIS sub-list past the
        // insertion point, and super indexes held by every OTHER sub-list
        // (each visited once, by identity).
        final IdentityHashSet<PartitionList<P>> visited = new IdentityHashSet<>();
        for (int i = superIndex + 1; i < superList.size(); i++) {
          final PartitionList<P> nextSubList = (PartitionedList<E,T>.PartitionList<P>)superList.subLists.get(i);
          if (nextSubList == subList)
            superList.indexes.set(i, superList.indexes.get(i) + 1);

          if (visited.contains(nextSubList) || nextSubList == subList)
            continue;

          visited.add(nextSubList);
          incSuperIndexes(nextSubList, superIndex);
        }

        indexes.add(index, superIndex);
        for (int i = index + 1; i < indexes.size(); i++)
          indexes.set(i, indexes.get(i) + 1);
      }
    }

    /**
     * Hook: before an element is removed from this sub-list, remove its
     * mirror from the super list and shift every affected index down by one.
     * Always returns true (the removal proceeds).
     */
    @Override
    @SuppressWarnings("unchecked")
    protected boolean beforeRemove(final int index) {
      final int superIndex = indexes.remove(index);
      final PartitionedList<E,T> superList = PartitionedList.this;
      superList.removeUnsafe(superIndex);
      superList.indexes.remove(superIndex);
      final PartitionList<P> subList = (PartitionedList<E,T>.PartitionList<P>)superList.subLists.remove(superIndex);
      final IdentityHashSet<PartitionList<P>> visited = new IdentityHashSet<>();
      for (int i = superIndex; i < superList.size(); i++) {
        final PartitionList<P> nextSubList = (PartitionedList<E,T>.PartitionList<P>)superList.subLists.get(i);
        if (nextSubList == subList)
          superList.indexes.set(i, superList.indexes.get(i) - 1);

        if (visited.contains(nextSubList) || nextSubList == subList)
          continue;

        visited.add(nextSubList);
        decSuperIndexes(nextSubList, superIndex);
      }

      for (int i = index; i < indexes.size(); i++)
        indexes.set(i, indexes.get(i) - 1);

      return true;
    }

    /**
     * Hook: a set() on the sub-list replaces the same element in the super
     * list in place (indexes are unaffected). Always returns true.
     */
    @Override
    protected boolean beforeSet(final int index, final P newElement) {
      final int superIndex = indexes.get(index);
      subLists.set(superIndex, this);
      PartitionedList.this.setUnsafe(superIndex, newElement);
      return true;
    }

    /** Returns the enclosing partitioned (super) list. */
    protected PartitionedList<E,T> getSuperList() {
      return PartitionedList.this;
    }

    // Debug dump of this sub-list's indexes and element identities.
    protected void print() {
      System.err.println(" SubList<" + type + "> " + System.identityHashCode(this));
      System.err.print(" I:");
      indexes.stream().forEach(i -> System.err.print(" " + i));
      System.err.println();
      System.err.print(" E:");
      stream().forEach(e -> System.err.print(" " + System.identityHashCode(e)));
      System.err.println();
    }

    /**
     * Shallow-structural clone: the backing list and index list are copied;
     * the elements themselves are shared. (Element copying is handled by the
     * outer class's clone().)
     */
    @Override
    @SuppressWarnings("unchecked")
    public PartitionList<P> clone() {
      try {
        final PartitionList<P> clone = (PartitionList<P>)super.clone();
        clone.source = (ArrayList<E>)((ArrayList<E>)source).clone();
        clone.type = type;
        clone.indexes = (ArrayList<Integer>)indexes.clone();
        return clone;
      }
      catch (final CloneNotSupportedException e) {
        throw new UnsupportedOperationException(e);
      }
    }
  }

  // For each super-list position, the element's index within its sub-list.
  protected ArrayList<Integer> indexes;
  // For each super-list position, the sub-list owning that element.
  protected ArrayList<PartitionList<? extends E>> subLists;
  // Partition key -> sub-list (entries may be null until first use).
  protected HashMap<T,PartitionList<? extends E>> typeToSubList = new HashMap<>();

  public PartitionedList() {
    super(new ArrayList<E>());
    this.indexes = new ArrayList<>();
    this.subLists = new ArrayList<>();
  }

  /** Pre-registers the given partition keys (sub-lists created lazily). */
  @SafeVarargs
  public PartitionedList(final T ... types) {
    this();
    for (final T type : types)
      typeToSubList.put(type, null);
  }

  /** Pre-registers the given partition keys; a null collection is ignored. */
  public PartitionedList(final Collection<T> types) {
    this();
    if (types != null)
      for (final T type : types)
        typeToSubList.put(type, null);
  }

  /** Factory for a new sub-list bound to the given partition key. */
  protected PartitionList<E> newPartition(final T type) {
    return new PartitionList<>(type);
  }

  /**
   * Resolves the sub-list responsible for elements of the given class, or
   * null if the class is not allowed in this list (callers throw on null).
   */
  protected abstract PartitionList<E> getPartition(final Class<? extends E> type);

  // "Unsafe" mutators bypass the observable hooks (see the sub-list's
  // equivalents above).
  private void addUnsafe(final E e) {
    super.source.add(e);
  }

  private void addUnsafe(final int index, final E e) {
    super.source.add(index, e);
  }

  @SuppressWarnings("unchecked")
  private E setUnsafe(final int index, final E e) {
    return (E)super.source.set(index, e);
  }

  @SuppressWarnings("unchecked")
  private E removeUnsafe(final int index) {
    return (E)super.source.remove(index);
  }

  /**
   * Inserts {@code element} into {@code subList}, choosing the mirroring mode
   * from which of the (nullable, boxed) index arguments are present:
   * superIndex == null -> plain observable add; subIndex == null -> append
   * with a recorded super index; otherwise a positional splice that shifts
   * the sub-list's later super indexes up by one.
   * Returns subIndex if given, else the last super position.
   */
  private int add(final Integer superIndex, final Integer subIndex, final E element, final PartitionList<E> subList) {
    if (superIndex == null) {
      subList.add(element);
    }
    else if (subIndex == null) {
      subList.indexes.add(superIndex);
      subList.addUnsafe(element);
    }
    else {
      subList.addUnsafe(subIndex, element);
      subList.indexes.add(subIndex, superIndex);
      for (int i = subIndex + 1; i < subList.indexes.size(); i++)
        subList.indexes.set(i, subList.indexes.get(i) + 1);
    }

    return subIndex != null ? subIndex : this.size() - 1;
  }

  /** Shifts up (by one) every super index in subList that is >= index. */
  private void incSuperIndexes(final PartitionList<? extends E> subList, final int index) {
    final ArrayList<Integer> superIndexes = subList.getIndexes();
    for (int j = 0; j < superIndexes.size(); j++) {
      final int superIndex = superIndexes.get(j);
      if (superIndex >= index)
        superIndexes.set(j, superIndex + 1);
    }
  }

  /** Shifts down (by one) every super index in subList that is > index. */
  private void decSuperIndexes(final PartitionList<? extends E> subList, final int index) {
    final ArrayList<Integer> superIndexes = subList.getIndexes();
    for (int j = 0; j < superIndexes.size(); j++) {
      final int superIndex = superIndexes.get(j);
      if (superIndex > index)
        superIndexes.set(j, superIndex - 1);
    }
  }

  /**
   * Determines the sub-list position at which an element inserted at super
   * position {@code index} should land.
   * NOTE(review): on the first super index found below {@code index} this
   * returns {@code index} (a SUPER position) rather than a sub position, and
   * the loop variable {@code i}/{@code superIndex} is otherwise unused —
   * this looks suspect; verify against the project's tests.
   */
  private int findSubIndex(final PartitionList<? extends E> subList, final int index) {
    if (subList.size() == 0)
      return 0;

    final ArrayList<Integer> superIndexes = subList.getIndexes();
    for (int i = subList.size() - 1; i >= 0; i--) {
      final int superIndex = superIndexes.get(i);
      if (superIndex < index)
        return index;
    }

    return 0;
  }

  /**
   * Hook: after an element was added to the super list, mirror it into the
   * sub-list for its runtime class. Throws IllegalArgumentException when no
   * partition accepts the element's class.
   */
  @Override
  @SuppressWarnings("unchecked")
  protected void afterAdd(final int index, final E e, final RuntimeException exception) {
    final PartitionList<E> subList = getPartition((Class<E>)e.getClass());
    if (subList == null)
      throw new IllegalArgumentException("Object of type " + e.getClass() + " is not allowed to appear in " + PartitionedList.class.getName());

    subLists.add(index, subList);
    if (index == size() - 1) {
      // Appended: record the sub position and append to the sub-list.
      indexes.add(subList.size());
      add(index, null, e, subList);
    }
    else {
      // Inserted: splice into the sub-list and shift affected indexes.
      final int subIndex = index == 0 ? 0 : findSubIndex(subList, index);
      add(index, subIndex, e, subList);
      if (subIndex != -1) {
        indexes.add(index, subIndex);
        final IdentityHashSet<PartitionList<? extends E>> visited = new IdentityHashSet<>();
        for (int i = index + 1; i < subLists.size(); i++) {
          final PartitionList<? extends E> nextSubList = subLists.get(i);
          if (nextSubList == subList)
            indexes.set(i, indexes.get(i) + 1);

          if (visited.contains(nextSubList) || nextSubList == subList)
            continue;

          visited.add(nextSubList);
          incSuperIndexes(nextSubList, index);
        }
      }
    }
  }

  /**
   * Hook: a set() on the super list. Same-class replacement is done in place
   * on the owning sub-list; a class change removes from the old sub-list and
   * inserts into the new one. Always returns true.
   */
  @Override
  @SuppressWarnings("unchecked")
  protected boolean beforeSet(final int index, final E newElement) {
    final PartitionList<? extends E> subList = subLists.get(index);
    final E element = get(index);
    final int subIndex = indexes.get(index);
    if (element.getClass() == newElement.getClass()) {
      subList.setUnsafe(subIndex, newElement);
    }
    else {
      final PartitionList<E> newSubList = getPartition((Class<E>)newElement.getClass());
      if (newSubList == null)
        throw new IllegalArgumentException("Object of type " + newElement.getClass() + " is not allowed to appear in " + PartitionedList.class.getName());

      subList.removeUnsafe(subIndex);
      for (int i = index + 1; i < subLists.size(); i++) {
        final PartitionList<? extends E> nextSubList = subLists.get(i);
        if (nextSubList == subList)
          indexes.set(i, indexes.get(i) - 1);
      }

      // NOTE(review): the new element is always placed at sub position 0 of
      // the new sub-list — TODO confirm this is the intended ordering.
      add(index, 0, newElement, newSubList);
      subLists.set(index, newSubList);
    }

    return true;
  }

  /**
   * Hook: before an element is removed from the super list, remove its
   * mirror from the owning sub-list and shift the affected indexes down.
   * Always returns true.
   */
  @Override
  protected boolean beforeRemove(final int index) {
    final PartitionList<? extends E> subList = subLists.remove(index);
    subList.removeUnsafe(indexes.remove(index));
    final ArrayList<Integer> subIndexes = subList.getIndexes();
    for (int i = index; i < subIndexes.size(); i++)
      subIndexes.set(i, subIndexes.get(i) - 1);

    final IdentityHashSet<PartitionList<? extends E>> visited = new IdentityHashSet<>();
    for (int i = index; i < subLists.size(); i++) {
      final PartitionList<? extends E> nextSubList = subLists.get(i);
      if (nextSubList == subList)
        indexes.set(i, indexes.get(i) - 1);

      if (visited.contains(nextSubList) || nextSubList == subList)
        continue;

      visited.add(nextSubList);
      decSuperIndexes(nextSubList, index);
    }

    return true;
  }

  // Debug dump of the super-list bookkeeping and each distinct sub-list.
  protected void print() {
    System.err.print(" I:");
    indexes.stream().forEach(i -> System.err.print(" " + i));
    System.err.println();
    System.err.print(" E:");
    stream().forEach(e -> System.err.print(" " + System.identityHashCode(e)));
    System.err.println();
    System.err.print(" A:");
    subLists.stream().forEach(e -> System.err.print(" " + System.identityHashCode(e)));
    System.err.println();
    new IdentityHashSet<>(subLists).stream().forEach(e -> e.print());
  }

  /**
   * Per-element copy hook used by {@link #clone()}; the default returns the
   * same instance (shallow copy). Subclasses may override to deep-copy.
   */
  protected E clone(final E item) {
    return item;
  }

  /**
   * Clones the whole structure: index lists, sub-lists, and the type map are
   * rebuilt; each element is passed through {@link #clone(Object)} exactly
   * once, and the sub-lists are rewired to the copied instances so identity
   * relationships between the views are preserved.
   */
  @Override
  @SuppressWarnings("unchecked")
  public PartitionedList<E,T> clone() {
    try {
      final PartitionedList<E,T> clone = (PartitionedList<E,T>)super.clone();
      clone.source = (ArrayList<E>)((ArrayList<E>)source).clone();
      clone.indexes = (ArrayList<Integer>)indexes.clone();
      clone.subLists = new ArrayList<>();
      for (final PartitionList<? extends E> subList : subLists)
        clone.subLists.add(subList.clone());

      final IdentityHashMap<E,E> clones = new IdentityHashMap<>();
      for (int i = 0; i < clone.source.size(); i++) {
        final E item = (E)clone.source.get(i);
        final E copy = clone(item);
        clones.put(item, copy);
        clone.source.set(i, copy);
      }

      clone.typeToSubList = new HashMap<>();
      for (final PartitionList<? extends E> subList : clone.subLists) {
        clone.typeToSubList.put(subList.getType(), subList);
        for (int i = 0; i < subList.source.size(); i++)
          subList.source.set(i, clones.get(subList.source.get(i)));
      }

      return clone;
    }
    catch (final CloneNotSupportedException e) {
      throw new UnsupportedOperationException(e);
    }
  }
}
/**
 * Copyright 2010-2015 The PlayN Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
package playn.java;

import javax.swing.JOptionPane;

import org.lwjgl.LWJGLException;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.Display;

import pythagoras.f.Point;
import react.RFuture;

import playn.core.Key;
import static playn.core.Keyboard.*; // to avoid clash with LWJGL Keyboard; meh
import static playn.core.Mouse.*; // to avoid clash with LWJGL Mouse; double meh

/**
 * {@link JavaInput} implementation backed by LWJGL 2's polled {@code Keyboard} and
 * {@code Mouse} devices. Events are drained once per frame in {@link #update()} and
 * forwarded to PlayN via the {@code emit*} methods inherited from {@code JavaInput}.
 */
public class LWJGLInput extends JavaInput {

  /**
   * Creates the input service and eagerly initializes the LWJGL keyboard and
   * mouse devices; any {@link LWJGLException} is rethrown unchecked.
   */
  public LWJGLInput (LWJGLPlatform plat) {
    super(plat);
    try {
      Keyboard.create();
      Mouse.create();
    } catch (LWJGLException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Pops a (blocking) Swing input dialog and completes the future with the entered
   * text. NOTE(review): showInputDialog returns null when the user cancels, so the
   * future may be completed with null — confirm callers tolerate that.
   */
  @Override public RFuture<String> getText(TextType textType, String label, String initVal,
                                           String ok, String cancel) {
    Object result = JOptionPane.showInputDialog(
      null, label, "", JOptionPane.QUESTION_MESSAGE, null, null, initVal);
    return RFuture.success((String)result);
  }

  /**
   * Pops a (blocking) Swing option dialog. With no cancel label it shows a single
   * OK button; the future yields true iff the first (OK) option was chosen.
   */
  @Override public RFuture<Boolean> sysDialog (String title, String text, String ok, String cancel) {
    int optType = JOptionPane.OK_CANCEL_OPTION;
    int msgType = cancel == null ? JOptionPane.INFORMATION_MESSAGE : JOptionPane.QUESTION_MESSAGE;
    Object[] options = (cancel == null) ? new Object[] { ok } : new Object[] { ok, cancel };
    Object defOption = (cancel == null) ? ok : cancel;
    int result = JOptionPane.showOptionDialog(
      null, text, title, optType, msgType, null, options, defOption);
    return RFuture.success(result == 0);
  }

  // Mouse lock is delegated directly to LWJGL's "grabbed" state.
  @Override public boolean hasMouseLock () { return true; }

  @Override public boolean isMouseLocked() { return Mouse.isGrabbed(); }

  @Override public void setMouseLocked (boolean locked) { Mouse.setGrabbed(locked); }

  /**
   * Drains all pending LWJGL keyboard and mouse events and re-emits them as PlayN
   * events, stamping each with the modifier-key flags sampled once per frame.
   */
  @Override void update () {
    super.update();

    // determine the current state of the modifier keys (note: the code assumes the current state
    // of the modifier keys is "correct" for all events that have arrived since the last call to
    // update; since that happens pretty frequently, 60fps, that's probably good enough)
    Keyboard.poll();
    int flags = modifierFlags(
      Keyboard.isKeyDown(Keyboard.KEY_LMENU) || Keyboard.isKeyDown(Keyboard.KEY_RMENU),
      Keyboard.isKeyDown(Keyboard.KEY_LCONTROL) || Keyboard.isKeyDown(Keyboard.KEY_RCONTROL),
      Keyboard.isKeyDown(Keyboard.KEY_LMETA) || Keyboard.isKeyDown(Keyboard.KEY_RMETA),
      Keyboard.isKeyDown(Keyboard.KEY_LSHIFT) || Keyboard.isKeyDown(Keyboard.KEY_RSHIFT));

    // process keyboard events (event timestamps are converted from nanos to millis)
    while (Keyboard.next()) {
      double time = (double) (Keyboard.getEventNanoseconds() / 1000000);
      int keyCode = Keyboard.getEventKey();

      if (Keyboard.getEventKeyState()) {
        Key key = translateKey(keyCode);
        if (key != null)
          emitKeyPress(flags, time, key, true);
        // printable characters additionally produce a "typed" event
        char keyChar = Keyboard.getEventCharacter();
        if (!Character.isISOControl(keyChar))
          emitKeyTyped(time, keyChar);
      } else {
        Key key = translateKey(keyCode);
        if (key != null)
          emitKeyPress(flags, time, key, false);
      }
    }

    // process mouse events; LWJGL's y origin is bottom-left, PlayN's is top-left,
    // hence the Display.getHeight() flip
    while (Mouse.next()) {
      double time = (double) (Mouse.getEventNanoseconds() / 1000000);
      Point m = new Point(Mouse.getEventX(), Display.getHeight() - Mouse.getEventY() - 1);

      int btnIdx = Mouse.getEventButton();
      if (btnIdx >= 0) {
        ButtonEvent.Id btn = getButton(btnIdx);
        if (btn != null)
          emitMouseButton(flags, time, m.x, m.y, btn, Mouse.getEventButtonState());
      } else {
        int wheel = Mouse.getEventDWheel();
        if (wheel != 0)
          // wheel sign is inverted and collapsed to +/-1 per event
          emitMouseWheel(flags, time, m.x, m.y, wheel > 0 ? -1 : 1);
        else
          emitMouseMotion(flags, time, m.x, m.y, Mouse.getEventDX(), -Mouse.getEventDY());
      }
    }
  }

  /**
   * Maps an LWJGL key code to the corresponding PlayN {@link Key}, or null for
   * codes PlayN has no equivalent for (the commented cases document those gaps).
   */
  private Key translateKey(int keyCode) {
    switch (keyCode) {
      case Keyboard.KEY_ESCAPE : return Key.ESCAPE;
      case Keyboard.KEY_1 : return Key.K1;
      case Keyboard.KEY_2 : return Key.K2;
      case Keyboard.KEY_3 : return Key.K3;
      case Keyboard.KEY_4 : return Key.K4;
      case Keyboard.KEY_5 : return Key.K5;
      case Keyboard.KEY_6 : return Key.K6;
      case Keyboard.KEY_7 : return Key.K7;
      case Keyboard.KEY_8 : return Key.K8;
      case Keyboard.KEY_9 : return Key.K9;
      case Keyboard.KEY_0 : return Key.K0;
      case Keyboard.KEY_MINUS : return Key.MINUS;
      case Keyboard.KEY_EQUALS : return Key.EQUALS;
      case Keyboard.KEY_BACK : return Key.BACK;
      case Keyboard.KEY_TAB : return Key.TAB;
      case Keyboard.KEY_Q : return Key.Q;
      case Keyboard.KEY_W : return Key.W;
      case Keyboard.KEY_E : return Key.E;
      case Keyboard.KEY_R : return Key.R;
      case Keyboard.KEY_T : return Key.T;
      case Keyboard.KEY_Y : return Key.Y;
      case Keyboard.KEY_U : return Key.U;
      case Keyboard.KEY_I : return Key.I;
      case Keyboard.KEY_O : return Key.O;
      case Keyboard.KEY_P : return Key.P;
      case Keyboard.KEY_LBRACKET : return Key.LEFT_BRACKET;
      case Keyboard.KEY_RBRACKET : return Key.RIGHT_BRACKET;
      case Keyboard.KEY_RETURN : return Key.ENTER;
      case Keyboard.KEY_LCONTROL : return Key.CONTROL;
      case Keyboard.KEY_A : return Key.A;
      case Keyboard.KEY_S : return Key.S;
      case Keyboard.KEY_D : return Key.D;
      case Keyboard.KEY_F : return Key.F;
      case Keyboard.KEY_G : return Key.G;
      case Keyboard.KEY_H : return Key.H;
      case Keyboard.KEY_J : return Key.J;
      case Keyboard.KEY_K : return Key.K;
      case Keyboard.KEY_L : return Key.L;
      case Keyboard.KEY_SEMICOLON : return Key.SEMICOLON;
      case Keyboard.KEY_APOSTROPHE : return Key.QUOTE;
      case Keyboard.KEY_GRAVE : return Key.BACKQUOTE;
      case Keyboard.KEY_LSHIFT : return Key.SHIFT; // PlayN doesn't know left v. right
      case Keyboard.KEY_BACKSLASH : return Key.BACKSLASH;
      case Keyboard.KEY_Z : return Key.Z;
      case Keyboard.KEY_X : return Key.X;
      case Keyboard.KEY_C : return Key.C;
      case Keyboard.KEY_V : return Key.V;
      case Keyboard.KEY_B : return Key.B;
      case Keyboard.KEY_N : return Key.N;
      case Keyboard.KEY_M : return Key.M;
      case Keyboard.KEY_COMMA : return Key.COMMA;
      case Keyboard.KEY_PERIOD : return Key.PERIOD;
      case Keyboard.KEY_SLASH : return Key.SLASH;
      case Keyboard.KEY_RSHIFT : return Key.SHIFT; // PlayN doesn't know left v. right
      case Keyboard.KEY_MULTIPLY : return Key.MULTIPLY;
      case Keyboard.KEY_LMENU : return Key.ALT; // PlayN doesn't know left v. right
      case Keyboard.KEY_SPACE : return Key.SPACE;
      case Keyboard.KEY_CAPITAL : return Key.CAPS_LOCK;
      case Keyboard.KEY_F1 : return Key.F1;
      case Keyboard.KEY_F2 : return Key.F2;
      case Keyboard.KEY_F3 : return Key.F3;
      case Keyboard.KEY_F4 : return Key.F4;
      case Keyboard.KEY_F5 : return Key.F5;
      case Keyboard.KEY_F6 : return Key.F6;
      case Keyboard.KEY_F7 : return Key.F7;
      case Keyboard.KEY_F8 : return Key.F8;
      case Keyboard.KEY_F9 : return Key.F9;
      case Keyboard.KEY_F10 : return Key.F10;
      case Keyboard.KEY_NUMLOCK : return Key.NP_NUM_LOCK;
      case Keyboard.KEY_SCROLL : return Key.SCROLL_LOCK;
      case Keyboard.KEY_NUMPAD7 : return Key.NP7;
      case Keyboard.KEY_NUMPAD8 : return Key.NP8;
      case Keyboard.KEY_NUMPAD9 : return Key.NP9;
      case Keyboard.KEY_SUBTRACT : return Key.NP_SUBTRACT;
      case Keyboard.KEY_NUMPAD4 : return Key.NP4;
      case Keyboard.KEY_NUMPAD5 : return Key.NP5;
      case Keyboard.KEY_NUMPAD6 : return Key.NP6;
      case Keyboard.KEY_ADD : return Key.NP_ADD;
      case Keyboard.KEY_NUMPAD1 : return Key.NP1;
      case Keyboard.KEY_NUMPAD2 : return Key.NP2;
      case Keyboard.KEY_NUMPAD3 : return Key.NP3;
      case Keyboard.KEY_NUMPAD0 : return Key.NP0;
      case Keyboard.KEY_DECIMAL : return Key.NP_DECIMAL;
      case Keyboard.KEY_F11 : return Key.F11;
      case Keyboard.KEY_F12 : return Key.F12;
      //case Keyboard.KEY_F13 : return Key.F13;
      //case Keyboard.KEY_F14 : return Key.F14;
      //case Keyboard.KEY_F15 : return Key.F15;
      //case Keyboard.KEY_F16 : return Key.F16;
      //case Keyboard.KEY_F17 : return Key.F17;
      //case Keyboard.KEY_F18 : return Key.F18;
      //case Keyboard.KEY_KANA : return Key.
      //case Keyboard.KEY_F19 : return Key.F19;
      //case Keyboard.KEY_CONVERT : return Key.
      //case Keyboard.KEY_NOCONVERT : return Key.
      //case Keyboard.KEY_YEN : return Key.
      //case Keyboard.KEY_NUMPADEQUALS : return Key.
      case Keyboard.KEY_CIRCUMFLEX : return Key.CIRCUMFLEX;
      case Keyboard.KEY_AT : return Key.AT;
      case Keyboard.KEY_COLON : return Key.COLON;
      case Keyboard.KEY_UNDERLINE : return Key.UNDERSCORE;
      //case Keyboard.KEY_KANJI : return Key.
      //case Keyboard.KEY_STOP : return Key.
      //case Keyboard.KEY_AX : return Key.
      //case Keyboard.KEY_UNLABELED : return Key.
      //case Keyboard.KEY_NUMPADENTER : return Key.
      case Keyboard.KEY_RCONTROL : return Key.CONTROL; // PlayN doesn't know left v. right
      //case Keyboard.KEY_SECTION : return Key.
      //case Keyboard.KEY_NUMPADCOMMA : return Key.
      //case Keyboard.KEY_DIVIDE :
      case Keyboard.KEY_SYSRQ : return Key.SYSRQ;
      case Keyboard.KEY_RMENU : return Key.ALT; // PlayN doesn't know left v. right
      case Keyboard.KEY_FUNCTION : return Key.FUNCTION;
      case Keyboard.KEY_PAUSE : return Key.PAUSE;
      case Keyboard.KEY_HOME : return Key.HOME;
      case Keyboard.KEY_UP : return Key.UP;
      case Keyboard.KEY_PRIOR : return Key.PAGE_UP;
      case Keyboard.KEY_LEFT : return Key.LEFT;
      case Keyboard.KEY_RIGHT : return Key.RIGHT;
      case Keyboard.KEY_END : return Key.END;
      case Keyboard.KEY_DOWN : return Key.DOWN;
      case Keyboard.KEY_NEXT : return Key.PAGE_DOWN;
      case Keyboard.KEY_INSERT : return Key.INSERT;
      case Keyboard.KEY_DELETE : return Key.DELETE;
      case Keyboard.KEY_CLEAR : return Key.CLEAR;
      case Keyboard.KEY_LMETA : return Key.META; // PlayN doesn't know left v. right
      //case Keyboard.KEY_LWIN : return Key.WINDOWS; // Duplicate with KEY_LMETA
      case Keyboard.KEY_RMETA : return Key.META; // PlayN doesn't know left v. right
      //case Keyboard.KEY_RWIN : return Key.WINDOWS; // Duplicate with KEY_RMETA
      //case Keyboard.KEY_APPS : return Key.
      case Keyboard.KEY_POWER : return Key.POWER;
      //case Keyboard.KEY_SLEEP : return Key.
    }

    return null;
  }

  /**
   * Maps an LWJGL mouse button index to a PlayN button id; LWJGL uses 1 for the
   * right button and 2 for the middle button, hence the swapped-looking cases.
   * Returns null for buttons PlayN does not model.
   */
  private static ButtonEvent.Id getButton(int lwjglButton) {
    switch (lwjglButton) {
      case 0: return ButtonEvent.Id.LEFT;
      case 2: return ButtonEvent.Id.MIDDLE;
      case 1: return ButtonEvent.Id.RIGHT;
      default: return null;
    }
  }
}
package edu.cmu.sv.ws.ssnoc.data.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;

import edu.cmu.sv.ws.ssnoc.common.logging.Log;
import edu.cmu.sv.ws.ssnoc.data.SQL;
import edu.cmu.sv.ws.ssnoc.data.po.MessagePO;
import edu.cmu.sv.ws.ssnoc.data.po.UserPO;

/**
 * JDBC-backed DAO for messages (wall posts, announcements, chats and request
 * messages).
 * <p>
 * Review fix: all JDBC resources ({@link Connection}, {@link PreparedStatement},
 * {@link ResultSet}) are now managed with try-with-resources. The previous code
 * only called {@code conn.close()} on the success path, so every
 * {@link SQLException} leaked a connection. The duplicated "latest messages of
 * type" query logic is also factored into a single private helper.
 */
public class MessageDAOImpl extends BaseDAOImpl implements IMessageDAO {

    /**
     * Inserts the message when no row with its id exists, otherwise updates the
     * existing row.
     *
     * @param messagePO message to persist; a null argument is logged and ignored
     * @return the generated key of an inserted row, or -1 when nothing was
     *         generated (null input, update path, or SQL error)
     */
    @Override
    public long save(MessagePO messagePO) {
        Log.enter(messagePO);
        if (messagePO == null) {
            Log.warn("Inside save method with MessagePO == NULL");
            return -1;
        }
        long insertedId = -1;
        try (Connection conn = getConnection()) {
            // Probe for an existing row to decide between INSERT and UPDATE.
            boolean insert = findMessageById(messagePO.getMessageId()) == null;
            try (PreparedStatement stmt = insert
                    ? conn.prepareStatement(SQL.INSERT_MESSAGE, Statement.RETURN_GENERATED_KEYS)
                    : conn.prepareStatement(SQL.UPDATE_MESSAGE)) {
                stmt.setString(1, messagePO.getContent());
                stmt.setLong(2, messagePO.getAuthor());
                stmt.setLong(3, messagePO.getTarget());
                stmt.setString(4, messagePO.getMessageType());
                stmt.setTimestamp(5, new Timestamp(messagePO.getPostedAt().getTime()));
                if (!insert) {
                    stmt.setLong(6, messagePO.getMessageId());
                }
                int rowCount = stmt.executeUpdate();
                Log.debug("Statement executed, and " + rowCount + " rows inserted.");
                // On the update path this yields no keys and insertedId stays -1,
                // matching the original behavior.
                try (ResultSet generatedKeys = stmt.getGeneratedKeys()) {
                    if (generatedKeys.next()) {
                        insertedId = generatedKeys.getLong(1);
                    }
                }
            }
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit();
        }
        return insertedId;
    }

    /**
     * Shared implementation for the two "latest messages of a given type"
     * finders (wall posts and announcements).
     *
     * @return the matching messages, or null on SQL error
     */
    private List<MessagePO> findLatestMessagesOfType(String messageType, int limit, int offset) {
        List<MessagePO> messages = null;
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement(SQL.FIND_LATEST_MESSAGES_OF_TYPE)) {
            stmt.setString(1, messageType);
            stmt.setInt(2, limit);
            stmt.setInt(3, offset);
            messages = processResults(stmt);
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(messages);
        }
        return messages;
    }

    /** Returns the latest wall messages, newest first, paged by limit/offset. */
    @Override
    public List<MessagePO> findLatestWallMessages(int limit, int offset) {
        Log.enter("Find latest wall messages (limit: " + limit + ", offset: " + offset + ")");
        return findLatestMessagesOfType(SQL.MESSAGE_TYPE_WALL, limit, offset);
    }

    /** Returns the latest announcements, paged by limit/offset. */
    @Override
    public List<MessagePO> findAllAnnouncement(int limit, int offset) {
        Log.enter("Find announcement (limit: " + limit + ", offset: " + offset + ")");
        return findLatestMessagesOfType(SQL.MESSAGE_TYPE_ANNOUNCEMENT, limit, offset);
    }

    /**
     * Looks up a single message by its id.
     *
     * @return the message, or null when no row matches or a SQL error occurs
     */
    @Override
    public MessagePO findMessageById(long messageId) {
        Log.enter(messageId);
        MessagePO po = null;
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement(SQL.FIND_MESSAGE_BY_ID)) {
            stmt.setLong(1, messageId);
            List<MessagePO> messages = processResults(stmt);
            if (messages.size() == 0) {
                Log.debug("No message of id: " + messageId);
            } else {
                po = messages.get(0);
            }
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(po);
        }
        return po;
    }

    /**
     * Executes a prepared query and maps each row (id, content, author, target,
     * type, postedAt) to a {@link MessagePO}. The caller owns the statement;
     * this method owns and closes the result set.
     *
     * @return the mapped rows (possibly empty), or null for a null statement
     */
    private List<MessagePO> processResults(PreparedStatement stmt) {
        Log.enter(stmt);
        if (stmt == null) {
            Log.warn("Inside processResults method with NULL statement object");
            return null;
        }
        Log.debug("Executing stmt = " + stmt);
        List<MessagePO> messages = new ArrayList<MessagePO>();
        try (ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                MessagePO po = new MessagePO();
                po.setMessageId(rs.getLong(1));
                po.setContent(rs.getString(2));
                po.setAuthor(rs.getLong(3));
                po.setTarget(rs.getLong(4));
                po.setMessageType(rs.getString(5));
                po.setPostedAt(new Date(rs.getTimestamp(6).getTime()));
                messages.add(po);
            }
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(messages);
        }
        return messages;
    }

    /** Returns the full chat history between the two users, in either direction. */
    @Override
    public List<MessagePO> findChatHistoryBetweenTwoUsers(long userIdOne, long userIdTwo) {
        Log.enter("Find Histroy messages between " + userIdOne + ", and : " + userIdTwo + ")");
        List<MessagePO> messages = null;
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement(SQL.FIND_ALL_MESSAGES_BETWEEN_TWO_USERS)) {
            // Both (one -> two) and (two -> one) directions are bound.
            stmt.setLong(1, userIdOne);
            stmt.setLong(2, userIdTwo);
            stmt.setLong(3, userIdTwo);
            stmt.setLong(4, userIdOne);
            stmt.setString(5, SQL.MESSAGE_TYPE_CHAT);
            messages = processResults(stmt);
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(messages);
        }
        return messages;
    }

    /**
     * Returns the distinct users this user has chatted with, whether they
     * appear as author or as target of a chat message.
     */
    @Override
    public List<UserPO> findChatBuddies(long userId) {
        Log.enter("find chat buddies for userID: " + userId);
        List<UserPO> users = new ArrayList<UserPO>();
        try (Connection conn = getConnection()) {
            // Shared between both passes so each buddy is added only once.
            HashSet<Long> chatBuddiesSet = new HashSet<Long>();
            try (PreparedStatement stmt = conn.prepareStatement(SQL.FIND_CHAT_BUDDIES_AUTHOR)) {
                stmt.setLong(1, userId);
                stmt.setString(2, SQL.MESSAGE_TYPE_CHAT);
                try (ResultSet rs = stmt.executeQuery()) {
                    findCharBuddyTarget(users, rs, chatBuddiesSet);
                }
            }
            try (PreparedStatement stmt = conn.prepareStatement(SQL.FIND_CHAT_BUDDIES_TARGET)) {
                stmt.setLong(1, userId);
                stmt.setString(2, SQL.MESSAGE_TYPE_CHAT);
                try (ResultSet rs = stmt.executeQuery()) {
                    findChatBuddyAuthor(users, rs, chatBuddiesSet);
                }
            }
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(users);
        }
        return users;
    }

    /**
     * Collects the distinct "author" column values of the result set into
     * {@code users}, skipping ids already present in {@code chatBuddiesSet}.
     */
    private void findChatBuddyAuthor(List<UserPO> users, ResultSet rs,
            HashSet<Long> chatBuddiesSet) throws SQLException {
        while (rs.next()) {
            long authorBuddyId = rs.getLong("author");
            if (!chatBuddiesSet.contains(authorBuddyId)) {
                chatBuddiesSet.add(authorBuddyId);
                UserPO po = DAOFactory.getInstance().getUserDAO().findByUserID(authorBuddyId);
                if (po != null) {
                    users.add(po);
                }
            }
        }
    }

    /**
     * Collects the distinct "target" column values of the result set into
     * {@code users}, skipping ids already present in {@code chatBuddiesSet}.
     */
    private void findCharBuddyTarget(List<UserPO> users, ResultSet rs,
            HashSet<Long> chatBuddiesSet) throws SQLException {
        while (rs.next()) {
            long tartgetBuddyId = rs.getLong("target");
            if (!chatBuddiesSet.contains(tartgetBuddyId)) {
                chatBuddiesSet.add(tartgetBuddyId);
                UserPO po = DAOFactory.getInstance().getUserDAO().findByUserID(tartgetBuddyId);
                if (po != null) {
                    users.add(po);
                }
            }
        }
    }

    /** Deletes all rows from the message table. */
    @Override
    public void truncateMessageTable() {
        Log.enter("enter delete all messages records");
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement(SQL.DELETE_MESSAGES)) {
            stmt.executeUpdate();
        } catch (SQLException e) {
            // NOTE(review): other methods route errors through handleException;
            // kept as printStackTrace to preserve the original behavior here.
            e.printStackTrace();
        }
    }

    /** Returns all chat messages posted at or after the given date. */
    @Override
    public List<MessagePO> findChatMessagesSinceDate(Date date) {
        Log.enter("find messages since date: " + date);
        List<MessagePO> messages = null;
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement(SQL.FIND_CHAT_MESSAGES_SINCE_DATE)) {
            stmt.setTimestamp(1, new Timestamp(date.getTime()));
            stmt.setString(2, SQL.MESSAGE_TYPE_CHAT);
            messages = processResults(stmt);
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(messages);
        }
        return messages;
    }

    /** Returns the messages attached to a request, paged by limit/offset. */
    @Override
    public List<MessagePO> findAllRequestMessages(long requestid, int limit, int offset) {
        Log.enter("Find Request messages (limit: " + limit + ", offset: " + offset + ")");
        List<MessagePO> messages = null;
        try (Connection conn = getConnection();
             PreparedStatement stmt = conn.prepareStatement(SQL.FIND_LATEST_MESSAGES_OF_REQUEST)) {
            stmt.setString(1, SQL.MESSAGE_TYPE_REQUEST);
            stmt.setLong(2, requestid);
            stmt.setInt(3, limit);
            stmt.setInt(4, offset);
            messages = processResults(stmt);
        } catch (SQLException e) {
            handleException(e);
        } finally {
            Log.exit(messages);
        }
        return messages;
    }
}
package com.runetooncraft.warpigeon.engine.level;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.runetooncraft.warpigeon.engine.GameType;
import com.runetooncraft.warpigeon.engine.WPEngine1;
import com.runetooncraft.warpigeon.engine.WPEngine4;
import com.runetooncraft.warpigeon.engine.entity.Entity;
import com.runetooncraft.warpigeon.engine.entity.mob.Player;
import com.runetooncraft.warpigeon.engine.graphics.ScreenEngine2D;
import com.runetooncraft.warpigeon.engine.graphics.Sprite;
import com.runetooncraft.warpigeon.engine.level.Layer.*;
import com.runetooncraft.warpigeon.engine.level.lighting.LightingType;
import com.runetooncraft.warpigeon.engine.level.specialtiles.removeCollisionTile;
import com.runetooncraft.warpigeon.engine.utils.FileSystem;
import com.runetooncraft.warpigeon.engine.utils.MouseEvents;
import com.runetooncraft.warpigeon.engine.utils.Vector2Type;
import com.runetooncraft.warpigeon.engine.utils.Vector2i;
import com.runetooncraft.warpigeon.engine.utils.YamlConfig;

import edu.emory.mathcs.backport.java.util.Collections;

/**
 * A tile-based game level: layered tile grids, optional collision layers, A*
 * pathfinding, an entity queue, and load/save support from a per-level working
 * directory ({@code <dir>/Levels/<name>/}).
 * NOTE(review): this class continues past the end of this chunk; documentation
 * here covers only the visible portion.
 */
public class Level {
    protected int width, height;
    protected int pixelWidth, pixelHeight; //Pixel width and height of entire level, not screen
    protected BasicTile overlayTile; // SDK tile-grid overlay tile, built by setupOverlay()
    protected int PSpawnX, PSpawnY;  // player spawn position
    public ArrayList<Layer> LayerList = new ArrayList<Layer>();
    public ArrayList<Layer> collisionLayers = new ArrayList<Layer>();
    // Global tile-id registries shared by ALL levels (static).
    public static HashMap<Integer, Tile> TileIDS = new HashMap<Integer, Tile>();
    public static HashMap<Integer, Tile> CollTileIDS = new HashMap<Integer, Tile>();
    public static Tile VoidTile;
    public static Tile LoadingTile = null;
    public static Tile EmptyTile = new EmptyTile(null, -1);
    public static int PDR = 5;
    public static int PDRX = 5;
    public static int PDRY = 5;
    // NOTE(review): static name means only one level name exists process-wide.
    public static String name = "UnNamed";
    private File workingDir;
    private int x0,x1,y0,y1;
    public int Layers = 2;
    private double x0double,y0double;
    public boolean render = true; // gate used to suppress rendering during (re)loads
    public boolean GravityEnabled = false;
    public Gravity gravity = null;
    public WPEngine4 engine;
    public ArrayList<Boolean> RenderLayers = new ArrayList<Boolean>(); // per-layer visibility
    private boolean isSDK;
    private Sprite SDKHoverTile = null; //Just in SDK
    private Vector2i SDKHoverCoords = null;
    CoordinateHandler CoordinateHandler = new CoordinateHandler();
    private LightingType lightingType = LightingType.BASIC_AMBIENT;
    private Layer lightingLayer = null;
    public static Tile BlackTile = new Tile(new Sprite(Vector2i.TILE_SIZEX,Vector2i.TILE_SIZEY,0),-10, "Black Tile");

    public Layer getLightingLayer() {
        return lightingLayer;
    }

    public LightingType getLightingtype() {
        return lightingType;
    }

    // Sets the lighting mode and the layer it applies to.
    public void setLightingtype(LightingType lightingType, Layer lightingLayer) {
        this.lightingType = lightingType;
        this.lightingLayer = lightingLayer;
    }

    public int getPixelWidth() {
        return pixelWidth;
    }

    public int getPixelHeight() {
        return pixelHeight;
    }

    public Sprite getSDKHoverTile() {
        return SDKHoverTile;
    }

    public void setSDKHoverTile(Sprite SDKHoverTile) {
        this.SDKHoverTile = SDKHoverTile;
    }

    public boolean isSDK() {
        return isSDK;
    }

    public boolean renderColl;       // whether to draw the selected collision layer
    public int collLayerselected;
    public boolean overlayEnabled = false;
    public CollisionType colltype = null; //Set this on level creation, set config values and create collision layers
    public collisionTiles colltiles;

    // Orders A* open-list nodes by ascending fCost.
    private Comparator<Node> nodeSorter = new Comparator<Node>() {
        public int compare(Node n0, Node n1) {
            if(n1.fCost < n0.fCost) return 1;
            if(n1.fCost > n0.fCost) return -1;
            return 0;
        }
    };

    private ArrayList<Entity> Que = new ArrayList<Entity>(); // entities active in this level

    /**
     * The amount of layers rendered under the player's current layer
     */
    public int getLayersRenderedUnder() {
        return layersRenderedUnder;
    }

    /**
     * The amount of layers rendered under the player's current layer
     */
    public void setLayersRenderedUnder(int layersRenderedUnder) {
        this.layersRenderedUnder = layersRenderedUnder;
    }

    /**
     * The amount of layers rendered Above the player's current layer
     */
    public int getLayersRenderedAbove() {
        return layersRenderedAbove;
    }

    /**
     * The amount of layers rendered Above the player's current layer
     */
    public void setLayersRenderedAbove(int layersRenderedAbove) {
        this.layersRenderedAbove = layersRenderedAbove;
    }

    private int layersRenderedUnder = 1;
    private int layersRenderedAbove = 1;

    /**
     * Grows the level by (xExpand, yExpand) tiles: snapshots every layer's
     * tiles, rebuilds the level via NewLevel, then re-applies the snapshot.
     */
    public void ExpandLevel(int xExpand, int yExpand) {
        render = false;
        HashMap<Integer,HashMap<Vector2i,Tile>> LayerMap = new HashMap<Integer,HashMap<Vector2i,Tile>>();
        HashMap<Vector2i,Tile> TileMap = new HashMap<Vector2i,Tile>();
        Vector2i Tc;
        for(int layer = 1; layer <= Layers; layer++) {
            for(int yt = 0; yt < height; yt++) {
                for(int xt = 0; xt < width; xt++) {
                    Tc = new Vector2i(xt,yt, Vector2Type.BY_TILE);
                    TileMap.put(Tc, getTileLayer(LayerList.get(layer-1),xt,yt));
                }
            }
            LayerMap.put(layer, TileMap);
            TileMap = new HashMap<Vector2i,Tile>(); // fresh map for the next layer
        }
        NewLevel(width + xExpand, height + yExpand, workingDir, name, colltype);
        render = false;
        Layers = LayerMap.size();
        for(int clayer = 1; clayer <= Layers; clayer++) {
            LayerList.add(new Layer(new int[width * height],LayerType.DEFAULT_LAYER));
            // NOTE(review): TileMap was re-assigned to a fresh empty map at the end
            // of the snapshot loop above, so this loop iterates an EMPTY map and the
            // saved tiles are never restored. It likely should iterate
            // LayerMap.get(clayer) — confirm intent before fixing.
            for(Vector2i coords: TileMap.keySet()) {
                setTile(coords, TileMap.get(coords), LayerList.get(clayer-1));
            }
        }
        render = true;
    }

    /**
     * A* Search YEAH! Pathfinding :D
     */
    public List<Node> findPath(Vector2i start, Vector2i goal) {
        //System.out.println(goal.getX() + "," + goal.getY());
        List<Node> openList = new ArrayList<Node>();
        List<Node> closedList = new ArrayList<Node>();
        Node current = new Node(start, null, 0, Vector2i.getDistance(start, goal));
        openList.add(current);
        while(openList.size() > 0) {
            Collections.sort(openList, nodeSorter); // cheapest fCost first
            current = openList.get(0);
            if(current.tile.equals(goal)) {
                // Goal reached: walk parents back to the start to build the path.
                List<Node> path = new ArrayList<Node>();
                while (current.parent != null) {
                    path.add(current);
                    current = current.parent;
                }
                openList.clear();
                closedList.clear();
                return path;
            }
            openList.remove(current);
            closedList.add(current);
            // Examine the 8 neighbors (i == 4 is the center tile itself).
            for (int i = 0; i < 9; i++) {
                if(i == 4) continue;
                int x = current.tile.getX();
                int y = current.tile.getY();
                int xi = (i % 3) - 1;
                int yi = (i / 3) - 1;
                Tile at = getTileLayer(getLayer(1),x + xi, y + yi);
                if(at == null) continue;
                if (at.solid()) continue;
                Vector2i a = new Vector2i(x + xi, y + yi, Vector2Type.BY_TILE);
                // NOTE(review): operator precedence makes this parse as
                // ((current.gCost + distance) == 1) ? 1 : 0.95, i.e. gCost collapses
                // to 1 or 0.95 instead of accumulating. Probably intended:
                // current.gCost + (distance == 1 ? 1 : 0.95). Confirm before fixing.
                double gCost = current.gCost + Vector2i.getDistance(current.tile, a) == 1 ? 1 : 0.95;
                double hCost = Vector2i.getDistance(a, goal);
                Node node = new Node(a, current, gCost, hCost);
                if (VecInList(closedList, a) && gCost >= node.gCost) continue;
                if (!VecInList(openList, a) || gCost < node.gCost) openList.add(node);
            }
        }
        closedList.clear();
        return null; // no path found
    }

    // True if any node in the list sits on the given tile coordinate.
    private boolean VecInList(List<Node> list, Vector2i vector) {
        for (Node n : list) {
            if(n.tile.equals(vector)) return true;
        }
        return false;
    }

    public List<Entity> getEntityQue() {
        return Que;
    }

    /**
     * @param entity
     * @param radius in pixels
     * @return a list of entities within the radius around the selected entity.
     */
    public List<Entity> getEntityRadius(Entity entity, int radius) {
        List<Entity> result = new ArrayList<Entity>();
        for(int i = 0; i < Que.size(); i++) {
            Entity e = Que.get(i);
            int dx = Math.abs(e.getX() - entity.getX());
            int dy = Math.abs(e.getY() - entity.getY());
            double Distance = Math.sqrt((dx*dx)+(dy*dy)); // Euclidean distance in pixels
            if(Distance <= radius) result.add(e);
        }
        return result;
    }

    // Returns the player when within `radius` pixels of the entity, else null.
    public Player getPlayerRadius(Entity entity, int radius) {
        Player player = engine.getPlayer();
        int dx = Math.abs(player.getX() - entity.getX());
        int dy = Math.abs(player.getY() - entity.getY());
        double Distance = Math.sqrt((dx*dx)+(dy*dy));
        if(Distance <= radius) {
            return player;
        } else {
            return null;
        }
    }

    /**
     * Level constructor.
     * Make sure to set individual TileID's using the TileIDS hashmap.
     * @param width
     * @param height
     */
    public Level(int width, int height, WPEngine4 engine, CollisionType colltype) {
        this.engine = engine;
        this.colltype = colltype;
        TileIDS.put(-1, EmptyTile);
        this.width = width;
        this.height = height;
        this.pixelWidth = width * Vector2i.TILE_SIZEX;
        this.pixelHeight = height * Vector2i.TILE_SIZEY;
        Layer mainLayer = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        Layer Layer2 = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        LayerList.add(mainLayer);
        LayerList.add(Layer2);
        isSDK = engine.gametype.equals(GameType.PIGION_SDK);
        for(int i = 0; i < Layers; i++) {
            RenderLayers.add(i, true);
        }
        generateLevel();
        if(colltype == CollisionType.ADVANCED_COLLBOX) advancedCollLayers();
        setupOverlay();
    }

    // Lazily builds the collision-tile helper sized to the screen.
    private void collTiles() {
        colltiles = new collisionTiles(engine.getScreenEngine2D().PixelWidth,engine.getScreenEngine2D().PixelHeight);
    }

    /**
     * Rebuilds one collision layer per tile layer, marking every tile whose
     * Tile.Collide flag is set with id -2.
     */
    private void advancedCollLayers() {
        collisionLayers.clear();
        for(int i = 1; i<=Layers; i++) {
            Layer layeri_collision = new Layer(new int[width * height], LayerType.COLLISION_LAYER, "Layer" + i +"_Collision");
            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x++) {
                    Tile gTile = getTileLayer(LayerList.get(i-1), x, y);
                    if(gTile.Collide) {
                        // NOTE(review): index uses `x+y*height` while setTile/getTile
                        // index with a `width` stride — wrong for non-square levels.
                        // Confirm and change to x+y*width.
                        layeri_collision.tiles[x+y*height] = -2;
                    }
                }
            }
            collisionLayers.add(layeri_collision);
        }
    }

    /**
     * Recomputes a collision layer from the given tile layer and returns it
     * (the passed-in collisionLayer reference is discarded and replaced).
     */
    public Layer resetCollisionLayer(Layer layer, Layer collisionLayer) {
        collisionLayer = new Layer(new int[width * height], LayerType.COLLISION_LAYER);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                Tile gTile = getTileLayer(layer, x, y);
                if(gTile.Collide) {
                    // NOTE(review): same `x+y*height` stride concern as
                    // advancedCollLayers above.
                    collisionLayer.tiles[x+y*height] = -2;
                }
            }
        }
        return collisionLayer;
    }

    /**
     * PigionSDK Tile Overlay
     */
    private void setupOverlay() {
        int spriteSize = VoidTile.sprite.SIZE;
        Sprite Overlaysprite = new Sprite(spriteSize,0xFFFF00D0);
        int col = 0xFF000000;
        // Paint the top row and left column black to form the grid lines.
        for(int i = 0; i < spriteSize; i++) {
            Overlaysprite.pixels[i] = col;
            Overlaysprite.pixels[(i * spriteSize)] =col;
            //Overlaysprite.pixels[(i * spriteSize) + 31] = 0xFF808080;
            //Overlaysprite.pixels[((spriteSize * spriteSize) - (31)) + i] = 0xFF808080;
        }
        overlayTile = new BasicTile(Overlaysprite, -2, "Overlay", false);
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    /**
     * Creates a new level.
     * Make sure to set individual TileID's using the TileIDS hashmap.
     * @param width
     * @param height
     * @param workingDir
     */
    public Level(int width, int height, File workingDir, String LevelName, WPEngine4 engine, CollisionType colltype) {
        this.engine = engine;
        this.colltype = colltype;
        TileIDS.put(-1, EmptyTile);
        name = LevelName;
        this.workingDir = new File(workingDir.getPath() + "/Levels/" + name + "/");
        this.workingDir.mkdirs();
        this.width = width;
        this.height = height;
        this.pixelWidth = width * Vector2i.TILE_SIZEX;
        this.pixelHeight = height * Vector2i.TILE_SIZEY;
        Layer mainLayer = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        Layer Layer2 = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        LayerList.add(mainLayer);
        LayerList.add(Layer2);
        isSDK = engine.gametype.equals(GameType.PIGION_SDK);
        for(int i = 0; i < Layers; i++) {
            RenderLayers.add(i, true);
        }
        generateLevel();
        if(colltype == CollisionType.ADVANCED_COLLBOX) advancedCollLayers();
        setupOverlay();
    }

    /**
     * Loads a level from file.
     * @param path
     */
    public Level(File Dir, String LevelName, WPEngine4 engine) {
        this.engine = engine;
        TileIDS.put(-1, EmptyTile);
        isSDK = engine.gametype.equals(GameType.PIGION_SDK);
        LoadLevelFile(Dir,LevelName);
        setupOverlay();
    }

    // Toggles drawing of one collision layer (SDK debugging aid).
    public void renderCollLayer(boolean render, int layer) {
        renderColl = render;
        collLayerselected = layer;
    }

    /**
     * Re-initializes this instance as a brand-new empty level of the given
     * size, resetting layers and (if requested) collision layers.
     */
    public void NewLevel(int width, int height, File workingDir, String LevelName, CollisionType colltype) {
        this.colltype = colltype;
        render = false;
        Layers = 2;
        LayerList.clear();
        TileIDS.put(-1, EmptyTile);
        name = LevelName;
        this.workingDir = new File(workingDir.getPath() + "/Levels/" + name + "/");
        this.workingDir.mkdirs();
        this.width = width;
        this.height = height;
        this.pixelWidth = width * Vector2i.TILE_SIZEX;
        this.pixelHeight = height * Vector2i.TILE_SIZEY;
        Layer mainLayer = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        Layer Layer2 = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        LayerList.add(mainLayer);
        LayerList.add(Layer2);
        isSDK = engine.gametype.equals(GameType.PIGION_SDK);
        for(int i = 0; i < Layers; i++) {
            RenderLayers.add(i, true);
        }
        generateLevel();
        if(colltype == CollisionType.ADVANCED_COLLBOX) advancedCollLayers();
        setupOverlay();
        render = true;
        System.out.println( "\"" + LevelName + "\" generated \n" + "--------------------\n" + "Bounds{" + width + "," + height + "} \n" + "Layers{" + Layers + "} \n" + "Collision{" + colltype +"} \n" + "workingDir{" + this.workingDir + "}\n" + "--------------------\n");
    }

    /**
     * Generates a default level
     * 64x64
     * Basic collision type
     */
    private void GenLevelDefault() {
        workingDir.mkdirs();
        this.width = 64;
        this.height = 64;
        this.pixelWidth = width * Vector2i.TILE_SIZEX;
        this.pixelHeight = height * Vector2i.TILE_SIZEY;
        colltype = CollisionType.BASIC;
        Layer mainLayer = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        Layer Layer2 = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
        LayerList.add(mainLayer);
        LayerList.add(Layer2);
        isSDK = engine.gametype.equals(GameType.PIGION_SDK);
        generateLevel();
        setupOverlay();
    }

    /**
     * Writes a tile id into the given layer at the tile coordinate, and keeps
     * the matching collision layer in sync when advanced collision is active.
     * The VoidTile id is stored as -1 (empty).
     */
    public void setTile(Vector2i coords, Tile tile, Layer Layer) {
        int Tiley = coords.tileY() * (width);
        int Tilex = coords.tileX();
        if((Tiley + Tilex) <= (width * height) && (Tiley + Tilex) >= 0) {
            int ChosenTile = Tiley + Tilex;
            int TileID = tile.getTileID();
            if(TileID == VoidTile.getTileID()) TileID = -1;
            Layer.tiles[ChosenTile] = TileID;
            if(colltype == CollisionType.ADVANCED_COLLBOX && tile.Collide) {
                int index = indexofLayer(Layer);
                if(index >= 0) {
                    collisionLayers.get(index).tiles[ChosenTile] = -2; //change later when collision layers is worked on thoroughly
                } else {
                    System.out.println("Returned layer index of " + index + " during setTile. unable to update collisions correctly.");
                }
            }
        }
    }

    // Reads the tile at the coordinate from the layer; out-of-range -> VoidTile.
    public Tile getTile(Vector2i coords, Layer layer) {
        int Tiley = coords.tileY() * (width);
        int Tilex = coords.tileX();
        if((Tiley + Tilex) <= (width * height) && (Tiley + Tilex) >= 0) {
            int ChosenTile = Tiley + Tilex;
            return TileIDS.get(layer.tiles[ChosenTile]);
        }
        return VoidTile;
    }

    // Position of the layer within LayerList, or -1 if absent.
    private int indexofLayer(Layer layer) {
        return LayerList.indexOf(layer);
    }

    public Layer getmainLayer() {
        return LayerList.get(0);
    }

    // Fallback for unreadable level data: regenerate a default level.
    private void CorruptLevel() {
        System.out.println("Invalid of corrupt level file, generating new level.");
        GenLevelDefault();
    }

    /**
     * Loads a level from {@code <Dir>/Levels/<LevelName>/}: reads level.yml for
     * the metadata, then one LayerN.dat file per layer (plus collision .dat
     * files when the collision type is ADVANCED_COLLBOX). Missing or corrupt
     * data falls back to a generated 64x64 default level.
     */
    @SuppressWarnings("rawtypes")
    public void LoadLevelFile(File Dir, String LevelName) {
        render = false;
        name = LevelName;
        workingDir = new File(Dir.getPath() + "/Levels/" + name + "/");
        LayerList = new ArrayList<Layer>();
        isSDK = engine.gametype.equals(GameType.PIGION_SDK);
        if(!workingDir.exists()) {
            System.out.println("Level " + name + " does not exist, generating level with size 64*64.");
            GenLevelDefault();
        } else {
            File Layer1 = new File(workingDir, "Layer1.dat");
            File YMLFile = new File(workingDir, "level.yml");
            if(!Layer1.exists() || !YMLFile.exists()) {
                CorruptLevel();
            } else {
                YamlConfig YMLConfig = new YamlConfig(YMLFile);
                if(YMLConfig.getMap() == null) {
                    CorruptLevel();
                } else {
                    Map config = YMLConfig.getMap();
                    try {
                        name = (String) config.get("Name");
                        width = Integer.parseInt((String) config.get("Width"));
                        height = Integer.parseInt((String) config.get("Height"));
                        this.pixelWidth = width * Vector2i.TILE_SIZEX;
                        this.pixelHeight = height * Vector2i.TILE_SIZEY;
                        Layers = Integer.parseInt((String) config.get("Layers"));
                        colltype = CollisionType.valueOf((String) config.get("CollisionType"));
                        for(int i = 1; i<=Layers; i++) {
                            String LayerString = "Layer" + i + ".dat";
                            File LayerFile;
                            if(i == 1) {
                                LayerFile = Layer1;
                            } else {
                                LayerFile = new File(workingDir, LayerString);
                            }
                            int[] Tilesload = FileSystem.LoadDatFile(LayerFile);
                            Layer layer = new Layer(new int[width * height],LayerType.DEFAULT_LAYER);
                            for(int tilenumber = 0; tilenumber < (width * height); tilenumber++) {
                                layer.tiles[tilenumber] = Tilesload[tilenumber];
                            }
                            LayerList.add(layer);
                        }
                        if(colltype.equals(CollisionType.ADVANCED_COLLBOX)) {
                            for(int i = 1; i<=Layers; i++) {
                                File Layeri_collision_file = new File(workingDir, "Layer" + i + "_Collision.dat");
                                int[] Layeri_collision_file_load = FileSystem.LoadDatFile(Layeri_collision_file);
                                Layer Layeri_collision = new Layer(Layeri_collision_file_load,LayerType.COLLISION_LAYER);
                                collisionLayers.add(Layeri_collision);
                            }
                            collTiles();
                        }
                    } catch(Exception e) {
                        // Any parse/IO failure is treated as a corrupt level.
                        e.printStackTrace();
                        CorruptLevel();
                    }
                }
            }
        }
        for(int i = 0; i < Layers; i++) {
            RenderLayers.add(i, true);
        }
        render = true;
        System.out.println( "\"" + LevelName + "\" loaded \n" + "--------------------\n" + "Bounds{" + width + "," + height + "} \n" + "Layers{" + Layers + "} \n" + "Collision{" + colltype +"} \n" + "workingDir{" + this.workingDir + "}\n" + "--------------------\n");
    }

    // Fills the main layer with the empty tile id.
    protected void generateLevel() {
        for(int t = 0; t < LayerList.get(0).tiles.length; t++) {
            LayerList.get(0).tiles[t] = EmptyTile.getTileID();
        }
    }

    /**
     * List of updates:
     * - Entities are attached to their layers if entity.shouldChangeLayer() is true.
* - Layer render Range is updated according to the player's layer position */ public void update() { for(int layer = 0; layer < LayerList.size(); layer++) { Layer l = LayerList.get(layer); if(RenderLayers.get(layer).equals(true)) { l.update(engine); } } for(int e = 0; e < Que.size(); e++) { Entity entity = Que.get(e); if(entity.shouldChangeLayer()) { getLayer(entity.getLayer()).addEntity(entity); } entity.update(); } if(engine.getPlayer() != null) { int cameraLayer = engine.getCamera().getLayer(); for(int i = 1; i <= RenderLayers.size(); i++) { if(i == cameraLayer || i >= (cameraLayer - layersRenderedUnder) && i <= (cameraLayer + layersRenderedAbove)) { RenderLayers.set(i - 1, true); } else { RenderLayers.set(i - 1, false); } } } if(isSDK) { SDKHoverCoords = CoordinateHandler.getTileCoordinateAtMouse(MouseEvents.mouseX, MouseEvents.mouseY, engine.getScreenEngine2D(), this); } if(lightingLayer != null) { lightingLayer.update(engine); } } /** * Time basically will handle events that happen at specific times in the level. */ @SuppressWarnings("unused") private void time() { } /** * Renders what the level is scrolled to. 
*/ public void render(int xScroll, int yScroll, ScreenEngine2D screen) { if(render) { xScroll = xScroll - screen.width / 2; yScroll = yScroll - screen.height / 2; if(engine.getCamera().isFixingatExtremes()) { if(xScroll <= engine.getCamera().getMinX()) xScroll = engine.getCamera().getMinX(); if(xScroll + (screen.width - 2) >= engine.getCamera().getMaxX()) xScroll = engine.getCamera().getMaxX() - (screen.width - 2); if(yScroll <= engine.getCamera().getMinY()) yScroll = engine.getCamera().getMinY(); if(yScroll + (screen.height - 9) >= engine.getCamera().getMaxY()) yScroll = engine.getCamera().getMaxY() - (screen.height - 9); } screen.setOffset(xScroll, yScroll); x0double = xScroll; x0 = xScroll >> PDR; x1 = (xScroll + screen.width + screen.ImageToPixelRatio) >> PDR; y0double = yScroll; y0 = yScroll >> PDR; y1 = (yScroll + screen.height + screen.ImageToPixelRatio) >> PDR; for(int layer = 0; layer < LayerList.size(); layer++) { Layer l = LayerList.get(layer); if(RenderLayers.get(layer).equals(true)) { l.render(this,screen,y0,y1,x0,x1); } else { if(layer == 0) { renderLayerofVoidTiles(this,screen,y0,y1,x0,x1, l); } } } if(renderColl) { Layer coll = collisionLayers.get(collLayerselected - 1); coll.render(this,screen,y0,y1,x0,x1); } if(!lightingType.equals(LightingType.BASIC_AMBIENT)) { lightingLayer.render(this,screen,yScroll,yScroll + screen.height + screen.ImageToPixelRatio,xScroll,xScroll + screen.width + screen.ImageToPixelRatio); //Scroll because this deals with pixels } if(isSDK) { if(SDKHoverTile != null && SDKHoverCoords != null) { screen.renderSpriteWithAlpha(SDKHoverCoords.getPixelX(), SDKHoverCoords.getPixelY(), SDKHoverTile, 40); } } } } private void renderLayerofVoidTiles(Level level, ScreenEngine2D screen, int y02, int y12, int x02, int x12, Layer l) { for (int y = y0; y < y1; y++) { for (int x = x0; x < x1; x++) { VoidTile.render(x, y, screen, l); } } } /** * @param e * @param x * @param y * @return the tile at the given position on the layer the entity is 
on */ public Tile getTile(Entity e, int x, int y) { if(x < 0 || y < 0 || x >= width || y >= height) return VoidTile; if(getLayer(e.getLayer()).tiles[x + y * width] < TileIDS.size()) { return TileIDS.get(getLayer(e.getLayer()).tiles[x + y * width]); } else { return VoidTile; } } /** * @param e * @return the tile directly behind the entity */ public Tile getTile(Entity e) { return getTile(e, e.getX(), e.getY()); } public Tile getTileLayer(Layer layer, int x, int y) { int[] SelectedLayer = layer.tiles; if(x < 0 || y < 0 || x >= width || y >= height) return VoidTile; if(SelectedLayer[x + y * width] < TileIDS.size()) { return TileIDS.get(SelectedLayer[x + y * width]); } else { return EmptyTile; } } public int getIntfromArray(int[] p, int x, int y) { if(x < 0 || y < 0 || x >= width || y >= height) return 0; return p[x + y * width]; } public Tile getTileLayerCollision(Layer layer, int x, int y) { if(x < 0 || y < 0 || x >= width || y >= height) return VoidTile; if (CollTileIDS.get(layer.tiles[x + y * width]) != null) { return CollTileIDS.get(layer.tiles[x + y * width]); } else { return EmptyTile; } } public Tile getTileIntArray(int[] Layer, int x, int y) { if(x < 0 || y < 0 || x >= width || y >= height) return VoidTile; if(Layer[x + y * width] < TileIDS.size()) { return TileIDS.get(Layer[x + y * width]); } else { return EmptyTile; } } @SuppressWarnings({ "unchecked", "rawtypes" }) public void SaveLevel() { System.out.println("Attempting level save."); new Thread(new Runnable() { public void run() { for(int i = 1; i<=Layers; i++) { File layer_file = new File(workingDir, ("Layer" + i + ".dat")); FileSystem.SaveDatFile(LayerList.get((i - 1)).tiles, layer_file); } if(colltype.equals(CollisionType.ADVANCED_COLLBOX)) { for(int e = 1; e<=Layers; e++) { File layer_file = new File(workingDir, ("Layer" + e + "_Collision.dat")); FileSystem.SaveDatFile(collisionLayers.get(e - 1).tiles, layer_file); } } File LevelConfig = new File(workingDir, "level.yml"); try { 
LevelConfig.createNewFile(); YamlConfig configYML = new YamlConfig(LevelConfig); Map config = configYML.getMap(); if(config == null) config = new HashMap(); config.put("WarPigionVersion", WPEngine1.Version); config.put("Name", name); config.put("Width", width); config.put("Height", height); config.put("Layers", Layers); config.put("CollisionType", colltype); configYML.setMap(config); configYML.save(); } catch (IOException e) { e.printStackTrace(); } } }).start(); System.out.println("Level save complete."); } /** * @return X value at the very LEFT of the screen according to the position in the level. * This returns in whatever the tile size of the level is. Make sure to handle it that way. */ public int getLeftBoundXScroll() { return x0; } public double getLeftBoundXScrolldouble() { return x0double; } /** * @return X value at the very RIGHT of the screen according to the position in the level. * This returns in whatever the tile size of the level is. Make sure to handle it that way. */ public int getRightBoundXScroll() { return x1; } /** * @return Y value at the very TOP of the screen according to the position in the level. * This returns in whatever the tile size of the level is. Make sure to handle it that way. */ public int getTopBoundYScroll() { return y0; } public double getTopBoundYScrolldouble() { return y0double; } /** * @return X value at the very BOTTOM of the screen according to the position in the level. * This returns in whatever the tile size of the level is. Make sure to handle it that way. 
*/ public int getBottomBoundYScroll() { return y1; } @SuppressWarnings("unused") private ArrayList<Integer> toarray(int[] tiles) { ArrayList<Integer> ReturnList = new ArrayList<Integer>(); for(int i : tiles) { ReturnList.add(i); } return ReturnList; } public void deleteLayerFile(int selectedLayer) { File layer = new File(workingDir, "Layer" + selectedLayer + ".dat"); File layerColl = new File(workingDir, "Layer" + selectedLayer + "_Collision.dat"); if(layer.exists()) layer.delete(); if(layerColl.exists()) layerColl.delete(); if(Layers >= selectedLayer) { //rename all layer files after this one for(int i = (selectedLayer + 1); i <= (Layers + 1); i++) { File Old = new File(workingDir, "Layer" + i + ".dat"); File New = new File(workingDir, "Layer" + (i - 1) + ".dat"); Old.renameTo(New); if(colltype.equals(CollisionType.ADVANCED_COLLBOX)) { File Old2 = new File(workingDir, "Layer" + i + "_Collision.dat"); File New2 = new File(workingDir, "Layer" + (i - 1) + "_Collision.dat"); Old2.renameTo(New2); } } } } public File getWorkingDir() { return workingDir; } public int getLayerID(Layer selectedLayer) { return LayerList.indexOf(selectedLayer) + 1; } public Layer getLayer(int layer) { return LayerList.get(layer-1); } public Layer getCollisionLayer(int layer) { return collisionLayers.get(layer-1); } static class collisionTiles { public Sprite default_collide_Sprite; public Sprite default_notcollide_Sprite; public Tile default_collide; public Tile default_notcollide; collisionTiles(int TileSizex, int TileSizey) { default_collide_Sprite = new Sprite(TileSizex, TileSizey,0xFFFF0000); default_notcollide_Sprite = new Sprite(TileSizex, TileSizey,0xFF4CFF00); default_collide = new Tile(default_collide_Sprite,-2,"Collide"); default_collide.Collide = true; default_notcollide = new removeCollisionTile(default_notcollide_Sprite,-3,"Remove Collide"); default_collide.isCollisionLayerTile = true; Level.CollTileIDS.put(-2, default_collide); Level.CollTileIDS.put(-3, default_notcollide); } } 
//public boolean tileCollision(double x, double y, int sizex, int sizey, int layerPresent, int xOffset, int yOffset) { // boolean solid = false; // int width = ScreenEngine2D.PixelWidth; // int height = ScreenEngine2D.PixelHeight; // int Left = (int)(x - 1) >> PDRX; // int Right = (int)(x + 1) >> PDRX; // int Top = (int)(y - 1) >> PDRY; // int Bottom = (int)(y + 1) >> PDRY; //// if (colltype.equals(CollisionType.BASIC)) { //// if (getTileLayer(getLayer(layerPresent), xp, yp).collide(i)) solid = true; //// } else if(colltype.equals(CollisionType.ADVANCED_COLLBOX)) { //// if (getTileLayerCollision(getCollisionLayer(layerPresent), xp, yp).collide(i)) solid = true; //// } // return solid; //} public boolean tileCollision(double x, double y, int sizex, int sizey, int layerPresent, int xOffset, int yOffset) { boolean solid = false; for(int i = 0; i < 4; i++) { int xp = (int)(x - (i % 2) * 31) / engine.getScreenEngine2D().PixelWidth; int yp = (int)(y - (i / 2) * 31) / engine.getScreenEngine2D().PixelHeight; // int xp2 = (int)(x - ((i / 2) * 32)) / engine.getScreenEngine2D().PixelWidth; // int yp2 = (int)(y - ((i % 2) * 32)) / engine.getScreenEngine2D().PixelHeight; // int xp3 = (int)(x - ((i % 2) * 32)) / engine.getScreenEngine2D().PixelWidth; // int yp3 = (int)(y - ((i % 2) * 32)) / engine.getScreenEngine2D().PixelHeight; //int xp4 = (int)(x - ((i / 2) * 32)) / engine.getScreenEngine2D().PixelWidth; //int yp4 = (int)(y - ((i / 2) * 32)) / engine.getScreenEngine2D().PixelHeight; //if (colltype.equals(CollisionType.BASIC)) { if (getTileLayer(getLayer(layerPresent), xp, yp).collide(i)) solid = true; //if (getTileLayer(getLayer(layerPresent), xp2, yp2).collide(i)) solid = true; //if (getTileLayer(getLayer(layerPresent), xp3, yp3).collide(i)) solid = true; //if (getTileLayer(getLayer(layerPresent), xp4, yp4).collide(i)) solid = true; //} else if(colltype.equals(CollisionType.ADVANCED_COLLBOX)) { // if (getTileLayerCollision(getCollisionLayer(layerPresent), xp, yp).collide(i)) 
solid = true; //} } return solid; } /** * Adds entity to update and render que. (Make sure entity is properly initialized beforehand) * @param entity */ public void add(Entity entity) { Que.add(entity); } public void deleteLayer(int layerid) { if(layerid != 1) { render = false; LayerList.remove(layerid - 2); collisionLayers.remove(layerid - 1); Layers-=1; deleteLayerFile(layerid); RenderLayers.remove(layerid - 1); render = true; } } }
package org.codelogger.utils;

import static org.codelogger.utils.PrintUtils.println;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;

import org.codelogger.utils.FileUtils;
import org.junit.Before;
import org.junit.Test;

/**
 * JUnit 4 tests for {@link FileUtils}, exercised against real files under the
 * "FileUtilsTest/" fixture directory.
 *
 * NOTE(review): several tests depend on files created/deleted by other tests
 * (e.g. the delete_* tests delete what write_* tests produced), so the suite
 * appears order-dependent — confirm the intended execution order.
 * NOTE(review): some method names are misleading: the *Iscorrect_returnFalse
 * isDirectory tests actually assert TRUE. Local variable "isCreateedDirectory"
 * is a typo ("Created"). Left unchanged here (documentation-only pass).
 */
public class FileUtilsTest {

    // Root fixture directory all test paths are built from.
    public final static String testPath = "FileUtilsTest/";

    // Fixture names: a readable file, a scratch write target, a readable
    // folder (expected to contain exactly 2 files), and a scratch folder.
    private final String testReadFileName = "testFile.txt";
    private final String testWriteFileName = "testForWrite.txt";
    private final String testFolderName = "testFolderForRead";
    private final String testFolderNameForDelete = "testFolderForDelete";

    // A path that cannot exist (and, under /aaa, cannot be created either).
    private final String noSuchFileOrFolder = "/aaa/No such file or folder!";

    private String testFilePath;
    private String testWritePath;
    // NOTE(review): testFolderPath and testFolderPathForRead are both set to
    // the same value in init() — one of the two looks redundant.
    private String testFolderPath;
    private String testFolderPathForRead;
    private String testFolderPathForDelete;
    private File testFileForRead;
    private File testFileForWrite;
    private File testFolderForRead;
    private File testFolderForDelete;
    private InputStream testInputStream;
    private OutputStream testOutputStream;

    /**
     * Rebuilds all paths/files/streams before each test. Opens fresh streams on
     * the fixture read file and the scratch write file.
     *
     * @throws FileNotFoundException if the fixture read file is missing
     */
    @Before
    public void init() throws FileNotFoundException {
        testFilePath = testPath + testReadFileName;
        testWritePath = testPath + testWriteFileName;
        testFolderPath = testPath + testFolderName;
        testFolderPathForRead = testPath + testFolderName;
        testFolderPathForDelete = testPath + testFolderNameForDelete;
        testFileForRead = new File(testPath + testReadFileName);
        testFileForWrite = new File(testPath + testWriteFileName);
        testFolderForRead = new File(testFolderPathForRead);
        testFolderForDelete = new File(testFolderPathForDelete);
        testInputStream = new FileInputStream(testPath + testReadFileName);
        // NOTE(review): opening this stream recreates the write file as a side
        // effect before every test; streams are not closed by a teardown.
        testOutputStream = new FileOutputStream(testPath + testWriteFileName);
    }

    // ---- FileUtils.isExist -------------------------------------------------

    @Test
    public void isExist_fileIsIncorrectFile_returnFalse() {
        testFileForRead = buildIncorrectFile();
        assertFalse(FileUtils.isExist(testFileForRead));
    }

    @Test
    public void isExist_fileIsCorrectFile_returnTrue() {
        assertTrue(FileUtils.isExist(testFileForRead));
    }

    @Test
    public void isExist_fileOrFolderPathIsNotExisted_returnFalse() {
        testFilePath = noSuchFileOrFolder;
        assertFalse(FileUtils.isExist(testFilePath));
    }

    @Test
    public void isExist_filePathIsExisted_returnTrue() {
        assertTrue(FileUtils.isExist(testFilePath));
    }

    @Test
    public void isExist_folderPathIsExisted_returnTrue() {
        assertTrue(FileUtils.isExist(testFolderPathForRead));
    }

    // ---- FileUtils.isDirectory --------------------------------------------

    @Test
    public void isDirectory_folderIsIncorrect_returnFalse() {
        testFolderForRead = buildIncorrectFile();
        assertFalse(FileUtils.isDirectory(testFolderForRead));
    }

    // NOTE(review): name says returnFalse but the assertion is assertTrue.
    @Test
    public void isDirectory_folderIscorrect_returnFalse() {
        assertTrue(FileUtils.isDirectory(testFolderForRead));
    }

    @Test
    public void isDirectory_folderPathIsIncorrect_returnFalse() {
        testFolderPath = noSuchFileOrFolder;
        assertFalse(FileUtils.isDirectory(testFolderPath));
    }

    // NOTE(review): name says returnFalse but the assertion is assertTrue.
    @Test
    public void isDirectory_folderPathIscorrect_returnFalse() {
        assertTrue(FileUtils.isDirectory(testFolderPath));
    }

    // ---- FileUtils.createDirectory ----------------------------------------

    @Test
    public void createDirectory_destinationPathIsIncorrect_returnFalse() {
        boolean isCreateedDirectory = FileUtils.createDirectory(noSuchFileOrFolder);
        assertFalse(isCreateedDirectory);
    }

    @Test
    public void createDirectory_destinationPathIsCorrectAndExisted_returnTrue() {
        boolean isCreateedDirectory = FileUtils.createDirectory(testFolderPathForDelete);
        assertTrue(isCreateedDirectory);
    }

    @Test
    public void createDirectory_destinationPathIsCorrectAndNotExisted_returnTrue() {
        FileUtils.delete(testFolderPathForDelete);
        boolean isCreateedDirectory = FileUtils.createDirectory(testFolderPathForDelete);
        assertTrue(isCreateedDirectory);
    }

    @Test
    public void createDirectory_destinationFileIsIncorrect_returnFalse() {
        boolean isCreateedDirectory = FileUtils.createDirectory(buildIncorrectFile());
        assertFalse(isCreateedDirectory);
    }

    @Test
    public void createDirectory_destinationFileIsCorrectAndExisted_returnTrue() {
        boolean isCreateedDirectory = FileUtils.createDirectory(testFolderForDelete);
        assertTrue(isCreateedDirectory);
    }

    @Test
    public void createDirectory_destinationFileIsCorrectAndNotExisted_returnTrue() {
        FileUtils.delete(testFolderForDelete);
        boolean isCreateedDirectory = FileUtils.createDirectory(testFolderForDelete);
        assertTrue(isCreateedDirectory);
    }

    // ---- FileUtils.getBytes ------------------------------------------------

    @Test(expected = FileNotFoundException.class)
    public void getBytes_fileIsIncorrect_throwFileNotFoundException() throws IOException {
        FileUtils.getBytes(buildIncorrectFile());
    }

    @Test
    public void getBytes_fileIsCorrect_returnBytes() throws IOException {
        byte[] bytes = FileUtils.getBytes(testFileForRead);
        assertTrue(bytes.length > 0);
    }

    @Test(expected = FileNotFoundException.class)
    public void getBytes_filePathIsIncorrect_throwFileNotFoundException() throws IOException {
        testFilePath = noSuchFileOrFolder;
        FileUtils.getBytes(testFilePath);
    }

    @Test
    public void getBytes_filePathIsCorrect_returnBytes() throws IOException {
        byte[] bytes = FileUtils.getBytes(testFilePath);
        assertTrue(bytes.length > 0);
    }

    // ---- FileUtils.getAllFilesOfFolder ------------------------------------

    // Assumes the read fixture folder contains exactly 2 files — TODO confirm fixture.
    @Test
    public void getAllFilesOfFolder() {
        File[] allFiles = FileUtils.getAllFilesOfFolder(testFolderPathForRead);
        assertEquals(2, allFiles.length);
        println(allFiles);
    }

    // ---- FileUtils.write (stream/file/path/bytes overloads) ----------------

    @Test(expected = FileNotFoundException.class)
    public void write_inputStreamIsIncorrectAndDestinationFileIsCorrect_throwFileNotFoundException()
            throws IOException {
        testInputStream = new FileInputStream(noSuchFileOrFolder);
        FileUtils.write(testInputStream, testFileForWrite);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_inputStreamIsCorrectAndDestinationFileIsIncorrect_throwFileNotFoundException()
            throws IOException {
        try {
            testFileForWrite = buildIncorrectFile();
            FileUtils.write(testInputStream, testFileForWrite);
        } finally {
            testInputStream.close();
        }
    }

    @Test
    public void write_inputStreamAndDestinationFileBothCorrect_returnTrue() throws IOException {
        try {
            boolean writeSuccess = FileUtils.write(testInputStream, testFileForWrite);
            assertTrue(writeSuccess);
        } finally {
            testInputStream.close();
        }
    }

    @Test(expected = FileNotFoundException.class)
    public void write_inputStreamIsIncorrectAndDestinationPathIsCorrect_throwFileNotFoundException()
            throws IOException {
        testInputStream = new FileInputStream(noSuchFileOrFolder);
        FileUtils.write(testInputStream, testWritePath);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_inputStreamIsCorrectAndDestinationPathIsIncorrect_throwFileNotFoundException()
            throws IOException {
        try {
            testWritePath = noSuchFileOrFolder;
            FileUtils.write(testInputStream, testWritePath);
        } finally {
            testInputStream.close();
        }
    }

    @Test
    public void write_inputStreamAndDestinationPathBothCorrect_returnTrue() throws IOException {
        try {
            boolean writeSuccess = FileUtils.write(testInputStream, testWritePath);
            assertTrue(writeSuccess);
        } finally {
            testInputStream.close();
        }
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceFileIsIncorrectAndOutputStreamIsCorrect_throwFileNotFoundException()
            throws IOException {
        try {
            testFileForRead = buildIncorrectFile();
            FileUtils.write(testFileForRead, testOutputStream);
        } finally {
            testOutputStream.close();
        }
    }

    @Test(expected = FileNotFoundException.class)
    public void write_outputStreamIsIncorrectAndSourceFileIsCorrect_throwFileNotFoundException()
            throws IOException {
        testOutputStream = new FileOutputStream(noSuchFileOrFolder);
        FileUtils.write(testFileForRead, testOutputStream);
    }

    @Test
    public void write_sourceFileAndOutputStreamBothCorrect_returnTrue() throws IOException {
        try {
            boolean writeSuccess = FileUtils.write(testFileForRead, testOutputStream);
            assertTrue(writeSuccess);
        } finally {
            testOutputStream.close();
        }
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceFileIsIncorrectAndDestinationFileIsCorrect_throwFileNotFoundException()
            throws IOException {
        testFileForRead = buildIncorrectFile();
        FileUtils.write(testFileForRead, testFileForWrite);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceFileIsCorrectAndDestinationFileIsIncorrect_throwFileNotFoundException()
            throws IOException {
        testFileForWrite = buildIncorrectFile();
        FileUtils.write(testFileForRead, testFileForWrite);
    }

    @Test
    public void write_sourceFileAndDestinationFileBothCorrect_returnTrue() throws IOException {
        boolean writeSuccess = FileUtils.write(testFileForRead, testFileForWrite);
        assertTrue(writeSuccess);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceFileIsIncorrectAndDestinationPathIsCorrect_throwFileNotFoundException()
            throws IOException {
        testFileForRead = buildIncorrectFile();
        FileUtils.write(testFileForRead, testWritePath);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceFileIsCorrectAndDestinationPathIsIncorrect_throwFileNotFoundException()
            throws IOException {
        testWritePath = noSuchFileOrFolder;
        FileUtils.write(testFileForRead, testWritePath);
    }

    @Test
    public void write_sourceFileAndDestinationPathBothCorrect_returnTrue() throws IOException {
        boolean writeSuccess = FileUtils.write(testFileForRead, testWritePath);
        assertTrue(writeSuccess);
    }

    // The Object overload presumably dispatches by reflection, hence the long
    // throws lists — TODO confirm against FileUtils.write(Object, String).

    @Test(expected = Exception.class)
    public void write_sourceInputStreamIsIncorrectAndDestinationPathIsCorrect_throwException()
            throws SecurityException, IllegalArgumentException, IOException, NoSuchMethodException,
            IllegalAccessException, InvocationTargetException, InstantiationException {
        Object sourceInputStream = new Object();
        FileUtils.write(sourceInputStream, testWritePath);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceInputStreamIsCorrectAndDestinationPathIsIncorrect_throwFileNotFoundException()
            throws SecurityException, IllegalArgumentException, IOException, NoSuchMethodException,
            IllegalAccessException, InvocationTargetException, InstantiationException {
        testWritePath = noSuchFileOrFolder;
        Object sourceInputStream = testInputStream;
        FileUtils.write(sourceInputStream, testWritePath);
    }

    @Test
    public void write_parameterBothCorrect_returnTrue() throws IOException, SecurityException,
            IllegalArgumentException, NoSuchMethodException, IllegalAccessException,
            InvocationTargetException, InstantiationException {
        Object input = testInputStream;
        boolean isWriteSuccess = FileUtils.write(input, testWritePath);
        assertTrue(isWriteSuccess);
        testInputStream.close();
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceBytesIsCorrectAndDestinationFileIsIncorrect_throwFileNotFoundException()
            throws IOException {
        byte[] sourceBytes = new byte[1];
        testFileForWrite = buildIncorrectFile();
        FileUtils.write(sourceBytes, testFileForWrite);
    }

    @Test
    public void write_sourceBytesAndDestinationFileBothCorrect_runSuccess() throws IOException {
        byte[] sourceBytes = FileUtils.getBytes(testFileForRead);
        FileUtils.write(sourceBytes, testFileForWrite);
    }

    @Test(expected = FileNotFoundException.class)
    public void write_sourceBytesIsCorrectAndDestinationPathIsIncorrect_throwFileNotFoundException()
            throws IOException {
        byte[] sourceBytes = new byte[1];
        FileUtils.write(sourceBytes, noSuchFileOrFolder);
    }

    @Test
    public void write_sourceBytesAndDestinationPathBothCorrect_runSuccess() throws IOException {
        byte[] sourceBytes = FileUtils.getBytes(testFileForRead);
        FileUtils.write(sourceBytes, testWritePath);
    }

    // ---- FileUtils.delete --------------------------------------------------

    @Test
    public void delete_destinationPathIsIncorrect_returnFalse() {
        boolean isDeleted = FileUtils.delete(noSuchFileOrFolder);
        assertFalse(isDeleted);
    }

    // Relies on the write file existing (recreated by init()'s FileOutputStream).
    @Test
    public void delete_destinationPathIsCorrect_returnTrue() {
        boolean isDeleted = FileUtils.delete(testWritePath);
        assertTrue(isDeleted);
    }

    @Test
    public void delete_destinationFileIsIncorrect_returnFalse() {
        File incorrectFile = buildIncorrectFile();
        boolean isDeleted = FileUtils.delete(incorrectFile);
        assertFalse(isDeleted);
    }

    @Test
    public void delete_destinationFileIsCorrect_returnTrue() {
        boolean isDeleted = FileUtils.delete(testFileForWrite);
        assertTrue(isDeleted);
    }

    // Recursive delete of a non-empty folder (locals shadow same-named fields).
    @Test
    public void delete_destinationFileIsNotEmptyFolderAndCorrect_returnTrue() throws IOException {
        String testFolderPathForDelete = testPath + "testFolderForDelete/";
        File testFolderForDelete = new File(testFolderPathForDelete);
        testFolderForDelete.mkdirs();
        FileUtils.write(testFileForRead, testFolderPathForDelete + testWriteFileName);
        File testDeleteFolder = new File(testFolderPathForDelete);
        boolean isDeleted = FileUtils.delete(testDeleteFolder);
        assertTrue(isDeleted);
    }

    @Test
    public void delete_destinationFileIsEmptyFolderAndCorrect_returnTrue() throws IOException {
        String testFolderPathForDelete = testPath + "testFolderForDelete/";
        File testFolderForDelete = new File(testFolderPathForDelete);
        testFolderForDelete.mkdirs();
        boolean isDeleted = FileUtils.delete(testFolderForDelete);
        assertTrue(isDeleted);
    }

    // ---- FileUtils.copy ----------------------------------------------------

    @Test
    public void copy_sourceFileIsIncorrectAndDestinationFileBothCorrect_returnFalse()
            throws IOException {
        testFileForRead = buildIncorrectFile();
        FileUtils.delete(testFileForWrite);
        boolean isCopySuccess = FileUtils.copy(testFileForRead, testFileForWrite);
        assertFalse(isCopySuccess);
    }

    @Test(expected = IOException.class)
    public void copy_sourceFileIsCorrectAndDestinationFileIsIncorrect_throwIOException()
            throws IOException {
        testFileForWrite = buildIncorrectFile();
        FileUtils.copy(testFileForRead, testFileForWrite);
    }

    @Test
    public void copy_sourceFileAndDestinationFileBothCorrect_returnTrue() throws IOException {
        FileUtils.delete(testFileForWrite);
        boolean isCopySuccess = FileUtils.copy(testFileForRead, testFileForWrite);
        boolean destinationFileIsExist = FileUtils.isExist(testFileForWrite);
        // Success flag must agree with the destination actually existing.
        assertEquals(isCopySuccess, destinationFileIsExist);
        FileUtils.delete(testFileForWrite);
    }

    @Test
    public void copy_sourceFolderAndDestinationFolderBothCorrect_returnTrue() throws IOException {
        FileUtils.delete(testFolderForDelete);
        boolean isCopySuccess = FileUtils.copy(testFolderForRead, testFolderForDelete);
        boolean destinationFileIsExist = FileUtils.isExist(testFolderForDelete);
        assertEquals(isCopySuccess, destinationFileIsExist);
        FileUtils.delete(testFolderForDelete);
    }

    @Test
    public void copy_sourceFileAndDestinationFolderBothCorrect_returnTrue() throws IOException {
        FileUtils.delete(testFolderForDelete);
        boolean isCopySuccess = FileUtils.copy(testFileForRead, testFolderForDelete);
        boolean destinationFileIsExist = FileUtils.isExist(testFolderForDelete);
        assertEquals(isCopySuccess, destinationFileIsExist);
        FileUtils.delete(testFolderForDelete);
    }

    @Test
    public void copy_sourceFolderAndDestinationFileBothCorrect_returnTrue() throws IOException {
        FileUtils.delete(testFileForWrite);
        boolean isCopySuccess = FileUtils.copy(testFolderForRead, testFileForWrite);
        boolean destinationFileIsExist = FileUtils.isExist(testFileForWrite);
        assertEquals(isCopySuccess, destinationFileIsExist);
        FileUtils.delete(testFileForWrite);
    }

    /** @return a File on a path that cannot exist (used as the "incorrect" input). */
    private File buildIncorrectFile() {
        return new File(noSuchFileOrFolder);
    }
}
// Jasper-generated servlet compiled from taskdetailshistory.jsp (Hadoop MapReduce
// job-history web UI). Generated code: change the source JSP, not this file.
package org.apache.hadoop.mapred;

import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
// Duplicate import below is emitted by the JSP compiler; left as generated.
import javax.servlet.http.*;
import java.io.*;
import java.util.*;
import org.apache.hadoop.http.HtmlQuoting;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.util.*;
import java.text.SimpleDateFormat;
import org.apache.hadoop.mapred.JobHistory.*;

public final class taskdetailshistory_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

  // NOTE(review): SimpleDateFormat is not thread-safe and servlets serve
  // concurrent requests; this shared static comes from the original JSP —
  // a known hazard in this generated code, flagged but not changed here.
  private static SimpleDateFormat dateFormat = new SimpleDateFormat("d/MM HH:mm:ss") ;

  private static final long serialVersionUID = 1L;

  /**
   * Renders one HTML table row describing a single task attempt.
   * Reduce attempts get two extra columns (shuffle/sort finish times).
   *
   * @param taskAttempt the attempt whose history keys are rendered
   * @param type        task type name; compared against Values.REDUCE.name()
   * @param out         JSP writer receiving the HTML
   * @param logFile     encoded job-history file name, used to build links
   */
  private void printTaskAttempt(JobHistory.TaskAttempt taskAttempt,
                                String type, JspWriter out, String logFile)
  throws Exception {
    out.print("<tr>");
    out.print("<td>" + taskAttempt.get(Keys.TASK_ATTEMPT_ID) + "</td>");
    out.print("<td>" + StringUtils.getFormattedTimeWithDiff(dateFormat,
              taskAttempt.getLong(Keys.START_TIME), 0 ) + "</td>");
    if (Values.REDUCE.name().equals(type)) {
      JobHistory.ReduceAttempt reduceAttempt = (JobHistory.ReduceAttempt)taskAttempt;
      // Shuffle finish is shown relative to start; sort finish relative to shuffle.
      out.print("<td>" + StringUtils.getFormattedTimeWithDiff(dateFormat,
                reduceAttempt.getLong(Keys.SHUFFLE_FINISHED),
                reduceAttempt.getLong(Keys.START_TIME)) + "</td>");
      out.print("<td>" + StringUtils.getFormattedTimeWithDiff(dateFormat,
                reduceAttempt.getLong(Keys.SORT_FINISHED),
                reduceAttempt.getLong(Keys.SHUFFLE_FINISHED)) + "</td>");
    }
    out.print("<td>"+ StringUtils.getFormattedTimeWithDiff(dateFormat,
              taskAttempt.getLong(Keys.FINISH_TIME),
              taskAttempt.getLong(Keys.START_TIME) ) + "</td>");
    out.print("<td>" + taskAttempt.get(Keys.HOSTNAME) + "</td>");
    // Error text is HTML-escaped; the other history values are printed raw.
    out.print("<td>" + HtmlQuoting.quoteHtmlChars(taskAttempt.get(Keys.ERROR)) + "</td>");

    // Print task log urls
    out.print("<td>");
    String taskLogsUrl = JobHistory.getTaskLogsUrl(taskAttempt);
    if (taskLogsUrl != null) {
      String tailFourKBUrl = taskLogsUrl + "&start=-4097";
      String tailEightKBUrl = taskLogsUrl + "&start=-8193";
      String entireLogUrl = taskLogsUrl + "&all=true";
      out.print("<a href=\"" + tailFourKBUrl + "\">Last 4KB</a><br/>");
      out.print("<a href=\"" + tailEightKBUrl + "\">Last 8KB</a><br/>");
      out.print("<a href=\"" + entireLogUrl + "\">All</a><br/>");
    } else {
      out.print("n/a");
    }
    out.print("</td>");
    // Counter cell links to per-attempt counter stats when counters parse.
    Counters counters =
        Counters.fromEscapedCompactString(taskAttempt.get(Keys.COUNTERS));
    if (counters != null) {
      TaskAttemptID attemptId =
          TaskAttemptID.forName(taskAttempt.get(Keys.TASK_ATTEMPT_ID));
      TaskID tipid = attemptId.getTaskID();
      org.apache.hadoop.mapreduce.JobID jobId = tipid.getJobID();
      out.print("<td>"
                + "<a href=\"taskstatshistory.jsp?attemptid=" + attemptId
                + "&logFile=" + logFile + "\">"
                + counters.size() + "</a></td>");
    } else {
      out.print("<td></td>");
    }
    out.print("</tr>");
  }

  private static final JspFactory _jspxFactory = JspFactory.getDefaultFactory();

  private static java.util.Vector _jspx_dependants;

  private org.apache.jasper.runtime.ResourceInjector _jspx_resourceInjector;

  /** Jasper plumbing: list of source dependencies (none recorded). */
  public Object getDependants() {
    return _jspx_dependants;
  }

  /**
   * Main request handler generated from the JSP body. Reads the "logFile" and
   * "tipid" request parameters, checks job ACLs, then renders one row per task
   * attempt (plus split locations for map tasks).
   */
  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
      response.setContentType("text/html; charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
      			null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;
      _jspx_resourceInjector = (org.apache.jasper.runtime.ResourceInjector) application.getAttribute("com.sun.appserv.jsp.resource.injector");

      out.write("\n\n\n");
      out.write('\n');
      out.write('\n');
      out.write('\n');

      String logFile = request.getParameter("logFile");
      String tipid = request.getParameter("tipid");
      // Both parameters are mandatory; bail out with a plain-text message otherwise.
      if (logFile == null || tipid == null) {
        out.println("Missing job!!");
        return;
      }
      String encodedLogFileName = JobHistory.JobInfo.encodeJobHistoryFilePath(logFile);
      String jobid = JSPUtil.getJobID(new Path(encodedLogFileName).getName());
      FileSystem fs = (FileSystem) application.getAttribute("fileSys");
      JobConf jobConf = (JobConf) application.getAttribute("jobConf");
      ACLsManager aclsManager = (ACLsManager) application.getAttribute("aclManager");
      // ACL check: a null return means the helper already wrote the error response.
      JobHistory.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
          response, jobConf, aclsManager, fs, new Path(logFile));
      if (job == null) {
        return;
      }
      JobHistory.Task task = job.getAllTasks().get(tipid);
      String type = task.get(Keys.TASK_TYPE);

      out.write("\n<!DOCTYPE html>\n<html>\n<body>\n<h2>");
      out.print(tipid );
      out.write(" attempts for <a href=\"jobdetailshistory.jsp?logFile=");
      out.print(encodedLogFileName);
      out.write("\"> ");
      out.print(jobid );
      out.write(" </a></h2>\n<center>\n<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\">\n<tr><td>Task Id</td><td>Start Time</td>\n");
      // Reduce tasks get the two extra header columns matching printTaskAttempt().
      if (Values.REDUCE.name().equals(type)) {
        out.write("\n    <td>Shuffle Finished</td><td>Sort Finished</td>\n");
      }
      out.write("\n<td>Finish Time</td><td>Host</td><td>Error</td><td>Task Logs</td>\n<td>Counters</td></tr>\n");

      for (JobHistory.TaskAttempt attempt : task.getTaskAttempts().values()) {
        printTaskAttempt(attempt, type, out, encodedLogFileName);
      }

      out.write("\n</table>\n</center>\n");

      // Map tasks additionally list their input split locations.
      if (Values.MAP.name().equals(type)) {
        out.write("\n<h3>Input Split Locations</h3>\n<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\">\n");

        for (String split : StringUtils.split(task.get(Keys.SPLITS))) {
          out.println("<tr><td>" + split + "</td></tr>");
        }

        out.write("\n</table> \n");
      }

      out.write('\n');
      out.write("\n</body>\n</html>\n");
    } catch (Throwable t) {
      // Standard Jasper error path: clear buffered output and delegate to the
      // page-context exception handler (unless the page was deliberately skipped).
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}
package no.agens.depth;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.app.Fragment;
import android.graphics.Color;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.ImageView;

import com.google.android.gms.ads.AdListener;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.InterstitialAd;

import no.agens.depth.lib.MaterialMenuDrawable;

/**
 * Fragment showing the animated water scene, with an AdMob interstitial
 * hooked into its creation. Implements {@link MenuAnimation} so RootActivity
 * can drive its show/hide-menu transitions.
 */
public class eashwinFragment extends Fragment implements MenuAnimation {

    // Duration (ms) of the burger/arrow icon morph and menu transitions.
    public static final int TRANSFORM_DURATION = 900;
    // Whether to run the intro animation once the first layout pass completes.
    private boolean introAnimate;
    // NOTE(review): mInterstitialAd is never assigned or read in this file;
    // looks like dead state — confirm no other class touches it before removing.
    InterstitialAd mInterstitialAd;
    private InterstitialAd interstitial;

    public eashwinFragment() {
    }

    public void setIntroAnimate(boolean introAnimate) {
        this.introAnimate = introAnimate;
    }

    View root;
    MaterialMenuDrawable menuIcon;
    WaterSceneView waterScene;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        root = inflater.inflate(R.layout.fragment_eashwin, container, false);
        waterScene = (WaterSceneView) root.findViewById(R.id.water_scene);
        setupFab();
        introAnimate();
        // setupSeekbars();
        setupMenuButton();
        // Tell the host activity which menu entry this fragment corresponds to.
        ((RootActivity) getActivity()).setCurretMenuIndex(6);
        AdRequest adRequest = new AdRequest.Builder().build();
        // Prepare the Interstitial Ad
        interstitial = new InterstitialAd(root.getContext());
        // Insert the Ad Unit ID
        interstitial.setAdUnitId(getString(R.string.admob_interstitial_id));
        interstitial.loadAd(adRequest);
        // Prepare an Interstitial Ad Listener
        interstitial.setAdListener(new AdListener() {
            public void onAdLoaded() {
                // Call displayInterstitial() function
                displayInterstitial();
            }
        });
        return root;
    }

    // Commented-out seek-bar wiring kept from the original for reference.
    /* private void setupSeekbars() {
        //SeekBar waveSeekBar = (SeekBar) root.findViewById(R.id.wave_seekbar);
        //SeekBar noiseSeekBar = (SeekBar) root.findViewById(R.id.noise_seekbar);
        //WindFragment.setProgressBarColor(waveSeekBar, getResources().getColor(R.color.fab));
        // WindFragment.setProgressBarColor(noiseSeekBar, getResources().getColor(R.color.fab));
        // noiseSeekBar.setProgress(50);
        // waveSeekBar.setProgress(50);
        //waveSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                waterScene.setWaveHeight(progress / 4f * getResources().getDisplayMetrics().density);
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
        noiseSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                waterScene.setNoiseIntensity((float) progress / 100f);
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
    }*/

    /**
     * Wires the toolbar menu button: toggles the menu (or delegates to back
     * navigation when already open) and installs the morphing burger icon.
     */
    private void setupMenuButton() {
        ImageView menu = (ImageView) root.findViewById(R.id.menu);
        menu.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (!((RootActivity) getActivity()).isMenuVisible)
                    ((RootActivity) getActivity()).showMenu();
                else
                    getActivity().onBackPressed();
            }
        });
        menuIcon = new MaterialMenuDrawable(getActivity(), Color.WHITE,
                MaterialMenuDrawable.Stroke.THIN, TRANSFORM_DURATION);
        menu.setImageDrawable(menuIcon);
    }

    /**
     * Shows the interstitial ad when loaded — randomly throttled so it appears
     * on roughly 30% of opportunities (Math.random() > 0.7).
     */
    public void displayInterstitial() {
        // If Ads are loaded, show Interstitial else show nothing.
        if (interstitial.isLoaded() && Math.random() > 0.7) {
            interstitial.show();
        }
    }

    /**
     * Runs the intro transition after the first layout pass (only when
     * requested via setIntroAnimate). Pauses the scene almost immediately so
     * it does not animate underneath the transition.
     */
    private void introAnimate() {
        if (introAnimate)
            root.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
                @Override
                public void onGlobalLayout() {
                    // One-shot listener: detach before animating.
                    root.getViewTreeObserver().removeOnGlobalLayoutListener(this);
                    TransitionHelper.startIntroAnim(root, showShadowListener);
                    hideShadow();
                    waterScene.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            waterScene.setPause(true);
                        }
                    }, 10);
                }
            });
    }

    /**
     * FAB click: plays the exit animation on the next layout pass, then
     * navigates to WaterFragment (with its intro animation enabled).
     */
    private void setupFab() {
        root.findViewById(R.id.fab).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                root.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
                    @Override
                    public void onGlobalLayout() {
                        root.getViewTreeObserver().removeOnGlobalLayoutListener(this);
                        TransitionHelper.startExitAnim(root);
                    }
                });
                WaterFragment waterFragment = new WaterFragment();
                waterFragment.setIntroAnimate(true);
                ((RootActivity) getActivity()).goToFragment(waterFragment);
                if (((RootActivity) getActivity()).isMenuVisible)
                    ((RootActivity) getActivity()).hideMenu();
                hideShadow();
                waterScene.setPause(true);
            }
        });
    }

    // Re-shows the action-bar shadow and resumes the scene once a transition ends.
    AnimatorListenerAdapter showShadowListener = new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
            super.onAnimationEnd(animation);
            showShadow();
            waterScene.setPause(false);
        }
    };

    /** Hides the action-bar drop shadow (used while transitions run). */
    private void hideShadow() {
        View actionbarShadow = root.findViewById(R.id.actionbar_shadow);
        actionbarShadow.setVisibility(View.GONE);
    }

    /** Shows the action-bar drop shadow with a short fade-in. */
    private void showShadow() {
        View actionbarShadow = root.findViewById(R.id.actionbar_shadow);
        actionbarShadow.setVisibility(View.VISIBLE);
        ObjectAnimator.ofFloat(actionbarShadow, View.ALPHA, 0, 0.8f).setDuration(400).start();
    }

    /** MenuAnimation: slide into the menu state; icon morphs burger → arrow. */
    @Override
    public void animateTOMenu() {
        TransitionHelper.animateToMenuState(root, new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                super.onAnimationEnd(animation);
                waterScene.setPause(false);
            }
        });
        menuIcon.animateIconState(MaterialMenuDrawable.IconState.ARROW);
        hideShadow();
        waterScene.setPause(true);
    }

    /** MenuAnimation: return from the menu state; icon morphs arrow → burger. */
    @Override
    public void revertFromMenu() {
        TransitionHelper.startRevertFromMenu(root, showShadowListener);
        menuIcon.animateIconState(MaterialMenuDrawable.IconState.BURGER);
        waterScene.setPause(true);
    }

    /** MenuAnimation: animate the fragment out while the menu takes over. */
    @Override
    public void exitFromMenu() {
        TransitionHelper.animateMenuOut(root);
        waterScene.setPause(true);
    }
}
package controllers;

import com.avaje.ebean.PagedList;
import com.fasterxml.jackson.databind.JsonNode;
import model.*;
import org.apache.commons.lang3.StringUtils;
import play.Logger;
import play.data.Form;
import play.data.validation.ValidationError;
import play.libs.Json;
import play.libs.mailer.MailerClient;
import play.mvc.Controller;
import play.mvc.Result;
import play.mvc.Security;
import security.controller.Secured;
import security.model.User;
import util.MailService;

import javax.inject.Inject;
import java.util.*;

/**
 * REST endpoints for employee leave management: querying leaves, applying,
 * approving/rejecting/cancelling, and calendar views. All endpoints require an
 * authenticated session ({@link Secured}); the current user is read from the
 * "userName" session key.
 *
 * Created by nareshdon on 03-11-2015.
 */
public class LeaveService extends Controller {

    @Inject
    MailerClient mailerClient;

    /** Pages through the signed-in user's leaves for the current year. */
    @Security.Authenticated(Secured.class)
    public Result getUserLeavesOfCurrentYear() {
        JsonNode json = request().body().asJson();
        FilterBean filterBean = Json.fromJson(json, FilterBean.class);
        PagedList<Leave> pagedList =
                Leave.getUserLeavesOfCurrentYear(session().get("userName"), filterBean);
        return ok(Json.toJson(toLeaveMetaData(pagedList)));
    }

    /** Returns available / taken / pending leave counts for the signed-in user. */
    @Security.Authenticated(Secured.class)
    public Result getUserLeavesCount() {
        String userName = session().get("userName");
        Map<String, Integer> leavesCountMap = new HashMap<String, Integer>();
        leavesCountMap.put("leavesAvailable", Leave.getLeavesAvailableForEmployee(userName));
        leavesCountMap.put("leavesTaken", Leave.getLeavesTakenByEmployee(userName));
        leavesCountMap.put("leavesPending", Leave.getNoOfPendingLeavesForEmployee(userName));
        return ok(Json.toJson(leavesCountMap));
    }

    /**
     * Applies for a planned leave on behalf of the signed-in user. Form
     * validation errors are returned with status FAILED; persistence/mail
     * failures are logged but (as in the original behaviour) still answered
     * with SUCCESS.
     */
    @Security.Authenticated(Secured.class)
    public Result applyLeave() {
        ActionResult actionResult = new ActionResult();
        actionResult.setStatus("SUCCESS");
        JsonNode json = request().body().asJson();
        Form<Leave> leaveForm = Form.form(Leave.class).bind(json);
        Logger.debug(leaveForm.data().toString());
        if (leaveForm.hasErrors()) {
            actionResult.setStatus("FAILED");
            actionResult.setErrors(leaveForm.errorsAsJson());
            return ok(Json.toJson(actionResult));
        }
        try {
            Leave leave = Json.fromJson(json, Leave.class);
            Logger.debug(leave.getFromDate().toString());
            leave.setApproverId(session().get("approver_id"));
            leave.setEmployeeId(session().get("userName"));
            leave.setLeaveType("PLANNED");
            leave.setStatus(LeaveStatus.PENDING);
            leave.setMaker(session().get("userName"));
            leave.save();
            MailService.applyLeave(mailerClient, leave, session().get("user_description"),
                    session().get("approver_email_id"));
        } catch (Exception exc) {
            // NOTE(review): swallowed on purpose to keep the original contract
            // (client sees SUCCESS even if save/mail failed) — confirm intent.
            Logger.error("Apply Leave", exc);
        }
        return ok(Json.toJson(actionResult));
    }

    /** Marks the given leave CANCELLED and echoes the sparse update back. */
    @Security.Authenticated(Secured.class)
    public Result cancelLeave(int leaveId) {
        Leave leave = updateLeaveStatus(leaveId, LeaveStatus.CANCELLED);
        return ok(Json.toJson(leave));
    }

    /** Pages through leaves awaiting action by the signed-in approver. */
    @Security.Authenticated(Secured.class)
    public Result searchLeavesForApprover() {
        JsonNode json = request().body().asJson();
        FilterBean filterBean = Json.fromJson(json, FilterBean.class);
        PagedList<Leave> pagedList =
                Leave.searchLeavesForApprover(filterBean, session().get("userName"));
        return ok(Json.toJson(toLeaveMetaData(pagedList)));
    }

    /** Approves a leave, then mails the applicant; returns the reloaded leave. */
    @Security.Authenticated(Secured.class)
    public Result approveLeave(int leaveId) {
        updateLeaveStatus(leaveId, LeaveStatus.APPROVED);
        // Reload the full row (the update above only wrote id + status).
        Leave leave = Leave.find.where().eq("leaveid", leaveId).findUnique();
        User applierUserObj = User.find.where().eq("user_id", leave.getEmployeeId()).findUnique();
        MailService.approveLeave(mailerClient, leave, applierUserObj.getEmailId());
        return ok(Json.toJson(leave));
    }

    /** Rejects a leave, then mails the applicant; returns the reloaded leave. */
    @Security.Authenticated(Secured.class)
    public Result rejectLeave(int leaveId) {
        updateLeaveStatus(leaveId, LeaveStatus.REJECTED);
        // Reload the full row (the update above only wrote id + status).
        Leave leave = Leave.find.where().eq("leaveid", leaveId).findUnique();
        User applierUserObj = User.find.where().eq("user_id", leave.getEmployeeId()).findUnique();
        MailService.rejectLeave(mailerClient, leave, applierUserObj.getEmailId());
        return ok(Json.toJson(leave));
    }

    /** Pages through the signed-in user's cancelled leaves for the current year. */
    @Security.Authenticated(Secured.class)
    public Result getUserCancelLeavesOfCurrentYear() {
        JsonNode json = request().body().asJson();
        FilterBean filterBean = Json.fromJson(json, FilterBean.class);
        PagedList<Leave> pagedList =
                Leave.getUserCancelLeavesOfCurrentYear(session().get("userName"), filterBean);
        return ok(Json.toJson(toLeaveMetaData(pagedList)));
    }

    /** Returns calendar events for the filter plus the distinct users involved. */
    @Security.Authenticated(Secured.class)
    public Result getCalendarEvents() {
        JsonNode json = request().body().asJson();
        FilterBean filterBean = Json.fromJson(json, FilterBean.class);
        List<CalendarEventBean> lstLeaves = CalendarEventBean.calendarEvents(filterBean);
        Map<String, Object> result = new HashMap<String, Object>();
        result.put("eventsList", lstLeaves);
        result.put("userList", getUniqueUserList(lstLeaves));
        return ok(Json.toJson(result));
    }

    /** Collects the distinct event titles (one per user) from the event list. */
    private Set<String> getUniqueUserList(List<CalendarEventBean> lstEvents) {
        // The original built a HashMap keyed and valued by the title just to
        // take its keySet(); a HashSet expresses the de-duplication directly.
        Set<String> users = new HashSet<String>();
        for (CalendarEventBean eventBean : lstEvents) {
            users.add(eventBean.getTitle());
        }
        return users;
    }

    /** Lists employees the signed-in approver may apply leave on behalf of. */
    @Security.Authenticated(Secured.class)
    public Result getOnBehalfUserList() {
        List<CommonBean> lstEmployees = User.getOnBehaulfUserList(session().get("userName"));
        return ok(Json.toJson(lstEmployees));
    }

    /**
     * Applies a pre-approved leave on behalf of another employee. Requires
     * employeeId and leaveType in the payload; validation errors are returned
     * with status FAILED.
     */
    @Security.Authenticated(Secured.class)
    public Result applyOnBehalfLeave() {
        ActionResult actionResult = new ActionResult();
        actionResult.setStatus("SUCCESS");
        JsonNode json = request().body().asJson();
        Form<Leave> leaveForm = Form.form(Leave.class).bind(json);
        Logger.debug(leaveForm.data().toString());
        Leave leave = Json.fromJson(json, Leave.class);
        if (StringUtils.isEmpty(leave.getEmployeeId())) {
            addRequiredError(leaveForm, "employeeId", "Employee is required");
        }
        if (StringUtils.isEmpty(leave.getLeaveType())) {
            addRequiredError(leaveForm, "leaveType", "Leave Type is required");
        }
        if (leaveForm.hasErrors()) {
            actionResult.setStatus("FAILED");
            actionResult.setErrors(leaveForm.errorsAsJson());
            return ok(Json.toJson(actionResult));
        }
        try {
            Logger.debug(leave.getFromDate().toString());
            leave.setApproverId(session().get("userName"));
            leave.setMaker(session().get("userName"));
            leave.setStatus(LeaveStatus.APPROVED);
            leave.save();
            MailService.applyOnBehalfLeave(mailerClient, leave, session().get("user_description"),
                    session().get("user_email_id"));
        } catch (Exception exc) {
            // NOTE(review): swallowed to preserve the original SUCCESS-on-failure
            // contract — confirm intent.
            Logger.error("Apply Leave", exc);
        }
        return ok(Json.toJson(actionResult));
    }

    /** Builds the paged response wrapper used by every list endpoint. */
    private static LeaveMetaData toLeaveMetaData(PagedList<Leave> pagedList) {
        LeaveMetaData metaData = new LeaveMetaData();
        metaData.setLeaves(pagedList.getList());
        metaData.setTotalRows(pagedList.getTotalRowCount());
        return metaData;
    }

    /** Sparse status update: writes only the id and the new status. */
    private static Leave updateLeaveStatus(int leaveId, LeaveStatus status) {
        Leave leave = new Leave();
        leave.setLeaveId(leaveId);
        leave.setStatus(status);
        leave.update();
        return leave;
    }

    /**
     * Attaches a single "required field" validation error to the form.
     * Replaces the original dead {@code if (lst.size() > 0)} guard, which was
     * always true immediately after the preceding {@code add}.
     */
    private static void addRequiredError(Form<Leave> form, String field, String message) {
        List<ValidationError> errors = new ArrayList<ValidationError>();
        errors.add(new ValidationError(field, message));
        form.errors().put(field, errors);
    }
}
package kieker.common.record.system;

import java.nio.BufferOverflowException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;

import kieker.common.record.AbstractMonitoringRecord;
import kieker.common.record.IMonitoringRecord;
import kieker.common.util.registry.IRegistry;

/**
 * Immutable monitoring record carrying the 1/5/15-minute system load averages
 * for one host at one timestamp. Follows Kieker's generated-record layout:
 * field order in {@link #TYPES}, {@link #toArray()}, the array constructors
 * and the binary (de)serialization must all stay in sync.
 *
 * @author Teerat Pitakrat
 *
 * @since 1.12
 */
public class LoadAverageRecord extends AbstractMonitoringRecord implements IMonitoringRecord.Factory, IMonitoringRecord.BinaryFactory {
	private static final long serialVersionUID = -664763923774505966L;

	/** Descriptive definition of the serialization size of the record. */
	public static final int SIZE = TYPE_SIZE_LONG // LoadAverageRecord.timestamp
			 + TYPE_SIZE_STRING // LoadAverageRecord.hostname
			 + TYPE_SIZE_DOUBLE // LoadAverageRecord.oneMinLoadAverage
			 + TYPE_SIZE_DOUBLE // LoadAverageRecord.fiveMinLoadAverage
			 + TYPE_SIZE_DOUBLE // LoadAverageRecord.fifteenMinLoadAverage
	;

	public static final Class<?>[] TYPES = {
		long.class, // LoadAverageRecord.timestamp
		String.class, // LoadAverageRecord.hostname
		double.class, // LoadAverageRecord.oneMinLoadAverage
		double.class, // LoadAverageRecord.fiveMinLoadAverage
		double.class, // LoadAverageRecord.fifteenMinLoadAverage
	};

	/** user-defined constants */

	/** default constants */
	public static final long TIMESTAMP = 0L;
	public static final String HOSTNAME = "";
	public static final double ONE_MIN_LOAD_AVERAGE = 0.0;
	public static final double FIVE_MIN_LOAD_AVERAGE = 0.0;
	public static final double FIFTEEN_MIN_LOAD_AVERAGE = 0.0;

	/** property declarations */
	private final long timestamp;
	private final String hostname;
	private final double oneMinLoadAverage;
	private final double fiveMinLoadAverage;
	private final double fifteenMinLoadAverage;

	/**
	 * Creates a new instance of this class using the given parameters.
	 *
	 * @param timestamp
	 *            timestamp
	 * @param hostname
	 *            hostname; null is replaced by the empty-string default
	 * @param oneMinLoadAverage
	 *            oneMinLoadAverage
	 * @param fiveMinLoadAverage
	 *            fiveMinLoadAverage
	 * @param fifteenMinLoadAverage
	 *            fifteenMinLoadAverage
	 */
	public LoadAverageRecord(final long timestamp, final String hostname, final double oneMinLoadAverage, final double fiveMinLoadAverage, final double fifteenMinLoadAverage) {
		this.timestamp = timestamp;
		this.hostname = hostname == null?HOSTNAME:hostname;
		this.oneMinLoadAverage = oneMinLoadAverage;
		this.fiveMinLoadAverage = fiveMinLoadAverage;
		this.fifteenMinLoadAverage = fifteenMinLoadAverage;
	}

	/**
	 * This constructor converts the given array into a record.
	 * It is recommended to use the array which is the result of a call to {@link #toArray()}.
	 *
	 * @param values
	 *            The values for the record; order and types must match {@link #TYPES}.
	 */
	public LoadAverageRecord(final Object[] values) { // NOPMD (direct store of values)
		AbstractMonitoringRecord.checkArray(values, TYPES);
		this.timestamp = (Long) values[0];
		this.hostname = (String) values[1];
		this.oneMinLoadAverage = (Double) values[2];
		this.fiveMinLoadAverage = (Double) values[3];
		this.fifteenMinLoadAverage = (Double) values[4];
	}

	/**
	 * This constructor uses the given array to initialize the fields of this record.
	 *
	 * @param values
	 *            The values for the record.
	 * @param valueTypes
	 *            The types of the elements in the first array.
	 */
	protected LoadAverageRecord(final Object[] values, final Class<?>[] valueTypes) { // NOPMD (values stored directly)
		AbstractMonitoringRecord.checkArray(values, valueTypes);
		this.timestamp = (Long) values[0];
		this.hostname = (String) values[1];
		this.oneMinLoadAverage = (Double) values[2];
		this.fiveMinLoadAverage = (Double) values[3];
		this.fifteenMinLoadAverage = (Double) values[4];
	}

	/**
	 * This constructor converts the given array into a record.
	 * Read order must mirror the write order in {@link #writeBytes(ByteBuffer, IRegistry)}.
	 *
	 * @param buffer
	 *            The bytes for the record.
	 *
	 * @throws BufferUnderflowException
	 *             if buffer not sufficient
	 */
	public LoadAverageRecord(final ByteBuffer buffer, final IRegistry<String> stringRegistry) throws BufferUnderflowException {
		this.timestamp = buffer.getLong();
		this.hostname = stringRegistry.get(buffer.getInt());
		this.oneMinLoadAverage = buffer.getDouble();
		this.fiveMinLoadAverage = buffer.getDouble();
		this.fifteenMinLoadAverage = buffer.getDouble();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Object[] toArray() {
		return new Object[] {
			this.getTimestamp(),
			this.getHostname(),
			this.getOneMinLoadAverage(),
			this.getFiveMinLoadAverage(),
			this.getFifteenMinLoadAverage()
		};
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void registerStrings(final IRegistry<String> stringRegistry) { // NOPMD (generated code)
		// Side effect only: interns the hostname so writeBytes can emit its id.
		stringRegistry.get(this.getHostname());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void writeBytes(final ByteBuffer buffer, final IRegistry<String> stringRegistry) throws BufferOverflowException {
		buffer.putLong(this.getTimestamp());
		// Strings are serialized as registry ids, not inline.
		buffer.putInt(stringRegistry.get(this.getHostname()));
		buffer.putDouble(this.getOneMinLoadAverage());
		buffer.putDouble(this.getFiveMinLoadAverage());
		buffer.putDouble(this.getFifteenMinLoadAverage());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Class<?>[] getValueTypes() {
		return TYPES; // NOPMD
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public int getSize() {
		return SIZE;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @deprecated This record uses the {@link kieker.common.record.IMonitoringRecord.Factory} mechanism. Hence, this method is not implemented.
	 */
	@Override
	@Deprecated
	public void initFromArray(final Object[] values) {
		throw new UnsupportedOperationException();
	}

	/**
	 * {@inheritDoc}
	 *
	 * @deprecated This record uses the {@link kieker.common.record.IMonitoringRecord.BinaryFactory} mechanism. Hence, this method is not implemented.
	 */
	@Override
	@Deprecated
	public void initFromBytes(final ByteBuffer buffer, final IRegistry<String> stringRegistry) throws BufferUnderflowException {
		throw new UnsupportedOperationException();
	}

	/**
	 * {@inheritDoc}
	 *
	 * Compares all payload fields plus the inherited logging timestamp; the
	 * double fields go through isNotEqual (presumably a tolerance/NaN-aware
	 * helper inherited from AbstractMonitoringRecord — confirm).
	 */
	@Override
	public boolean equals(final Object obj) {
		if (obj == null) return false;
		if (obj == this) return true;
		if (obj.getClass() != this.getClass()) return false;

		final LoadAverageRecord castedRecord = (LoadAverageRecord) obj;
		if (this.getLoggingTimestamp() != castedRecord.getLoggingTimestamp()) return false;
		if (this.getTimestamp() != castedRecord.getTimestamp()) return false;
		if (!this.getHostname().equals(castedRecord.getHostname())) return false;
		if (isNotEqual(this.getOneMinLoadAverage(), castedRecord.getOneMinLoadAverage())) return false;
		if (isNotEqual(this.getFiveMinLoadAverage(), castedRecord.getFiveMinLoadAverage())) return false;
		if (isNotEqual(this.getFifteenMinLoadAverage(), castedRecord.getFifteenMinLoadAverage())) return false;
		return true;
	}

	public final long getTimestamp() {
		return this.timestamp;
	}

	public final String getHostname() {
		return this.hostname;
	}

	public final double getOneMinLoadAverage() {
		return this.oneMinLoadAverage;
	}

	public final double getFiveMinLoadAverage() {
		return this.fiveMinLoadAverage;
	}

	public final double getFifteenMinLoadAverage() {
		return this.fifteenMinLoadAverage;
	}
}
/**
 * This class is generated by jOOQ
 */
package no.mesan.ark.persistering.generated.tables.records;


import java.sql.Timestamp;

import javax.annotation.Generated;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;

import no.mesan.ark.persistering.generated.tables.Actor;

import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record4;
import org.jooq.Row4;
import org.jooq.impl.UpdatableRecordImpl;


/**
 * This class is generated by jOOQ.
 *
 * Updatable record for the {@code public.actor} table
 * (actor_id, first_name, last_name, last_update). Do not edit by hand —
 * regenerate via the jOOQ code generator instead.
 */
@Generated(
	value = {
		"http://www.jooq.org",
		"jOOQ version:3.7.3"
	},
	comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
@Entity
@Table(name = "actor", schema = "public")
public class ActorRecord extends UpdatableRecordImpl<ActorRecord> implements Record4<Integer, String, String, Timestamp> {

	private static final long serialVersionUID = -525135993;

	/**
	 * Setter for <code>public.actor.actor_id</code>.
	 */
	public void setActorId(Integer value) {
		setValue(0, value);
	}

	/**
	 * Getter for <code>public.actor.actor_id</code>.
	 */
	@Id
	@Column(name = "actor_id", unique = true, nullable = false, precision = 32)
	public Integer getActorId() {
		return (Integer) getValue(0);
	}

	/**
	 * Setter for <code>public.actor.first_name</code>.
	 */
	public void setFirstName(String value) {
		setValue(1, value);
	}

	/**
	 * Getter for <code>public.actor.first_name</code>.
	 */
	@Column(name = "first_name", nullable = false, length = 45)
	public String getFirstName() {
		return (String) getValue(1);
	}

	/**
	 * Setter for <code>public.actor.last_name</code>.
	 */
	public void setLastName(String value) {
		setValue(2, value);
	}

	/**
	 * Getter for <code>public.actor.last_name</code>.
	 */
	@Column(name = "last_name", nullable = false, length = 45)
	public String getLastName() {
		return (String) getValue(2);
	}

	/**
	 * Setter for <code>public.actor.last_update</code>.
	 */
	public void setLastUpdate(Timestamp value) {
		setValue(3, value);
	}

	/**
	 * Getter for <code>public.actor.last_update</code>.
	 */
	@Column(name = "last_update", nullable = false)
	public Timestamp getLastUpdate() {
		return (Timestamp) getValue(3);
	}

	// -------------------------------------------------------------------------
	// Primary key information
	// -------------------------------------------------------------------------

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Record1<Integer> key() {
		return (Record1) super.key();
	}

	// -------------------------------------------------------------------------
	// Record4 type implementation
	// -------------------------------------------------------------------------

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Row4<Integer, String, String, Timestamp> fieldsRow() {
		return (Row4) super.fieldsRow();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Row4<Integer, String, String, Timestamp> valuesRow() {
		return (Row4) super.valuesRow();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Field<Integer> field1() {
		return Actor.ACTOR.ACTOR_ID;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Field<String> field2() {
		return Actor.ACTOR.FIRST_NAME;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Field<String> field3() {
		return Actor.ACTOR.LAST_NAME;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Field<Timestamp> field4() {
		return Actor.ACTOR.LAST_UPDATE;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Integer value1() {
		return getActorId();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public String value2() {
		return getFirstName();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public String value3() {
		return getLastName();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Timestamp value4() {
		return getLastUpdate();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public ActorRecord value1(Integer value) {
		setActorId(value);
		return this;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public ActorRecord value2(String value) {
		setFirstName(value);
		return this;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public ActorRecord value3(String value) {
		setLastName(value);
		return this;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public ActorRecord value4(Timestamp value) {
		setLastUpdate(value);
		return this;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public ActorRecord values(Integer value1, String value2, String value3, Timestamp value4) {
		value1(value1);
		value2(value2);
		value3(value3);
		value4(value4);
		return this;
	}

	// -------------------------------------------------------------------------
	// Constructors
	// -------------------------------------------------------------------------

	/**
	 * Create a detached ActorRecord
	 */
	public ActorRecord() {
		super(Actor.ACTOR);
	}

	/**
	 * Create a detached, initialised ActorRecord
	 */
	public ActorRecord(Integer actorId, String firstName, String lastName, Timestamp lastUpdate) {
		super(Actor.ACTOR);

		setValue(0, actorId);
		setValue(1, firstName);
		setValue(2, lastName);
		setValue(3, lastUpdate);
	}
}
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.stage.destination.hdfs.writer;

import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.OnRecordError;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.lib.generator.DataGenerator;
import com.streamsets.pipeline.lib.generator.DataGeneratorException;
import com.streamsets.pipeline.lib.generator.DataGeneratorFactory;
import com.streamsets.pipeline.sdk.ContextInfoCreator;
import com.streamsets.pipeline.sdk.RecordCreator;
import com.streamsets.pipeline.stage.destination.hdfs.Errors;
import com.streamsets.pipeline.stage.destination.hdfs.HdfsDTarget;
import org.apache.hadoop.fs.Path;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;

/**
 * Tests for ActiveRecordWriters lifecycle: obtaining writers, cutoff-based
 * purging, close-all, and idle-timeout renaming of temp files. Uses real
 * local-filesystem paths under target/ and real sleeps for timeouts.
 */
public class TestActiveRecordWriters {
  // Fresh per-test scratch directory (see setUpClass()).
  private Path testDir;

  /** DataGeneratorFactory whose generators discard everything (no-op sink). */
  public static class DummyDataGeneratorFactory extends DataGeneratorFactory {
    protected DummyDataGeneratorFactory(Settings settings) {
      super(settings);
    }

    @Override
    public DataGenerator getGenerator(OutputStream os) throws IOException {
      return new DataGenerator() {
        @Override
        public void write(Record record) throws IOException, DataGeneratorException {
        }

        @Override
        public void flush() throws IOException {
        }

        @Override
        public void close() throws IOException {
        }
      };
    }
  }

  // Runs before each test despite the name (annotated @Before, not @BeforeClass).
  @Before
  public void setUpClass() {
    File dir = new File("target", UUID.randomUUID().toString()).getAbsoluteFile();
    Assert.assertTrue(dir.mkdirs());
    testDir = new Path(dir.getAbsolutePath());
  }

  private Path getTestDir() {
    return testDir;
  }

  /**
   * Walks a writer through its whole lifecycle: late records rejected, release
   * below/above the record-count threshold, purge before/after the time cutoff,
   * and closeAll().
   */
  @Test
  public void testWritersLifecycle() throws Exception {
    RecordWriterManager mgr = new RecordWriterManagerTestBuilder()
      .context(ContextInfoCreator.createTargetContext(HdfsDTarget.class, "testWritersLifecycle", false,
        OnRecordError.TO_ERROR, null))
      .dirPathTemplate(getTestDir().toString() + "/${YYYY()}/${MM()}/${DD()}/${hh()}/${mm()}/${ss()}/${record:value('/')}")
      .build();

    ActiveRecordWriters writers = new ActiveRecordWriters(mgr);

    Date now = new Date();

    // record older than cut off
    Date recordDate = new Date(now.getTime() - 3 * 1000 - 1);
    Record record = RecordCreator.create();
    record.set(Field.create("a"));
    Assert.assertNull(writers.get(now, recordDate, record));

    recordDate = new Date(now.getTime());
    RecordWriter writer = writers.get(now, recordDate, record);
    Assert.assertNotNull(writer);
    Path tempPath = writer.getPath();
    writer.write(record);
    writers.release(writer, false);
    //writer should still be open
    Assert.assertFalse(writer.isClosed());
    writer = writers.get(now, recordDate, record);
    writer.write(record);
    writers.release(writer, false);
    //writer should be close because of going over record count threshold
    Assert.assertTrue(writer.isClosed());
    //we should be able to get a new writer as the cutoff didn't kick in yet
    writer = writers.get(now, recordDate, record);
    Assert.assertNotNull(writer);
    writers.purge();
    //purging should not close the writer as the cutoff didn't kick in yet
    Assert.assertFalse(writer.isClosed());
    // Real sleep past the 3s cutoff configured above.
    Thread.sleep(3001);
    writers.purge();
    //purging should close the writer as the cutoff kicked in yet
    Assert.assertTrue(writer.isClosed());

    //verifying closeAll() closes writers
    writer = writers.get(new Date(), new Date(), record);
    Assert.assertNotNull(writer);
    writers.closeAll();
    Assert.assertTrue(writer.isClosed());
  }

  /**
   * After the 1s idle timeout, a writer that received data must be closed and
   * its "_tmp_" file renamed to its final "prefix"-named file.
   */
  @Test
  public void testRenameOnIdle() throws Exception {
    RecordWriterManager mgr = new RecordWriterManagerTestBuilder()
      .context(ContextInfoCreator.createTargetContext(HdfsDTarget.class, "testWritersLifecycle", false,
        OnRecordError.TO_ERROR, null))
      .dirPathTemplate(getTestDir().toString())
      .build();
    mgr.setIdleTimeoutSeconds(1L);

    ActiveRecordWriters writers = new ActiveRecordWriters(mgr);
    Date now = new Date();
    Record record = RecordCreator.create();
    RecordWriter writer = writers.get(now, now, record);
    Assert.assertNotNull(writer);
    writer.write(record);
    writer.flush();
    //writer should still be open
    Assert.assertFalse(writer.isClosed());
    // Sleep past the idle timeout so the rename fires.
    Thread.sleep(1500);
    Assert.assertTrue(writer.isClosed());
    // Exactly one finalized file, zero leftover temp files.
    File[] files = new File(getTestDir().toString()).listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        return name.startsWith("prefix");
      }
    });
    Assert.assertEquals(1, files.length);
    files = new File(getTestDir().toString()).listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        return name.startsWith("_tmp_");
      }
    });
    Assert.assertEquals(0, files.length);
  }

  // NOTE(review): this test continues beyond the end of the visible chunk;
  // the remainder of its body (and the class close) lies outside this view.
  @Test
  public void testRenameOnIdleFlushNoData() throws Exception {
    RecordWriterManager mgr = new RecordWriterManagerTestBuilder()
      .context(ContextInfoCreator.createTargetContext(HdfsDTarget.class, "testWritersLifecycle", false,
        OnRecordError.TO_ERROR, null))
      .dirPathTemplate(getTestDir().toString())
      .build();
    mgr.setIdleTimeoutSeconds(1L);

    ActiveRecordWriters writers = new ActiveRecordWriters(mgr);
    Date now = new Date();
    Record record = RecordCreator.create();
    Map<String, Field> data = new HashMap<>();
    data.put("a", Field.create("blah"));
    RecordWriter writer = writers.get(now, now, record);
    Assert.assertNotNull(writer);
    writer.write(record);
writer.flush(); // writer should still be open Assert.assertFalse(writer.isClosed()); Thread.sleep(500); writer.flush(); Thread.sleep(900); Assert.assertTrue(writer.isClosed()); File[] files = new File(getTestDir().toString()).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith("prefix"); } }); Assert.assertEquals(1, files.length); files = new File(getTestDir().toString()).listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith("_tmp_"); } }); Assert.assertEquals(0, files.length); } @Test public void testFailOnFlushFail() throws Exception { RecordWriterManager mgr = new RecordWriterManagerTestBuilder() .context(ContextInfoCreator.createTargetContext(HdfsDTarget.class, "testFailOnFlushFail", false, OnRecordError.TO_ERROR, null)) .dirPathTemplate(getTestDir().toString()) .build(); mgr.setIdleTimeoutSeconds(1L); ActiveRecordWriters writers = new ActiveRecordWriters(mgr); Date now = new Date(); Record record = RecordCreator.create(); Map<String, Field> data = new HashMap<>(); data.put("a", Field.create("blah")); RecordWriter writer = writers.get(now, now, record); // Find the key to the writer in the internal map, so we can replace it with a spy String key = null; for (Map.Entry<String, RecordWriter> writerEntry : writers.writers.entrySet()) { if (writerEntry.getValue() == writer) { key = writerEntry.getKey(); } } writer = spy(writer); doThrow(IOException.class).when(writer).flush(); writers.writers.put(key, writer); writer.write(record); try { writers.flushAll(); Assert.fail("Should have thrown stage exception!"); } catch (StageException ex) { Assert.assertEquals(Errors.HADOOPFS_58, ex.getErrorCode()); Assert.assertTrue(ex.getMessage().contains(writer.getPath().toString())); Assert.assertTrue(ex.getCause() instanceof IOException); } } }
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package net.opengis.gml.provider;

import java.util.Collection;
import java.util.List;

import net.opengis.gml.GmlFactory;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.TriangulatedSurfaceType;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.FeatureMapUtil;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ViewerNotification;

/**
 * This is the item provider adapter for a {@link net.opengis.gml.TriangulatedSurfaceType} object.
 * <!-- begin-user-doc -->
 * NOTE: this class is EMF-generated (see the {@code @generated} tags); hand edits outside
 * the user-doc regions may be overwritten on regeneration. It adapts a
 * TriangulatedSurfaceType model object for EMF.Edit UIs: property sheet entries,
 * tree children, label, icon, and new-child commands.
 * <!-- end-user-doc -->
 * @generated
 */
public class TriangulatedSurfaceTypeItemProvider
    extends SurfaceTypeItemProvider
    implements
        IEditingDomainItemProvider,
        IStructuredItemContentProvider,
        ITreeItemContentProvider,
        IItemLabelProvider,
        IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public TriangulatedSurfaceTypeItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * Descriptors are built lazily on first call and cached; this type adds no
     * descriptors beyond those inherited from {@code SurfaceTypeItemProvider}.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        // Lazy init: the superclass populates itemPropertyDescriptors on first use.
        if (itemPropertyDescriptors == null) {
            super.getPropertyDescriptors(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * Adds the TrianglePatches containment feature to the inherited child features.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        // Lazy init: the superclass allocates and fills childrenFeatures on first use.
        if (childrenFeatures == null) {
            super.getChildrenFeatures(object);
            childrenFeatures.add(GmlPackage.eINSTANCE.getTriangulatedSurfaceType_TrianglePatches());
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.

        return super.getChildFeature(object, child);
    }

    /**
     * This returns TriangulatedSurfaceType.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/TriangulatedSurfaceType"));
    }

    /**
     * This returns the label text for the adapted class.
     * <!-- begin-user-doc -->
     * Label is the localized type name, followed by the object's gml:id when present.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        String label = ((TriangulatedSurfaceType)object).getId();
        return label == null || label.length() == 0 ?
            getString("_UI_TriangulatedSurfaceType_type") :
            getString("_UI_TriangulatedSurfaceType_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * A change to TrianglePatches refreshes content (structural change) but not labels.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);

        switch (notification.getFeatureID(TriangulatedSurfaceType.class)) {
            case GmlPackage.TRIANGULATED_SURFACE_TYPE__TRIANGLE_PATCHES:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * Offers creation of a TrianglePatchArrayPropertyType under the TrianglePatches feature.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);

        newChildDescriptors.add
            (createChildParameter
                (GmlPackage.eINSTANCE.getTriangulatedSurfaceType_TrianglePatches(),
                 GmlFactory.eINSTANCE.createTrianglePatchArrayPropertyType()));
    }

    /**
     * This returns the label text for {@link org.eclipse.emf.edit.command.CreateChildCommand}.
     * <!-- begin-user-doc -->
     * Uses the qualified "_UI_CreateChild_text2" form when the child feature is one of the
     * ambiguous name/patches features, so the menu entry names both child type and feature.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getCreateChildText(Object owner, Object feature, Object child, Collection<?> selection) {
        Object childFeature = feature;
        Object childObject = child;

        // Unwrap feature-map entries to the concrete feature/value they carry.
        if (childFeature instanceof EStructuralFeature && FeatureMapUtil.isFeatureMap((EStructuralFeature)childFeature)) {
            FeatureMap.Entry entry = (FeatureMap.Entry)childObject;
            childFeature = entry.getEStructuralFeature();
            childObject = entry.getValue();
        }

        boolean qualify =
            childFeature == GmlPackage.eINSTANCE.getAbstractGMLType_Name() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CoordinateOperationName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CsName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_DatumName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_EllipsoidName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_GroupName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MeridianName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MethodName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_ParameterName() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_SrsName() ||
            childFeature == GmlPackage.eINSTANCE.getSurfaceType_Patches() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_PolygonPatches() ||
            childFeature == GmlPackage.eINSTANCE.getDocumentRoot_TrianglePatches() ||
            childFeature == GmlPackage.eINSTANCE.getTriangulatedSurfaceType_TrianglePatches();

        if (qualify) {
            return getString
                ("_UI_CreateChild_text2",
                 new Object[] { getTypeText(childObject), getFeatureText(childFeature), getTypeText(owner) });
        }
        return super.getCreateChildText(owner, feature, child, selection);
    }

}
package org.usfirst.frc.team2791.commands; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import org.usfirst.frc.team2791.abstractSubsystems.AbstractShakerShooterArm; import org.usfirst.frc.team2791.util.ShakerCamera; import static org.usfirst.frc.team2791.robot.Robot.*; /** * Created by Akhil on 4/27/2016. * This is the code that uses the values from the camera to turn and lineup and fire */ public class AutoLineUpShot extends ShakerCommand implements Runnable { //This will decide which case to run private static final int STAGE_ONE = 0; //use one frame to lineup to the target and shoot private static final int SINGLE_FRAME_SHOT = 1; //single frame no shoot private static final int SINGLE_FRAME_LINEUP = 2; //multiple frames and shoot private static final int MULTIPLE_FRAME_SHOOT = 3; //special case after shooting private static final int AFTER_SHOT_CLEANUP = 4; //reset for anything than ran private static final int GENERAL_RESET = 5; private static final double angleMaxOutput = 0.7; //Settings // to correct any curving of the shot leftward or right ward public static double shootOffset = 0.5; //Run method flags private static boolean useMultipleFrames = false; private static boolean shootAfterAligned = false; private static boolean quickLineUpShot = false; //internal values private double targetTurnAngle = 0; private ShakerCamera.ParticleReport currentTarget; private Timer totalTime; private double frames_used = 0; public AutoLineUpShot() { totalTime = new Timer(); } public void run() { while (running) { SmartDashboard.putNumber("Vision Shot stage: ", counter); switch (counter) { default: case STAGE_ONE: //reset the number of frames that have been used frames_used = 0; //get a new frame reUpdateCurrentTarget(); //reset encoders because that is what we used to turn driveTrain.resetEncoders(); /*prep the shot, runs the shooter wheels to setpoint saves time in firing the useMultipleFrames is there because we always 
fire if we're using multiple frames*/ if (shootAfterAligned || useMultipleFrames) shooterWheels.prepShot(); /*This decides what case to call depending on the flags that are set true; */ if (useMultipleFrames) { if (shootAfterAligned) counter = MULTIPLE_FRAME_SHOOT; else { printTimeStamp(); System.out.println( "We have no code to line up with multiple frame and not shoot. Shooting anyway."); counter = MULTIPLE_FRAME_SHOOT; } } else { if (shootAfterAligned) counter = SINGLE_FRAME_SHOT; else counter = SINGLE_FRAME_LINEUP; } totalTime.reset(); totalTime.start(); debugSystemOut(); break; case SINGLE_FRAME_SHOT: //uses the single frame and fires if (driveTrain.setAngle(targetTurnAngle, angleMaxOutput, true, true)) { //after the desired angle is reached it will do a complete shot shooterWheels.completeShot(); debugSystemOut(); counter = AFTER_SHOT_CLEANUP; } break; case SINGLE_FRAME_LINEUP: if (driveTrain.setAngle(targetTurnAngle, angleMaxOutput, true, true)) { debugSystemOut(); counter = GENERAL_RESET; } break; case MULTIPLE_FRAME_SHOOT: if (driveTrain.setAngle(targetTurnAngle, angleMaxOutput, true, true)) { reUpdateCurrentTarget(); double camera_error = currentTarget.optimalTurnAngle + shootOffset; double camera_error_threshold = 0.75; if (quickLineUpShot) camera_error_threshold = 1.5; if (Math.abs(camera_error) < camera_error_threshold) { printTimeStamp(); System.out.println("I've found a good angle and am " + "going to busy it while the shooter spins up."); shooterWheels.completeShot(); counter = AFTER_SHOT_CLEANUP; } else if (!(Math.abs(camera_error) < camera_error_threshold)) { printTimeStamp(); System.out.println("I am waiting on camera error"); //the error is still greater than the thresh so update then angle value targetTurnAngle = driveTrain.getAngle() + currentTarget.optimalTurnAngle + shootOffset; } } break; case AFTER_SHOT_CLEANUP: // keep the same angle until we are done shooting if (driveTrain.setAngle(targetTurnAngle, angleMaxOutput, true, true)) { if 
(!shooterWheels.getIfCompleteShot()) { printTimeStamp(); System.out.println("Done shooting and bringing arm down"); //once we are done shooting do a reset IntakeAndShooterSynergy.setPosition(AbstractShakerShooterArm.ShooterHeight.LOW); counter = GENERAL_RESET; } } break; case GENERAL_RESET: // reset everything printTimeStamp(); System.out.println("Finished auto line up and resetting."); System.out.println("I took " + frames_used + " frames to shoot"); reset(); break; } try { Thread.sleep(100);//Run @ a 100 hz } catch (InterruptedException e) { e.printStackTrace(); } } } /*** * INTERNAL RUN METHODS ****/ /** * Through threading this method tells the camera thread to get * a new frame and then waits on it to process it, when the camera thread is * done it will send a notification to this thread which will then update the getTarget * <p> * This solves problems we had earlier about having the new frame be the same as the previous frame * it also lets us process the frame on the camera thread */ private void reUpdateCurrentTarget() { synchronized (cameraThread) { camera.getNextFrame(); try { cameraThread.wait(); } catch (InterruptedException e) { e.printStackTrace(); run(); } } currentTarget = camera.getTarget(); frames_used++; if (currentTarget == null) { System.out.println("Target Reports are empty so aborting."); counter = 40; return; } else // the target angle == current angle + targetAngleDiff + offset targetTurnAngle = driveTrain.getAngle() + currentTarget.optimalTurnAngle + shootOffset; } private void printTimeStamp() { System.out.print("TimeStamp: " + totalTime.get()); } private void debugSystemOut() { printTimeStamp(); System.out.println(" My target is: " + targetTurnAngle + " Current angle is: " + driveTrain.getAngle() + " Shooter offset is: " + shootOffset); } /************** * End Internal run methods ****************/ public void setUseMultipleFrames(boolean value) { //This will use multiple frames to lineup and fire useMultipleFrames = value; } public void 
setShootAfterAligned(boolean value) { //this will control whether to shoot after the lineup shootAfterAligned = value; } public void setQuickLineUpShot(boolean value) { //increases the camera error on the mutipleframe case for faster lineup quickLineUpShot = value; } public void start() { if (!running) { //sets the running boolean to true running = true; //puts camera into manual mode meaning take frame by frame when requested camera.setManualCapture(); //actually run the code... this should run on its own thread run(); } } public void reset() { //This sets the camera to automatically update to the dash again camera.setAutomaticCaptureAndUpdate(); //restart the totalTime that counts how long the whole process takes totalTime.reset(); totalTime.stop(); //set the running flag to false running = false; //reset the counter counter = STAGE_ONE; //stop all shooter stuff shooterWheels.resetShooterFlags(); //run method flags useMultipleFrames = false; shootAfterAligned = false; quickLineUpShot = false; driveTrain.forceBreakPID(); } public void updateSmartDash() { //This wasn't necessary either because we chose to spam System.out with info } public void debug() { //There was really nothing to put here....cuz who needs debugging } }
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.search_engines.settings;

import androidx.preference.Preference;
import androidx.preference.PreferenceFragmentCompat;
import androidx.test.filters.SmallTest;

import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.runner.RunWith;

import org.chromium.base.test.util.CallbackHelper;
import org.chromium.base.test.util.Criteria;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.DisableIf;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.FlakyTest;
import org.chromium.chrome.browser.init.ChromeBrowserInitializer;
import org.chromium.chrome.browser.search_engines.TemplateUrlServiceFactory;
import org.chromium.chrome.browser.settings.MainSettings;
import org.chromium.chrome.browser.settings.SettingsActivityTestRule;
import org.chromium.chrome.test.ChromeBrowserTestRule;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.components.browser_ui.settings.ManagedPreferenceDelegate;
import org.chromium.components.policy.test.annotations.Policies;
import org.chromium.components.search_engines.TemplateUrl;
import org.chromium.components.search_engines.TemplateUrlService;
import org.chromium.components.search_engines.TemplateUrlService.LoadListener;
import org.chromium.content_public.browser.test.util.TestThreadUtils;

import java.util.List;
import java.util.concurrent.ExecutionException;

/**
 * Tests for Search Engine Settings: changing the default engine through the settings
 * fragment, policy-disabled state, and HTTP-engine selection.
 */
@RunWith(ChromeJUnit4ClassRunner.class)
public class SearchEngineSettingsTest {
    private final ChromeBrowserTestRule mBrowserTestRule = new ChromeBrowserTestRule();

    private final SettingsActivityTestRule<SearchEngineSettings> mSearchEngineSettingsTestRule =
            new SettingsActivityTestRule<>(SearchEngineSettings.class);

    private final SettingsActivityTestRule<MainSettings> mMainSettingsTestRule =
            new SettingsActivityTestRule<>(MainSettings.class);

    // We need to destroy the SettingsActivity before tearing down the mock sign-in environment
    // setup in ChromeBrowserTestRule to avoid code crash.
    @Rule
    public final RuleChain mRuleChain = RuleChain.outerRule(mBrowserTestRule)
                                                .around(mMainSettingsTestRule)
                                                .around(mSearchEngineSettingsTestRule);

    /**
     * Change search engine and make sure it works correctly.
     */
    @Test
    @SmallTest
    @Feature({"Preferences"})
    @DisableIf.Build(hardware_is = "sprout", message = "crashes on android-one: crbug.com/540720")
    public void testSearchEnginePreference() throws Exception {
        ensureTemplateUrlServiceLoaded();

        mSearchEngineSettingsTestRule.startSettingsActivity();

        // Set the second search engine as the default using TemplateUrlService.
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            SearchEngineSettings pref = mSearchEngineSettingsTestRule.getFragment();
            pref.setValueForTesting("1");

            // Ensure that the second search engine in the list is selected.
            Assert.assertNotNull(pref);
            Assert.assertEquals("1", pref.getValueForTesting());

            // Simulate selecting the third search engine, ensure that TemplateUrlService is
            // updated.
            String keyword2 = pref.setValueForTesting("2");
            TemplateUrlService templateUrlService = TemplateUrlServiceFactory.get();
            Assert.assertEquals(
                    keyword2, templateUrlService.getDefaultSearchEngineTemplateUrl().getKeyword());

            // Simulate selecting the fourth search engine.
            // (Dead lookup of getKeywordFromIndexForTesting(3) and an unused url local removed;
            // setValueForTesting returns the selected keyword directly.)
            String keyword3 = pref.setValueForTesting("3");
            Assert.assertEquals(keyword3,
                    TemplateUrlServiceFactory.get()
                            .getDefaultSearchEngineTemplateUrl()
                            .getKeyword());
        });
    }

    /**
     * When policy disables the default search provider, the Search Engine preference in
     * MainSettings must be non-clickable (no fragment) and reported as policy-controlled.
     */
    @Test
    @SmallTest
    @Feature({"Preferences"})
    @Policies.Add({ @Policies.Item(key = "DefaultSearchProviderEnabled", string = "false") })
    public void testSearchEnginePreference_DisabledIfNoDefaultSearchEngine() throws Exception {
        TestThreadUtils.runOnUiThreadBlocking(
                () -> { ChromeBrowserInitializer.getInstance().handleSynchronousStartup(); });

        ensureTemplateUrlServiceLoaded();

        CriteriaHelper.pollUiThread(() -> TemplateUrlServiceFactory.get().isDefaultSearchManaged());

        mMainSettingsTestRule.startSettingsActivity();

        final MainSettings mainSettings = mMainSettingsTestRule.getFragment();

        final Preference searchEnginePref =
                waitForPreference(mainSettings, MainSettings.PREF_SEARCH_ENGINE);

        // A policy-managed preference must not open the settings fragment.
        CriteriaHelper.pollUiThread(() -> {
            Criteria.checkThat(searchEnginePref.getFragment(), Matchers.nullValue());
        });
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            ManagedPreferenceDelegate managedPrefDelegate =
                    mainSettings.getManagedPreferenceDelegateForTest();
            Assert.assertTrue(managedPrefDelegate.isPreferenceControlledByPolicy(searchEnginePref));
        });
    }

    /**
     * Make sure that when a user switches to a search engine that uses HTTP, the location
     * permission is not added.
     */
    /*
     * @SmallTest
     * @Feature({"Preferences"})
     * BUG=crbug.com/540706
     */
    @Test
    @FlakyTest
    @DisableIf.Build(hardware_is = "sprout", message = "fails on android-one: crbug.com/540706")
    public void testSearchEnginePreferenceHttp() throws Exception {
        ensureTemplateUrlServiceLoaded();

        mSearchEngineSettingsTestRule.startSettingsActivity();

        // Set the first search engine as the default using TemplateUrlService.
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            SearchEngineSettings pref = mSearchEngineSettingsTestRule.getFragment();
            pref.setValueForTesting("0");
        });

        TestThreadUtils.runOnUiThreadBlocking(() -> {
            // Ensure that the first search engine in the list is selected.
            SearchEngineSettings pref = mSearchEngineSettingsTestRule.getFragment();
            Assert.assertNotNull(pref);
            Assert.assertEquals("0", pref.getValueForTesting());

            // Simulate selecting a search engine that uses HTTP.
            int index = indexOfFirstHttpSearchEngine(pref);
            String keyword = pref.setValueForTesting(Integer.toString(index));
            TemplateUrlService templateUrlService = TemplateUrlServiceFactory.get();
            Assert.assertEquals(
                    keyword, templateUrlService.getDefaultSearchEngineTemplateUrl().getKeyword());
        });
    }

    /**
     * Returns the index of the first template URL whose search URL uses plain HTTP.
     * Fails the test if none exists.
     */
    private int indexOfFirstHttpSearchEngine(SearchEngineSettings pref) {
        TemplateUrlService templateUrlService = TemplateUrlServiceFactory.get();
        List<TemplateUrl> urls = templateUrlService.getTemplateUrls();
        int index;
        for (index = 0; index < urls.size(); ++index) {
            String keyword = pref.getKeywordFromIndexForTesting(index);
            String url = templateUrlService.getSearchEngineUrlFromTemplateUrl(keyword);
            if (url.startsWith("http:")) {
                return index;
            }
        }
        Assert.fail();
        return index;
    }

    /** Blocks until the TemplateUrlService reports loaded, triggering a load if needed. */
    private void ensureTemplateUrlServiceLoaded() throws Exception {
        // Make sure the template_url_service is loaded.
        final CallbackHelper onTemplateUrlServiceLoadedHelper = new CallbackHelper();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            if (TemplateUrlServiceFactory.get().isLoaded()) {
                onTemplateUrlServiceLoadedHelper.notifyCalled();
            } else {
                TemplateUrlServiceFactory.get().registerLoadListener(new LoadListener() {
                    @Override
                    public void onTemplateUrlServiceLoaded() {
                        onTemplateUrlServiceLoadedHelper.notifyCalled();
                    }
                });
                TemplateUrlServiceFactory.get().load();
            }
        });
        onTemplateUrlServiceLoadedHelper.waitForCallback(0);
    }

    /** Polls the UI thread until the named preference exists, then returns it. */
    private static Preference waitForPreference(final PreferenceFragmentCompat prefFragment,
            final String preferenceKey) throws ExecutionException {
        CriteriaHelper.pollUiThread(() -> {
            Criteria.checkThat("Expected valid preference for: " + preferenceKey,
                    prefFragment.findPreference(preferenceKey), Matchers.notNullValue());
        });

        return TestThreadUtils.runOnUiThreadBlocking(
                () -> prefFragment.findPreference(preferenceKey));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.adapter.elasticsearch;

import org.apache.calcite.jdbc.CalciteConnection;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.ViewTable;
import org.apache.calcite.schema.impl.ViewTableMacro;
import org.apache.calcite.test.CalciteAssert;
import org.apache.calcite.test.ElasticsearchChecker;
import org.apache.calcite.util.TestUtil;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableMap;

import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.ResourceAccessMode;
import org.junit.jupiter.api.parallel.ResourceLock;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;
import java.util.regex.PatternSyntaxException;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * Checks renaming of fields (also upper, lower cases) during projections.
 */
@Disabled("RestClient often timeout in PR CI")
@ResourceLock(value = "elasticsearch-scrolls", mode = ResourceAccessMode.READ)
class Projection2Test {

  public static final EmbeddedElasticsearchPolicy NODE = EmbeddedElasticsearchPolicy.create();

  /** Index name used by every test in this class. */
  private static final String NAME = "nested";

  /** Creates the index and inserts the single nested document the tests query. */
  @BeforeAll
  public static void setupInstance() throws Exception {
    final Map<String, String> mappings = ImmutableMap.of("a", "long",
        "b.a", "long", "b.b", "long", "b.c.a", "keyword");

    NODE.createIndex(NAME, mappings);

    String doc = "{'a': 1, 'b':{'a': 2, 'b':'3', 'c':{'a': 'foo'}}}".replace('\'', '"');
    NODE.insertDocument(NAME, (ObjectNode) NODE.mapper().readTree(doc));
  }

  /**
   * Builds a connection factory that registers the Elasticsearch schema plus a
   * "view" exposing flattened columns (including the implicit {@code _id}).
   */
  private CalciteAssert.ConnectionFactory newConnectionFactory() {
    return new CalciteAssert.ConnectionFactory() {
      @Override public Connection createConnection() throws SQLException {
        final Connection connection = DriverManager.getConnection("jdbc:calcite:");
        final SchemaPlus root = connection.unwrap(CalciteConnection.class).getRootSchema();

        root.add("elastic", new ElasticsearchSchema(NODE.restClient(), NODE.mapper(), NAME));

        // add calcite view programmatically
        final String viewSql = String.format(Locale.ROOT,
            "select _MAP['a'] AS \"a\", "
                + " _MAP['b.a']  AS \"b.a\", "
                +  " _MAP['b.b'] AS \"b.b\", "
                +  " _MAP['b.c.a'] AS \"b.c.a\", "
                +  " _MAP['_id'] AS \"id\" " // _id field is implicit
                + " from \"elastic\".\"%s\"", NAME);

        ViewTableMacro macro = ViewTable.viewMacro(root, viewSql,
            Collections.singletonList("elastic"), Arrays.asList("elastic", "view"), false);
        root.add("VIEW", macro);
        return connection;
      }
    };
  }

  @Test
  void projection() {
    CalciteAssert.that()
        .with(newConnectionFactory())
        .query("select \"a\", \"b.a\", \"b.b\", \"b.c.a\" from view")
        .returns("a=1; b.a=2; b.b=3; b.c.a=foo\n");
  }

  @Test
  void projection2() {
    String sql = String.format(Locale.ROOT, "select _MAP['a'], _MAP['b.a'], _MAP['b.b'], "
        + "_MAP['b.c.a'], _MAP['missing'], _MAP['b.missing'] from \"elastic\".\"%s\"", NAME);

    CalciteAssert.that()
        .with(newConnectionFactory())
        .query(sql)
        .returns("EXPR$0=1; EXPR$1=2; EXPR$2=3; EXPR$3=foo; EXPR$4=null; EXPR$5=null\n");
  }

  @Test
  void projection3() {
    CalciteAssert.that()
        .with(newConnectionFactory())
        .query(
            String.format(Locale.ROOT, "select * from \"elastic\".\"%s\"", NAME))
        .returns("_MAP={a=1, b={a=2, b=3, c={a=foo}}}\n");

    CalciteAssert.that()
        .with(newConnectionFactory())
        .query(
            String.format(Locale.ROOT, "select *, _MAP['a'] from \"elastic\".\"%s\"", NAME))
        .returns("_MAP={a=1, b={a=2, b=3, c={a=foo}}}; EXPR$1=1\n");
  }

  /**
   * Test that {@code _id} field is available when queried explicitly.
   * @see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-id-field.html">ID Field</a>
   */
  @Test
  void projectionWithIdField() {
    final CalciteAssert.AssertThat factory = CalciteAssert.that().with(newConnectionFactory());

    factory
        .query("select \"id\" from view")
        .returns(regexMatch("id=\\p{Graph}+"));

    factory
        .query("select \"id\", \"id\" from view")
        .returns(regexMatch("id=\\p{Graph}+; id=\\p{Graph}+"));

    factory
        .query("select \"id\", \"a\" from view")
        .returns(regexMatch("id=\\p{Graph}+; a=1"));

    factory
        .query("select \"a\", \"id\" from view")
        .returns(regexMatch("a=1; id=\\p{Graph}+"));

    // single _id column
    final String sql1 = String.format(Locale.ROOT, "select _MAP['_id'] "
        + " from \"elastic\".\"%s\"", NAME);
    factory
        .query(sql1)
        .returns(regexMatch("EXPR$0=\\p{Graph}+"));

    // multiple columns: _id and a
    final String sql2 = String.format(Locale.ROOT, "select _MAP['_id'], _MAP['a'] "
        + " from \"elastic\".\"%s\"", NAME);
    factory
        .query(sql2)
        .returns(regexMatch("EXPR$0=\\p{Graph}+; EXPR$1=1"));

    // multiple _id columns
    final String sql3 = String.format(Locale.ROOT, "select _MAP['_id'], _MAP['_id'] "
        + " from \"elastic\".\"%s\"", NAME);
    factory
        .query(sql3)
        .returns(regexMatch("EXPR$0=\\p{Graph}+; EXPR$1=\\p{Graph}+"));

    // _id column with same alias
    final String sql4 = String.format(Locale.ROOT, "select _MAP['_id'] as \"_id\" "
        + " from \"elastic\".\"%s\"", NAME);
    factory
        .query(sql4)
        .returns(regexMatch("_id=\\p{Graph}+"));

    // _id field not available implicitly
    factory
        .query(
            String.format(Locale.ROOT, "select * from \"elastic\".\"%s\"", NAME))
        .returns(regexMatch("_MAP={a=1, b={a=2, b=3, c={a=foo}}}"));

    factory
        .query(
            String.format(Locale.ROOT, "select *, _MAP['_id'] from \"elastic\".\"%s\"", NAME))
        .returns(regexMatch("_MAP={a=1, b={a=2, b=3, c={a=foo}}}; EXPR$1=\\p{Graph}+"));
  }

  /**
   * Avoid using scripting for simple projections.
   *
   * <p>When projecting simple fields (without expression) no
   * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html">scripting</a>
   * should be used just
   * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-source-filtering.html">_source</a>.
   */
  @Test
  void simpleProjectionNoScripting() {
    CalciteAssert.that()
        .with(newConnectionFactory())
        .query(
            String.format(Locale.ROOT, "select _MAP['_id'], _MAP['a'], _MAP['b.a'] from "
                + " \"elastic\".\"%s\" where _MAP['b.a'] = 2", NAME))
        .queryContains(
            ElasticsearchChecker.elasticsearchChecker("'query.constant_score.filter.term.b.a':2",
                "_source:['a', 'b.a']", "size:5196"))
        .returns(regexMatch("EXPR$0=\\p{Graph}+; EXPR$1=1; EXPR$2=2"));
  }

  /**
   * Allows values to contain regular expressions instead of exact values.
   * <pre>
   *  {@code
   *     key1=foo1; key2=\\w+; key4=\\d{3,4}
   *  }
   * </pre>
   * @param lines lines with regexp
   * @return consumer to be used in {@link org.apache.calcite.test.CalciteAssert.AssertQuery}
   */
  private static Consumer<ResultSet> regexMatch(String... lines) {
    return rset -> {
      try {
        final int columnCount = rset.getMetaData().getColumnCount();
        final StringBuilder actual = new StringBuilder();
        int processedRows = 0;
        boolean fail = false;
        while (rset.next()) {
          // More rows than expected lines is itself a mismatch.
          if (processedRows >= lines.length) {
            fail = true;
          }

          for (int i = 1; i <= columnCount; i++) {
            final String name = rset.getMetaData().getColumnName(i);
            final String value = rset.getString(i);
            actual.append(name).append('=').append(value);
            if (i < columnCount) {
              actual.append("; ");
            }

            // don't re-check if already failed
            if (!fail) {
              // splitting string of type: key1=val1; key2=val2
              final String keyValue = lines[processedRows].split("; ")[i - 1];
              final String[] parts = keyValue.split("=", 2);
              final String expectedName = parts[0];
              final String expectedValue = parts[1];

              boolean valueMatches = expectedValue.equals(value);

              if (!valueMatches) {
                // try regex
                try {
                  valueMatches = value != null && value.matches(expectedValue);
                } catch (PatternSyntaxException ignore) {
                  // probably not a regular expression
                }
              }

              fail = !(name.equals(expectedName) && valueMatches);
            }
          }

          processedRows++;
        }

        // also check that we processed the same number of rows as expected.
        // Fix: this was 'fail &= processedRows == lines.length', which could only
        // CLEAR a previously detected failure and never flag a row-count mismatch.
        fail |= processedRows != lines.length;

        if (fail) {
          assertEquals(String.join("\n", Arrays.asList(lines)), actual.toString());
          fail("Should have failed on previous line, but for some reason didn't");
        }
      } catch (SQLException e) {
        throw TestUtil.rethrow(e);
      }
    };
  }
}
package com.beecavegames.common.data;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.joda.time.DateTime;
import org.joda.time.Duration;

import com.beecavegames.Game;
import com.beecavegames.bjc.PlayerDay;
import com.beecavegames.bjc.PlayerHistory;
import com.beecavegames.bjc.entities.Player;
import com.beecavegames.bjc.handlers.VipClaimPrizeWheelHandler;
import com.beecavegames.bjc.util.PlayerUtil;
import com.beecavegames.bjc.util.ViralUtil;
import com.beecavegames.common.PendingReward;
import com.beecavegames.common.Response;
import com.beecavegames.common.entities.NotableEvent;
import com.beecavegames.common.handlers.admin.VipPrizeWheelResponse;
import com.beecavegames.entities.PlayerGrant;
import com.beecavegames.entities.PlayerGrant.GrantSource;
import com.beecavegames.eris.random.RNG;
import com.beecavegames.social.GiftOffer;
import com.beecavegames.social.events.GameEvent;
import com.beecavegames.stats.CounterEvent;
import com.beecavegames.util.Config;
import com.beecavegames.util.DateUtil;
import com.beecavegames.util.MoneyAmount;
import com.beecavegames.util.Version;
import com.fasterxml.jackson.annotation.JsonView;
import com.google.common.collect.ImmutableMap;

import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;

/**
 * Pending reward for a VIP prize-wheel spin.
 *
 * <p>The payout is computed in the constructor from the chosen wedge value,
 * a VIP multiplier, an active-days multiplier and an active-friends bonus,
 * and is credited to the player when {@link #claim} is called.
 */
@NoArgsConstructor
@Slf4j
public class PrizeWheelPendingReward extends PendingReward {
	// Number of wedges on the wheel; used to presize the wedge-value list.
	private static final int NUMBER_OF_WEDGES = 20;
	// Minimum client version that supports this reward type (see isSupported).
	private static final Version MIN_PWPR_VERSION = new Version(4,27,0);

	public int wedgeIndex;                 // index of the winning wedge
	public List<MoneyAmount> wedgeValues;  // value of every wedge shown to the client
	public int activeDays;                 // zero-based recent-activity streak (see getActiveDays)
	public boolean levelUp;                // true when the spin was granted by a level-up
	public int activeFriendsPrize;         // flat bonus derived from active friend count
	public int vipPoints;
	public int level;
	public GiftOffer giftOffer;
	public String signedData;              // signed open-graph payload; may be null (see awardPrizeWheelSpin)
	// Final computed payout; internal-only in JSON views.
	@JsonView(Game.Views.Internal.class)
	public MoneyAmount payout;
	// Audit trail of the payout computation, attached to the grant on claim.
	@JsonView(Game.Views.Internal.class)
	Map<String, Object> notes;

	/**
	 * Builds the reward and computes the payout:
	 * payout = wedgeValues[wedgeIndex] * vipMultiplier * dayMultiplier + activeFriendsPrize.
	 * Also records each factor into {@code notes} for auditing.
	 */
	public PrizeWheelPendingReward(PlayerUtil playerUtil, Date created, int wedgeIndex, List<MoneyAmount> wedgeValues,
			int activeDays, boolean levelUp, int activeFriendsPrize, int vipPoints, int level, GiftOffer giftOffer,
			String signedData) {
		super(created);
		this.wedgeIndex = wedgeIndex;
		this.wedgeValues = wedgeValues;
		this.activeDays = activeDays;
		this.levelUp = levelUp;
		this.activeFriendsPrize = activeFriendsPrize;
		this.vipPoints = vipPoints;
		this.level = level;
		this.giftOffer = giftOffer;
		this.signedData = signedData;
		final MoneyAmount maxWedge = highestWedge(wedgeValues);
		final MoneyAmount chosenWedge = wedgeValues.get(wedgeIndex);
		payout = chosenWedge;
		final PrizeWheelSpec prizeWheelSpec = playerUtil.staticData.get(PrizeWheelSpec.class, 1);
		final VIPSpec vipSpec = playerUtil.getVIPSpecFromPoints(null, vipPoints);
		// activeDays indexes directly into the day-multiplier table from the spec.
		final MoneyAmount dayMultiplier = MoneyAmount.valueOf(prizeWheelSpec.getPrizeWheelDayMultipliers().get(activeDays));
		double vipMultiplier = vipSpec.getPrizeWheelVipMultiplier();
		final MoneyAmount vipMultiplierAmount = MoneyAmount.valueOf(vipMultiplier);
		payout = payout.multiply(vipMultiplierAmount);
		payout = payout.multiply(dayMultiplier);
		payout = payout.add(MoneyAmount.valueOf(activeFriendsPrize));
		notes=new HashMap<>();
		notes.put("vipMultiplier", vipMultiplierAmount);
		notes.put("dayMultiplier", dayMultiplier);
		// notes store the one-based day count shown to users.
		notes.put("activeDays", activeDays + 1);
		notes.put("chosenWedge", chosenWedge);
		notes.put("maxWedge", maxWedge);
		notes.put("activeFriendsPrize", activeFriendsPrize);
	}

	// Returns the largest wedge value in the list (ZERO for an empty list).
	private MoneyAmount highestWedge(List<MoneyAmount> wedges){
		MoneyAmount max = MoneyAmount.ZERO;
		for (MoneyAmount wedge: wedges) {
			max = MoneyAmount.max(max, wedge);
		}
		return max;
	}

	/**
	 * Credits the payout to the player: bumps the appropriate stat counters,
	 * records a WHEEL_SPIN notable event, issues a {@link PlayerGrant} carrying
	 * the audit {@code notes}, and clears the pending wheel index.
	 *
	 * @return map with the granted chip amount under key {@code "chips"}
	 */
	@Override
	public Map<String, Long> claim(Player p, PlayerUtil playerUtil) {
		Date now = DateUtil.now();
		if (levelUp) {
			p.add(new CounterEvent(VipClaimPrizeWheelHandler.COUNTER_LEVEL_UP_WHEEL_CLAIM, now, payout.longValue()));
		} else {
			// Only regular (non-level-up) spins advance the active-days streak
			// counter and the last-reward timestamp.
			p.add(new CounterEvent(VipClaimPrizeWheelHandler.COUNTER_PRIZE_WHEEL_CLAIM, now, payout.longValue()));
			p.add(new CounterEvent(VipClaimPrizeWheelHandler.COUNTER_ACTIVE_DAYS, now, activeDays + 1));
			p.getTimestamps().setLastPrizeWheelReward(DateUtil.nowDT());
		}
		p.getHistory().getToday().addNotableEvent(NotableEvent.Type.WHEEL_SPIN, p.getBeeId(), 0, 0, DateUtil.nowDT(),
				(int) payout.doubleValue());
		PlayerGrant<MoneyAmount> grant = new PlayerGrant<MoneyAmount>(p.getUserId()).withGrantable(payout)
				.source((levelUp) ? GrantSource.LEVEL_UP_WHEEL : GrantSource.PRIZE_WHEEL).notes(notes);
		playerUtil.creditToPlayer(p, grant);
		p.setPendingPrizeWheelIndex(null);
		return ImmutableMap.of("chips", payout.longValue());
	}

	// Pending prize-wheel rewards never expire/invalidate.
	@Override
	public boolean isValid(StaticData staticData) {
		return true;
	}

	/**
	 * Builds the list of wedge chip values for the wheel from the spec's base
	 * values scaled by the VIP wheel multiplier. If the player is a recent
	 * payer (and the config/event allows it), every wedge equal to the max is
	 * replaced by the configured "big wedge" value. Side effect: records
	 * maxWedge / bigWedge details into {@code notes}.
	 */
	public static List<MoneyAmount> getWedgeValues(Player p, int level, GameEvent event, PrizeWheelSpec prizeWheelSpec,
			Map<String,Object> notes) {
		List<MoneyAmount> result = new ArrayList<>(NUMBER_OF_WEDGES);
		List<Integer> prizeWheelWedges = prizeWheelSpec.getVipPrizeWheelValues(event, level);
		final float scaleFactor = prizeWheelSpec.getVipWheelMultiplier();
		MoneyAmount maxWedge = MoneyAmount.ZERO;
		for (long baseValue : prizeWheelWedges) {
			float amountFloat = baseValue * scaleFactor;
			long chipAmount = Math.round(amountFloat);
			MoneyAmount value = MoneyAmount.valueOf(chipAmount);
			result.add(value);
			// Track the running maximum; the note keeps the raw chip amount.
			if (value.compareTo(maxWedge) > 0) {
				maxWedge = value;
				notes.put("maxWedge", chipAmount);
			}
		}
		if ((Config.getBoolean("bigWedgeOnLevelUp", true) || event == GameEvent.LEVEL_UP)
				&& p.isRecentPayer(Duration.standardDays(Config.getInt("buyerBonusWedgeDays", 7)))) {
			notes.put("bigWedgeQualified", true);
			// Replace every wedge tied for the maximum with the buyer-bonus value.
			for (int i = 0; i < result.size(); i++) {
				if (result.get(i).equals(maxWedge)) {
					int bigWedgeValue = Config.getInt("buyerBonusWedgeValue",150000);
					result.set(i, MoneyAmount.valueOf(bigWedgeValue));
					notes.put("bigWedgeIndex", i);
					notes.put("maxWedge", bigWedgeValue);
				}
			}
		}
		return result;
	}

	/**
	 * Creates a new pending prize-wheel reward for the player: picks the
	 * winning wedge, builds the wedge values, signs the open-graph payload,
	 * and assembles the reward object.
	 */
	public static PrizeWheelPendingReward awardPrizeWheelSpin(final GameEvent event, Player p, PlayerUtil playerUtil,
			GiftOffer giftOffer, int level) {
		VipPrizeWheelResponse response;
		if (event != GameEvent.LEVEL_UP && p.prizeWheelAvailable() == false) {
			// NOTE(review): this failure response is built and logged but never
			// returned or sent — the method continues and still awards the spin.
			// Confirm whether the spin should be aborted here.
			response = new VipPrizeWheelResponse();
			response.status = Response.Status.FAIL;
			response.message = "Prize wheel reward unavailable.";
			log.warn("Prize wheel reward unavailable. isLevelUp:"+event);
		}
		final PrizeWheelSpec prizeWheelSpec = playerUtil.staticData.get(PrizeWheelSpec.class, 1);
		final int wedgeIndex = getWinningWedgeIndex(prizeWheelSpec, event, level);
		Map<String,Object> notes = new HashMap<>();
		List<MoneyAmount> wedgeValues = getWedgeValues(p, level, event, prizeWheelSpec, notes);
		final int activeDays = getActiveDays(p);
		String signedData = null;
		try {
			signedData = ViralUtil.signData(new OpenGraphStoryRecord(p.getBeeId(), GrantSource.PRIZE_WHEEL));
		} catch(IOException e) {}
		// NOTE(review): a signing failure is silently swallowed above, leaving
		// signedData null — consider at least logging the exception.
		final int activeFriendsPrize = getActiveFriendsAmount(p, prizeWheelSpec);
		final int vipPoints = p.getVipPoints();
		return new PrizeWheelPendingReward(playerUtil, DateUtil.now(), wedgeIndex, wedgeValues, activeDays,
				event == GameEvent.LEVEL_UP, activeFriendsPrize, vipPoints, level, giftOffer, signedData);
	}

	/**
	 * Counts consecutive recorded player-days within the last five days,
	 * converted to a zero-based index capped at NUMBER_OF_DAYS - 1, suitable
	 * for indexing the day-multiplier table.
	 */
	public static int getActiveDays(Player p) {
		int result = 0;
		final int NUMBER_OF_DAYS = 5;
		final DateTime now = new DateTime();
		final DateTime fiveDaysAgo = now.minus( Duration.standardDays(NUMBER_OF_DAYS) );
		PlayerHistory history = p.getHistory();
		// Walk history newest-first; stop at the first day older than the window.
		for (PlayerDay pd: history.getDays().descendingMap().values()) {
			DateTime day = new DateTime(pd.getDate());
			if (day.isBefore(fiveDaysAgo)) {
				break;
			}
			result ++;
		}
		if (result > 0) {
			result--; // Make it zero-based
		}
		return (result < NUMBER_OF_DAYS) ? result : NUMBER_OF_DAYS - 1;
	}

	// Flat bonus: active friends (capped at the spec maximum) times the base pay.
	public static int getActiveFriendsAmount( Player p, PrizeWheelSpec prizeWheelSpec) {
		int maxActiveFriends = prizeWheelSpec.getMaxActiveFriends();
		int activeFriendsBasePay = prizeWheelSpec.getActiveFriendsBasePay();
		return Math.min(p.getActiveFriendCount(), maxActiveFriends) * activeFriendsBasePay;
	}

	// Samples the winning wedge index from the spec's weight table.
	private static int getWinningWedgeIndex(PrizeWheelSpec prizeWheelSpec, final GameEvent event, int level) {
		return prizeWheelSpec.getWeights(event, level).next(RNG.instance());
	}

	// Only clients at or above MIN_PWPR_VERSION understand this reward type.
	@Override
	public boolean isSupported(Version clientVersion) {
		return clientVersion.isAtLeast(MIN_PWPR_VERSION);
	}
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.util;

import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import org.jetbrains.annotations.NotNull;

import java.util.HashMap;
import java.util.Map;

/**
 * Utilities for reasoning about Java operator precedence (JLS chapter 15)
 * and for deciding whether parentheses are required when an expression is
 * moved into a new parent expression.
 *
 * <p>Lower precedence constants bind tighter; LITERAL_PRECEDENCE (0) is the
 * tightest, LAMBDA_PRECEDENCE (17) the loosest.
 */
public final class PsiPrecedenceUtil {
  public static final int PARENTHESIZED_PRECEDENCE = 0;
  public static final int LITERAL_PRECEDENCE = 0;
  public static final int METHOD_CALL_PRECEDENCE = 1;
  public static final int POSTFIX_PRECEDENCE = 2;
  public static final int PREFIX_PRECEDENCE = 3;
  public static final int TYPE_CAST_PRECEDENCE = 4;
  public static final int MULTIPLICATIVE_PRECEDENCE = 5;
  public static final int ADDITIVE_PRECEDENCE = 6;
  public static final int SHIFT_PRECEDENCE = 7;
  public static final int RELATIONAL_PRECEDENCE = 8;
  public static final int EQUALITY_PRECEDENCE = 9;
  public static final int BINARY_AND_PRECEDENCE = 10;
  public static final int BINARY_XOR_PRECEDENCE = 11;
  public static final int BINARY_OR_PRECEDENCE = 12;
  public static final int AND_PRECEDENCE = 13;
  public static final int OR_PRECEDENCE = 14;
  public static final int CONDITIONAL_PRECEDENCE = 15;
  public static final int ASSIGNMENT_PRECEDENCE = 16;
  public static final int LAMBDA_PRECEDENCE = 17; // jls-15.2
  public static final int NUM_PRECEDENCES = 18;

  // Maps each binary/polyadic operator token to its precedence level above.
  private static final Map<IElementType, Integer> s_binaryOperatorPrecedence = new HashMap<>(NUM_PRECEDENCES);

  static {
    s_binaryOperatorPrecedence.put(JavaTokenType.PLUS, ADDITIVE_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.MINUS, ADDITIVE_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.ASTERISK, MULTIPLICATIVE_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.DIV, MULTIPLICATIVE_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.PERC, MULTIPLICATIVE_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.ANDAND, AND_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.OROR, OR_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.AND, BINARY_AND_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.OR, BINARY_OR_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.XOR, BINARY_XOR_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.LTLT, SHIFT_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.GTGT, SHIFT_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.GTGTGT, SHIFT_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.GT, RELATIONAL_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.GE, RELATIONAL_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.LT, RELATIONAL_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.LE, RELATIONAL_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.EQEQ, EQUALITY_PRECEDENCE);
    s_binaryOperatorPrecedence.put(JavaTokenType.NE, EQUALITY_PRECEDENCE);
  }

  /** Returns true for operator tokens whose operands may be swapped: + * == != & | ^. */
  public static boolean isCommutativeOperator(@NotNull IElementType token) {
    return token == JavaTokenType.PLUS || token == JavaTokenType.ASTERISK ||
           token == JavaTokenType.EQEQ || token == JavaTokenType.NE ||
           token == JavaTokenType.AND || token == JavaTokenType.OR || token == JavaTokenType.XOR;
  }

  /**
   * Returns true when the polyadic expression's operands may be reordered.
   * String concatenation via '+' is excluded — it is order-sensitive.
   */
  public static boolean isCommutativeOperation(PsiPolyadicExpression expression) {
    final IElementType tokenType = expression.getOperationTokenType();
    if (!isCommutativeOperator(tokenType)) {
      return false;
    }
    final PsiType type = expression.getType();
    return type != null && !type.equalsToText(CommonClassNames.JAVA_LANG_STRING);
  }

  /**
   * Returns true when the operation may be regrouped without changing the
   * result. Boxed operand types are unboxed first; + and * are not
   * associative for float/double arithmetic.
   */
  public static boolean isAssociativeOperation(PsiPolyadicExpression expression) {
    final IElementType tokenType = expression.getOperationTokenType();
    final PsiType type = expression.getType();
    final PsiPrimitiveType primitiveType;
    if (type instanceof PsiClassType) {
      // A boxed type only qualifies if it unboxes to a primitive.
      primitiveType = PsiPrimitiveType.getUnboxedType(type);
      if (primitiveType == null) {
        return false;
      }
    }
    else if (type instanceof PsiPrimitiveType) {
      primitiveType = (PsiPrimitiveType)type;
    }
    else {
      return false;
    }
    if (JavaTokenType.PLUS == tokenType || JavaTokenType.ASTERISK == tokenType) {
      // Floating-point + and * are not associative due to rounding.
      return !PsiType.FLOAT.equals(primitiveType) && !PsiType.DOUBLE.equals(primitiveType);
    }
    else if (JavaTokenType.EQEQ == tokenType || JavaTokenType.NE == tokenType) {
      return PsiType.BOOLEAN.equals(primitiveType);
    }
    else if (JavaTokenType.AND == tokenType || JavaTokenType.OR == tokenType || JavaTokenType.XOR == tokenType) {
      return true;
    }
    else if (JavaTokenType.OROR == tokenType || JavaTokenType.ANDAND == tokenType) {
      return true;
    }
    return false;
  }

  /**
   * Returns the precedence level of the given expression, or -1 for
   * expression kinds not covered below.
   */
  public static int getPrecedence(PsiExpression expression) {
    if (expression instanceof PsiThisExpression ||
        expression instanceof PsiLiteralExpression ||
        expression instanceof PsiSuperExpression ||
        expression instanceof PsiClassObjectAccessExpression ||
        expression instanceof PsiArrayAccessExpression ||
        expression instanceof PsiArrayInitializerExpression) {
      return LITERAL_PRECEDENCE;
    }
    if (expression instanceof PsiReferenceExpression) {
      // Qualified references behave like member accesses; bare names like literals.
      final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)expression;
      if (referenceExpression.getQualifier() != null) {
        return METHOD_CALL_PRECEDENCE;
      }
      else {
        return LITERAL_PRECEDENCE;
      }
    }
    if (expression instanceof PsiMethodCallExpression || expression instanceof PsiNewExpression) {
      return METHOD_CALL_PRECEDENCE;
    }
    if (expression instanceof PsiTypeCastExpression) {
      return TYPE_CAST_PRECEDENCE;
    }
    if (expression instanceof PsiPrefixExpression) {
      return PREFIX_PRECEDENCE;
    }
    if (expression instanceof PsiPostfixExpression || expression instanceof PsiSwitchExpression) {
      return POSTFIX_PRECEDENCE;
    }
    if (expression instanceof PsiPolyadicExpression) {
      // Precedence of a polyadic expression is that of its operator token.
      final PsiPolyadicExpression polyadicExpression = (PsiPolyadicExpression)expression;
      return getPrecedenceForOperator(polyadicExpression.getOperationTokenType());
    }
    if (expression instanceof PsiInstanceOfExpression) {
      return RELATIONAL_PRECEDENCE;
    }
    if (expression instanceof PsiConditionalExpression) {
      return CONDITIONAL_PRECEDENCE;
    }
    if (expression instanceof PsiAssignmentExpression) {
      return ASSIGNMENT_PRECEDENCE;
    }
    if (expression instanceof PsiParenthesizedExpression) {
      return PARENTHESIZED_PRECEDENCE;
    }
    if (expression instanceof PsiLambdaExpression) {
      return LAMBDA_PRECEDENCE;
    }
    return -1;
  }

  /**
   * Returns the precedence for a binary/polyadic operator token.
   *
   * @throws IllegalArgumentException if the token is not a known binary operator
   */
  public static int getPrecedenceForOperator(@NotNull IElementType operator) {
    final Integer precedence = s_binaryOperatorPrecedence.get(operator);
    if (precedence == null) {
      throw new IllegalArgumentException("unknown operator: " + operator);
    }
    return precedence.intValue();
  }

  /**
   * Returns true when the parentheses of the given parenthesized expression
   * are required for its content to keep its meaning inside the parent.
   */
  public static boolean areParenthesesNeeded(PsiParenthesizedExpression expression,
                                             boolean ignoreClarifyingParentheses) {
    final PsiElement parent = expression.getParent();
    if (!(parent instanceof PsiExpression)) {
      return false;
    }
    final PsiExpression child = expression.getExpression();
    return child == null || areParenthesesNeeded(child, (PsiExpression)parent, ignoreClarifyingParentheses);
  }

  /**
   * Returns true when {@code expression} must be parenthesized to appear at
   * its position inside {@code parentExpression} without changing semantics.
   *
   * @param ignoreClarifyingParentheses when true, parentheses that merely
   *        clarify (mixed operators, instanceof in polyadics, ...) are also
   *        treated as needed
   */
  public static boolean areParenthesesNeeded(PsiExpression expression,
                                             PsiExpression parentExpression,
                                             boolean ignoreClarifyingParentheses) {
    if (parentExpression instanceof PsiParenthesizedExpression ||
        parentExpression instanceof PsiArrayInitializerExpression) {
      return false;
    }
    if (parentExpression instanceof PsiArrayAccessExpression) {
      // Needed only when the expression is the array part (not the index).
      final PsiArrayAccessExpression arrayAccessExpression = (PsiArrayAccessExpression)parentExpression;
      return PsiTreeUtil.isAncestor(arrayAccessExpression.getArrayExpression(), expression, false);
    }
    final int parentPrecedence = getPrecedence(parentExpression);
    final int childPrecedence = getPrecedence(expression);
    if (parentPrecedence > childPrecedence) {
      // Child binds tighter than parent: parentheses are redundant, unless we
      // are asked to keep "clarifying" parentheses around mixed constructs.
      if (ignoreClarifyingParentheses) {
        if (expression instanceof PsiPolyadicExpression) {
          if (parentExpression instanceof PsiPolyadicExpression ||
              parentExpression instanceof PsiConditionalExpression ||
              parentExpression instanceof PsiInstanceOfExpression) {
            return true;
          }
        }
        else if (expression instanceof PsiInstanceOfExpression) {
          return true;
        }
      }
      return false;
    }
    if (parentExpression instanceof PsiPolyadicExpression && expression instanceof PsiPolyadicExpression) {
      final PsiPolyadicExpression parentPolyadicExpression = (PsiPolyadicExpression)parentExpression;
      final PsiType parentType = parentPolyadicExpression.getType();
      if (parentType == null) {
        return true;
      }
      final PsiPolyadicExpression childPolyadicExpression = (PsiPolyadicExpression)expression;
      final PsiType childType = childPolyadicExpression.getType();
      if (!parentType.equals(childType)) {
        return true;
      }
      if (childType.equalsToText(CommonClassNames.JAVA_LANG_STRING) &&
          !PsiTreeUtil.isAncestor(parentPolyadicExpression.getOperands()[0], childPolyadicExpression, true)) {
        // String concatenation: dropping parentheses around a non-leading
        // operand is only safe when the child itself starts concatenating
        // strings from its first two operands.
        final PsiExpression[] operands = childPolyadicExpression.getOperands();
        return !childType.equals(operands[0].getType()) && !childType.equals(operands[1].getType());
      }
      else if (childType.equals(PsiType.BOOLEAN)) {
        // Mixed boolean/non-boolean operands (e.g. comparisons) require parens.
        final PsiExpression[] operands = childPolyadicExpression.getOperands();
        for (PsiExpression operand : operands) {
          PsiType operandType = operand.getType();
          if (operandType != null && !PsiType.BOOLEAN.equals(operandType)) {
            return true;
          }
        }
      }
      final IElementType parentOperator = parentPolyadicExpression.getOperationTokenType();
      final IElementType childOperator = childPolyadicExpression.getOperationTokenType();
      if (ignoreClarifyingParentheses) {
        // Keep parentheses that clarify mixing of different operators.
        if (!childOperator.equals(parentOperator)) {
          return true;
        }
      }
      final PsiExpression[] parentOperands = parentPolyadicExpression.getOperands();
      if (!PsiTreeUtil.isAncestor(parentOperands[0], expression, false)) {
        // Non-leading operand: removing parens regroups the computation, which
        // is only safe for associative operations (and never for / or %).
        if (!isAssociativeOperation(parentPolyadicExpression) ||
            JavaTokenType.DIV == childOperator || JavaTokenType.PERC == childOperator) {
          return true;
        }
      }
    }
    else if (parentExpression instanceof PsiConditionalExpression &&
             expression instanceof PsiConditionalExpression) {
      // A conditional inside another conditional only needs parens in the condition.
      final PsiConditionalExpression conditionalExpression = (PsiConditionalExpression)parentExpression;
      final PsiExpression condition = conditionalExpression.getCondition();
      return PsiTreeUtil.isAncestor(condition, expression, true);
    }
    else if (expression instanceof PsiLambdaExpression) { // jls-15.16
      if (parentExpression instanceof PsiTypeCastExpression) {
        return false;
      }
      else if (parentExpression instanceof PsiConditionalExpression) { // jls-15.25
        // A lambda may appear unparenthesized in the branches, but not in the condition.
        final PsiConditionalExpression conditionalExpression = (PsiConditionalExpression)parentExpression;
        return PsiTreeUtil.isAncestor(conditionalExpression.getCondition(), expression, true);
      }
    }
    return parentPrecedence < childPrecedence;
  }

  /**
   * Returns true when {@code rhs} needs parentheses to be folded into a
   * compound assignment (e.g. turning {@code a = a + b * c} into
   * {@code a += b * c}).
   */
  public static boolean areParenthesesNeeded(PsiJavaToken compoundAssignmentToken, PsiExpression rhs) {
    if (rhs instanceof PsiPolyadicExpression) {
      final PsiPolyadicExpression binaryExpression = (PsiPolyadicExpression)rhs;
      final int precedence1 = getPrecedenceForOperator(binaryExpression.getOperationTokenType());
      // Convert e.g. += to + before comparing precedence.
      final IElementType signTokenType = compoundAssignmentToken.getTokenType();
      final IElementType newOperatorToken = TypeConversionUtil.convertEQtoOperation(signTokenType);
      final int precedence2 = getPrecedenceForOperator(newOperatorToken);
      return precedence1 >= precedence2 || !isCommutativeOperator(newOperatorToken);
    }
    else {
      return rhs instanceof PsiConditionalExpression ||
             rhs instanceof PsiAssignmentExpression ||
             rhs instanceof PsiInstanceOfExpression;
    }
  }
}
package com.iclockwork.percy.wechat4j.internal.json;

import java.io.IOException;
import java.io.Writer;

/*
Copyright (c) 2006 JSON.org

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

The Software shall be used for Good, not Evil.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

/**
 * JSONWriter provides a quick and convenient way of producing JSON text.
 * The texts produced strictly conform to JSON syntax rules. No whitespace is
 * added, so the results are ready for transmission or storage. Each instance of
 * JSONWriter can produce one JSON text.
 * <p>
 * A JSONWriter instance provides a <code>value</code> method for appending
 * values to the
 * text, and a <code>key</code>
 * method for adding keys before values in objects. There are <code>array</code>
 * and <code>endArray</code> methods that make and bound array values, and
 * <code>object</code> and <code>endObject</code> methods which make and bound
 * object values. All of these methods return the JSONWriter instance,
 * permitting a cascade style. For example, <pre>
 * new JSONWriter(myWriter)
 *     .object()
 *         .key("JSON")
 *         .value("Hello, World!")
 *     .endObject();</pre> which writes <pre>
 * {"JSON":"Hello, World!"}</pre>
 * <p>
 * The first method called must be <code>array</code> or <code>object</code>.
 * There are no methods for adding commas or colons. JSONWriter adds them for
 * you. Objects and arrays can be nested up to 200 levels deep.
 * <p>
 * This can sometimes be easier than using a JSONObject to build a string.
 * @author JSON.org
 * @version 2011-11-24
 */
public class JSONWriter {
    /** Maximum nesting depth of objects/arrays. */
    private static final int maxdepth = 200;

    /**
     * The comma flag determines if a comma should be output before the next
     * value.
     */
    private boolean comma;

    /**
     * The current mode. Values:
     * 'a' (array),
     * 'd' (done),
     * 'i' (initial),
     * 'k' (key),
     * 'o' (object).
     */
    protected char mode;

    /**
     * The object/array stack. A null entry marks an array scope; a JSONObject
     * entry marks an object scope (and tracks its keys to reject duplicates).
     */
    private final JSONObject stack[];

    /**
     * The stack top index. A value of 0 indicates that the stack is empty.
     */
    private int top;

    /**
     * The writer that will receive the output.
     */
    protected Writer writer;

    /**
     * Make a fresh JSONWriter. It can be used to build one JSON text.
     */
    public JSONWriter(Writer w) {
        this.comma = false;
        this.mode = 'i';
        this.stack = new JSONObject[maxdepth];
        this.top = 0;
        this.writer = w;
    }

    /**
     * Append a value.
     * @param string A string value.
     * @return this
     * @throws org.json.JSONException If the value is out of sequence.
     */
    private JSONWriter append(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null pointer");
        }
        if (this.mode == 'o' || this.mode == 'a') {
            try {
                // Inside an array, values after the first need a separating comma.
                if (this.comma && this.mode == 'a') {
                    this.writer.write(',');
                }
                this.writer.write(string);
            } catch (IOException e) {
                throw new JSONException(e);
            }
            if (this.mode == 'o') {
                // After an object value, the next token must be a key.
                this.mode = 'k';
            }
            this.comma = true;
            return this;
        }
        throw new JSONException("Value out of sequence.");
    }

    /**
     * Begin appending a new array. All values until the balancing
     * <code>endArray</code> will be appended to this array. The
     * <code>endArray</code> method must be called to mark the array's end.
     * @return this
     * @throws org.json.JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter array() throws JSONException {
        if (this.mode == 'i' || this.mode == 'o' || this.mode == 'a') {
            this.push(null);
            this.append("[");
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced array.");
    }

    /**
     * End something.
     * @param mode Mode
     * @param c Closing character
     * @return this
     * @throws org.json.JSONException If unbalanced.
     */
    private JSONWriter end(char mode, char c) throws JSONException {
        if (this.mode != mode) {
            throw new JSONException(mode == 'a'
                ? "Misplaced endArray."
                : "Misplaced endObject.");
        }
        this.pop(mode);
        try {
            this.writer.write(c);
        } catch (IOException e) {
            throw new JSONException(e);
        }
        this.comma = true;
        return this;
    }

    /**
     * End an array. This method must be called to balance calls to
     * <code>array</code>.
     * @return this
     * @throws org.json.JSONException If incorrectly nested.
     */
    public JSONWriter endArray() throws JSONException {
        return this.end('a', ']');
    }

    /**
     * End an object. This method must be called to balance calls to
     * <code>object</code>.
     * @return this
     * @throws org.json.JSONException If incorrectly nested.
     */
    public JSONWriter endObject() throws JSONException {
        // An open object expecting a key is in mode 'k'.
        return this.end('k', '}');
    }

    /**
     * Append a key. The key will be associated with the next value. In an
     * object, every value must be preceded by a key.
     * @param string A key string.
     * @return this
     * @throws org.json.JSONException If the key is out of place. For example, keys
     *  do not belong in arrays or if the key is null.
     */
    public JSONWriter key(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null key.");
        }
        if (this.mode == 'k') {
            try {
                // Record the key on the current object scope; putOnce rejects
                // duplicate keys with a JSONException.
                this.stack[this.top - 1].putOnce(string, Boolean.TRUE);
                if (this.comma) {
                    this.writer.write(',');
                }
                this.writer.write(JSONObject.quote(string));
                this.writer.write(':');
                this.comma = false;
                this.mode = 'o';
                return this;
            } catch (IOException e) {
                throw new JSONException(e);
            }
        }
        throw new JSONException("Misplaced key.");
    }

    /**
     * Begin appending a new object. All keys and values until the balancing
     * <code>endObject</code> will be appended to this object. The
     * <code>endObject</code> method must be called to mark the object's end.
     * @return this
     * @throws org.json.JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter object() throws JSONException {
        if (this.mode == 'i') {
            this.mode = 'o';
        }
        if (this.mode == 'o' || this.mode == 'a') {
            this.append("{");
            this.push(new JSONObject());
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced object.");
    }

    /**
     * Pop an array or object scope.
     * @param c The scope to close.
     * @throws org.json.JSONException If nesting is wrong.
     */
    private void pop(char c) throws JSONException {
        if (this.top <= 0) {
            throw new JSONException("Nesting error.");
        }
        char m = this.stack[this.top - 1] == null ? 'a' : 'k';
        if (m != c) {
            throw new JSONException("Nesting error.");
        }
        this.top -= 1;
        // Restore the mode of the enclosing scope ('d' when the stack empties).
        this.mode = this.top == 0
            ? 'd'
            : this.stack[this.top - 1] == null ? 'a' : 'k';
    }

    /**
     * Push an array or object scope.
     * @param jo The scope to open (null for an array scope).
     * @throws org.json.JSONException If nesting is too deep.
     */
    private void push(JSONObject jo) throws JSONException {
        if (this.top >= maxdepth) {
            throw new JSONException("Nesting too deep.");
        }
        this.stack[this.top] = jo;
        this.mode = jo == null ? 'a' : 'k';
        this.top += 1;
    }

    /**
     * Append either the value <code>true</code> or the value
     * <code>false</code>.
     * @param b A boolean.
     * @return this
     * @throws org.json.JSONException
     */
    public JSONWriter value(boolean b) throws JSONException {
        return this.append(b ? "true" : "false");
    }

    /**
     * Append a double value.
     * @param d A double.
     * @return this
     * @throws org.json.JSONException If the number is not finite.
     */
    public JSONWriter value(double d) throws JSONException {
        // Double.valueOf replaces the deprecated `new Double(d)` boxing
        // constructor; formatting is still delegated to value(Object).
        return this.value(Double.valueOf(d));
    }

    /**
     * Append a long value.
     * @param l A long.
     * @return this
     * @throws org.json.JSONException
     */
    public JSONWriter value(long l) throws JSONException {
        return this.append(Long.toString(l));
    }

    /**
     * Append an object value.
     * @param object The object to append. It can be null, or a Boolean, Number,
     *   String, JSONObject, or JSONArray, or an object that implements JSONString.
     * @return this
     * @throws org.json.JSONException If the value is out of sequence.
     */
    public JSONWriter value(Object object) throws JSONException {
        return this.append(JSONObject.valueToString(object));
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.functionscore;

import org.apache.lucene.search.Explanation;
import org.apache.lucene.util.English;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.rescore.QueryRescoreMode;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Arrays;
import java.util.Comparator;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.lucene.search.function.CombineFunction.REPLACE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction;
import static org.elasticsearch.search.rescore.RescoreBuilder.queryRescorer;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFourthHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests for the query rescorer: a second, usually more expensive query that
 * re-scores only the top {@code windowSize} hits of the primary query, combining the two
 * scores via configurable query/rescore weights and a score mode
 * (max/min/avg/total/multiply).
 */
public class QueryRescorerIT extends ESIntegTestCase {

    /**
     * Rescores with a window of 1 and asserts that at most one document per shard can end
     * up with the rescored score of 100 (the rescorer replaces the score of only the top
     * hit of each shard's window).
     */
    public void testEnforceWindowSize() {
        createIndex("test");
        // rescore window (1) is deliberately smaller than the request size chosen below
        int iters = scaledRandomIntBetween(10, 20);
        for (int i = 0; i < iters; i ++) {
            client().prepareIndex("test", "type", Integer.toString(i)).setSource("f", Integer.toString(i)).execute().actionGet();
        }
        ensureYellow();
        refresh();

        int numShards = getNumShards("test").numPrimaries;
        for (int j = 0 ; j < iters; j++) {
            SearchResponse searchResponse = client().prepareSearch()
                    .setQuery(QueryBuilders.matchAllQuery())
                    .setRescorer(queryRescorer(
                            QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(),
                                    ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(CombineFunction.REPLACE))
                            .setQueryWeight(0.0f).setRescoreQueryWeight(1.0f), 1).setSize(randomIntBetween(2, 10)).execute()
                    .actionGet();
            assertSearchResponse(searchResponse);
            assertFirstHit(searchResponse, hasScore(100.f));
            int numDocsWith100AsAScore = 0;
            for (int i = 0; i < searchResponse.getHits().hits().length; i++) {
                float score = searchResponse.getHits().hits()[i].getScore();
                if (score == 100f) {
                    numDocsWith100AsAScore += 1;
                }
            }
            // we cannot assert that they are equal since some shards might not have docs at all
            assertThat(numDocsWith100AsAScore, lessThanOrEqualTo(numShards));
        }
    }

    /**
     * Indexes three "quick brown fox" docs and verifies that a phrase rescorer
     * re-orders the OR-query hits, while phrase queries with larger slop (which match all
     * three docs) leave the original order intact.
     */
    public void testRescorePhrase() throws Exception {
        assertAcked(prepareCreate("test")
                .addMapping(
                        "type1",
                        jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
                                .field("analyzer", "whitespace").field("type", "text").endObject().endObject().endObject().endObject())
                .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)));
        client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree ").get();
        client().prepareIndex("test", "type1", "3")
                .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree").get();
        ensureYellow();
        refresh();
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
                .setRescorer(
                        queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f))
                                .setRescoreQueryWeight(2), 5).execute().actionGet();

        assertThat(searchResponse.getHits().totalHits(), equalTo(3L));
        assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
        assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("3"));
        assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2"));

        searchResponse = client().prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3)), 5)
                .execute().actionGet();

        assertHitCount(searchResponse, 3);
        assertFirstHit(searchResponse, hasId("1"));
        assertSecondHit(searchResponse, hasId("2"));
        assertThirdHit(searchResponse, hasId("3"));

        searchResponse = client().prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
                .setRescorer(queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown"))), 5).execute()
                .actionGet();

        assertHitCount(searchResponse, 3);
        assertFirstHit(searchResponse, hasId("1"));
        assertSecondHit(searchResponse, hasId("2"));
        assertThirdHit(searchResponse, hasId("3"));
    }

    /**
     * Rescores over a larger corpus (with a synonym search analyzer) and verifies the
     * top hits for window 20 with weighted query/rescore scores, for both default and
     * DFS_QUERY_THEN_FETCH search types, and with a non-zero {@code from}.
     */
    public void testMoreDocs() throws Exception {
        Builder builder = Settings.builder();
        builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
        builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
        builder.put("index.analysis.filter.synonym.type", "synonym");
        builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");

        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
                .endObject().endObject().endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping)
                .setSettings(builder.put("index.number_of_shards", 1)));

        client().prepareIndex("test", "type1", "1").setSource("field1", "massachusetts avenue boston massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "3").setSource("field1", "boston avenue lexington massachusetts").execute().actionGet();
        client().admin().indices().prepareRefresh("test").execute().actionGet();
        client().prepareIndex("test", "type1", "4").setSource("field1", "boston road lexington massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "5").setSource("field1", "lexington street lexington massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").execute()
                .actionGet();
        client().prepareIndex("test", "type1", "7").setSource("field1", "bosten street san franciso california").execute().actionGet();
        client().admin().indices().prepareRefresh("test").execute().actionGet();
        client().prepareIndex("test", "type1", "8").setSource("field1", "hollywood boulevard los angeles california").execute().actionGet();
        client().prepareIndex("test", "type1", "9").setSource("field1", "1st street boston massachussetts").execute().actionGet();
        client().prepareIndex("test", "type1", "10").setSource("field1", "1st street boston massachusetts").execute().actionGet();
        client().admin().indices().prepareRefresh("test").execute().actionGet();
        client().prepareIndex("test", "type1", "11").setSource("field1", "2st street boston massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "12").setSource("field1", "3st street boston massachusetts").execute().actionGet();
        ensureYellow();
        client().admin().indices().prepareRefresh("test").execute().actionGet();

        SearchResponse searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR))
                .setFrom(0)
                .setSize(5)
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3))
                        .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet();

        assertThat(searchResponse.getHits().hits().length, equalTo(5));
        assertHitCount(searchResponse, 9);
        assertFirstHit(searchResponse, hasId("2"));
        assertSecondHit(searchResponse, hasId("6"));
        assertThirdHit(searchResponse, hasId("3"));

        searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR))
                .setFrom(0)
                .setSize(5)
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3))
                        .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet();

        assertThat(searchResponse.getHits().hits().length, equalTo(5));
        assertHitCount(searchResponse, 9);
        assertFirstHit(searchResponse, hasId("2"));
        assertSecondHit(searchResponse, hasId("6"));
        assertThirdHit(searchResponse, hasId("3"));

        // Make sure non-zero from works:
        searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR))
                .setFrom(2)
                .setSize(5)
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3))
                        .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 20).execute().actionGet();

        assertThat(searchResponse.getHits().hits().length, equalTo(5));
        assertHitCount(searchResponse, 9);
        assertFirstHit(searchResponse, hasId("3"));
    }

    // Tests a rescore window smaller than number of hits:
    public void testSmallRescoreWindow() throws Exception {
        Builder builder = Settings.builder();
        builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
        builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
        builder.put("index.analysis.filter.synonym.type", "synonym");
        builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");

        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
                .endObject().endObject().endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping)
                .setSettings(builder.put("index.number_of_shards", 1)));

        client().prepareIndex("test", "type1", "3").setSource("field1", "massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").execute()
                .actionGet();
        client().admin().indices().prepareRefresh("test").execute().actionGet();
        client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").execute().actionGet();
        ensureYellow();
        client().admin().indices().prepareRefresh("test").execute().actionGet();

        // Baseline order without rescoring:
        SearchResponse searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
                .setFrom(0)
                .setSize(5).execute().actionGet();
        assertThat(searchResponse.getHits().hits().length, equalTo(4));
        assertHitCount(searchResponse, 4);
        assertFirstHit(searchResponse, hasId("3"));
        assertSecondHit(searchResponse, hasId("6"));
        assertThirdHit(searchResponse, hasId("1"));
        assertFourthHit(searchResponse, hasId("2"));

        // Now, rescore only top 2 hits w/ proximity:
        searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
                .setFrom(0)
                .setSize(5)
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3))
                        .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 2).execute().actionGet();
        // Only top 2 hits were re-ordered:
        assertThat(searchResponse.getHits().hits().length, equalTo(4));
        assertHitCount(searchResponse, 4);
        assertFirstHit(searchResponse, hasId("6"));
        assertSecondHit(searchResponse, hasId("3"));
        assertThirdHit(searchResponse, hasId("1"));
        assertFourthHit(searchResponse, hasId("2"));

        // Now, rescore only top 3 hits w/ proximity:
        searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "massachusetts"))
                .setFrom(0)
                .setSize(5)
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3))
                        .setQueryWeight(0.6f).setRescoreQueryWeight(2.0f), 3).execute().actionGet();
        // Only top 3 hits were re-ordered:
        assertThat(searchResponse.getHits().hits().length, equalTo(4));
        assertHitCount(searchResponse, 4);
        assertFirstHit(searchResponse, hasId("6"));
        assertSecondHit(searchResponse, hasId("1"));
        assertThirdHit(searchResponse, hasId("3"));
        assertFourthHit(searchResponse, hasId("2"));
    }

    // Tests a rescorer that penalizes the scores:
    public void testRescorerMadeScoresWorse() throws Exception {
        Builder builder = Settings.builder();
        builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
        builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
        builder.put("index.analysis.filter.synonym.type", "synonym");
        builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");

        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                .startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
                .endObject().endObject().endObject().endObject();

        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping)
                .setSettings(builder.put("index.number_of_shards", 1)));

        client().prepareIndex("test", "type1", "3").setSource("field1", "massachusetts").execute().actionGet();
        client().prepareIndex("test", "type1", "6").setSource("field1", "massachusetts avenue lexington massachusetts").execute()
                .actionGet();
        client().admin().indices().prepareRefresh("test").execute().actionGet();
        client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").execute().actionGet();
        ensureYellow();
        client().admin().indices().prepareRefresh("test").execute().actionGet();

        // Baseline order without rescoring:
        SearchResponse searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR))
                .setFrom(0)
                .setSize(5).execute().actionGet();
        assertThat(searchResponse.getHits().hits().length, equalTo(4));
        assertHitCount(searchResponse, 4);
        assertFirstHit(searchResponse, hasId("3"));
        assertSecondHit(searchResponse, hasId("6"));
        assertThirdHit(searchResponse, hasId("1"));
        assertFourthHit(searchResponse, hasId("2"));

        // Now, penalizing rescore (nothing matches the rescore query):
        searchResponse = client()
                .prepareSearch()
                .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR))
                .setFrom(0)
                .setSize(5)
                .setRescorer(queryRescorer(QueryBuilders.matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3))
                        .setQueryWeight(1.0f).setRescoreQueryWeight(-1f), 3).execute().actionGet();

        // 6 and 1 got worse, and then the hit (2) outside the rescore window were sorted ahead:
        assertFirstHit(searchResponse, hasId("3"));
        assertSecondHit(searchResponse, hasId("2"));
        assertThirdHit(searchResponse, hasId("6"));
        assertFourthHit(searchResponse, hasId("1"));
    }

    // Comparator that sorts hits and rescored hits in the same way.
    // The rescore uses the docId as tie, while regular search uses the slot the hit is in as a tie if score
    // and shard id are equal during merging shard results.
    // This comparator uses a custom tie in case the scores are equal, so that both regular hits and rescored hits
    // are sorted equally. This is fine since tests only care about the fact the scores should be equal, not ordering.
    private final static Comparator<SearchHit> searchHitsComparator = new Comparator<SearchHit>() {
        @Override
        public int compare(SearchHit hit1, SearchHit hit2) {
            // descending by score, then ascending by id as the tie-breaker
            int cmp = Float.compare(hit2.getScore(), hit1.getScore());
            if (cmp == 0) {
                return hit1.id().compareTo(hit2.id());
            } else {
                return cmp;
            }
        }
    };

    /**
     * Asserts that a plain and a rescored response carry the same hits with the same
     * scores; id comparison stops at the first score tied with the tail, where ordering
     * is not guaranteed.
     */
    private static void assertEquivalent(String query, SearchResponse plain, SearchResponse rescored) {
        assertNoFailures(plain);
        assertNoFailures(rescored);
        SearchHits leftHits = plain.getHits();
        SearchHits rightHits = rescored.getHits();
        assertThat(leftHits.getTotalHits(), equalTo(rightHits.getTotalHits()));
        assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length));
        SearchHit[] hits = leftHits.getHits();
        SearchHit[] rHits = rightHits.getHits();
        Arrays.sort(hits, searchHitsComparator);
        Arrays.sort(rHits, searchHitsComparator);
        for (int i = 0; i < hits.length; i++) {
            assertThat("query: " + query, hits[i].getScore(), equalTo(rHits[i].getScore()));
        }
        for (int i = 0; i < hits.length; i++) {
            if (hits[i].getScore() == hits[hits.length-1].getScore()) {
                return; // we need to cut off here since this is the tail of the queue and we might not have fetched enough docs
            }
            assertThat("query: " + query,hits[i].getId(), equalTo(rHits[i].getId()));
        }
    }

    /**
     * Like {@link #assertEquivalent}, but if the top hit differs it only requires the
     * rescored top hit's {@code field1} to contain the query phrase.
     */
    private static void assertEquivalentOrSubstringMatch(String query, SearchResponse plain, SearchResponse rescored) {
        assertNoFailures(plain);
        assertNoFailures(rescored);
        SearchHits leftHits = plain.getHits();
        SearchHits rightHits = rescored.getHits();
        assertThat(leftHits.getTotalHits(), equalTo(rightHits.getTotalHits()));
        assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length));
        SearchHit[] hits = leftHits.getHits();
        SearchHit[] otherHits = rightHits.getHits();
        if (!hits[0].getId().equals(otherHits[0].getId())) {
            assertThat(((String) otherHits[0].sourceAsMap().get("field1")).contains(query), equalTo(true));
        } else {
            Arrays.sort(hits, searchHitsComparator);
            Arrays.sort(otherHits, searchHitsComparator);
            for (int i = 0; i < hits.length; i++) {
                if (hits[i].getScore() == hits[hits.length-1].getScore()) {
                    return; // we need to cut off here since this is the tail of the queue and we might not have fetched enough docs
                }
                assertThat(query, hits[i].getId(), equalTo(rightHits.getHits()[i].getId()));
            }
        }
    }

    // forces QUERY_THEN_FETCH because of https://github.com/elastic/elasticsearch/issues/4829
    public void testEquivalence() throws Exception {
        // no dummy docs since merges can change scores while we run queries.
        int numDocs = indexRandomNumbers("whitespace", -1, false);

        final int iters = scaledRandomIntBetween(50, 100);
        for (int i = 0; i < iters; i++) {
            int resultSize = numDocs;
            int rescoreWindow = between(1, 3) * resultSize;
            String intToEnglish = English.intToEnglish(between(0, numDocs-1));
            String query = intToEnglish.split(" ")[0];
            SearchResponse rescored = client()
                    .prepareSearch()
                    .setSearchType(SearchType.QUERY_THEN_FETCH)
                    .setPreference("test") // ensure we hit the same shards for tie-breaking
                    .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
                    .setFrom(0)
                    .setSize(resultSize)
                    .setRescorer(queryRescorer(constantScoreQuery(QueryBuilders.matchPhraseQuery("field1", intToEnglish).slop(3)))
                            .setQueryWeight(1.0f)
                            // no weight - so we basically use the same score as the actual query
                            .setRescoreQueryWeight(0.0f), rescoreWindow)
                    .execute().actionGet();

            SearchResponse plain = client().prepareSearch()
                    .setSearchType(SearchType.QUERY_THEN_FETCH)
                    .setPreference("test") // ensure we hit the same shards for tie-breaking
                    .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)).setFrom(0).setSize(resultSize)
                    .execute().actionGet();

            // check equivalence
            assertEquivalent(query, plain, rescored);

            rescored = client()
                    .prepareSearch()
                    .setSearchType(SearchType.QUERY_THEN_FETCH)
                    .setPreference("test") // ensure we hit the same shards for tie-breaking
                    .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
                    .setFrom(0)
                    .setSize(resultSize)
                    .setRescorer(queryRescorer(constantScoreQuery(matchPhraseQuery("field1", "not in the index").slop(3)))
                            .setQueryWeight(1.0f)
                            .setRescoreQueryWeight(1.0f), rescoreWindow).execute()
                    .actionGet();
            // check equivalence
            assertEquivalent(query, plain, rescored);

            rescored = client()
                    .prepareSearch()
                    .setSearchType(SearchType.QUERY_THEN_FETCH)
                    .setPreference("test") // ensure we hit the same shards for tie-breaking
                    .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
                    .setFrom(0)
                    .setSize(resultSize)
                    .setRescorer(queryRescorer(matchPhraseQuery("field1", intToEnglish).slop(0))
                            .setQueryWeight(1.0f).setRescoreQueryWeight(1.0f), 2 * rescoreWindow).execute().actionGet();
            // check equivalence or if the first match differs we check if the phrase is a substring of the top doc
            assertEquivalentOrSubstringMatch(intToEnglish, plain, rescored);
        }
    }

    /**
     * Verifies the structure and weighted values of the explanation tree produced for
     * rescored hits, for a single rescorer and for each inner/outer combination of
     * score modes on two chained rescorers.
     */
    public void testExplain() throws Exception {
        assertAcked(prepareCreate("test")
                .addMapping(
                        "type1",
                        jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
                                .field("analyzer", "whitespace").field("type", "text").endObject().endObject().endObject().endObject())
        );
        ensureGreen();
        client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree").execute()
                .actionGet();
        client().prepareIndex("test", "type1", "3")
                .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree").execute()
                .actionGet();
        ensureYellow();
        refresh();

        {
            SearchResponse searchResponse = client()
                    .prepareSearch()
                    .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                    .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
                    .setRescorer(queryRescorer(matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f))
                            .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f), 5).setExplain(true).execute()
                    .actionGet();
            assertHitCount(searchResponse, 3);
            assertFirstHit(searchResponse, hasId("1"));
            assertSecondHit(searchResponse, hasId("2"));
            assertThirdHit(searchResponse, hasId("3"));

            for (int i = 0; i < 3; i++) {
                assertThat(searchResponse.getHits().getAt(i).explanation(), notNullValue());
                assertThat(searchResponse.getHits().getAt(i).explanation().isMatch(), equalTo(true));
                assertThat(searchResponse.getHits().getAt(i).explanation().getDetails().length, equalTo(2));
                assertThat(searchResponse.getHits().getAt(i).explanation().getDetails()[0].isMatch(), equalTo(true));
                if (i == 2) {
                    // doc 3 does not match the phrase rescore query, so only the 0.5 query weight shows up
                    assertThat(searchResponse.getHits().getAt(i).explanation().getDetails()[1].getValue(), equalTo(0.5f));
                } else {
                    assertThat(searchResponse.getHits().getAt(i).explanation().getDescription(), equalTo("sum of:"));
                    assertThat(searchResponse.getHits().getAt(i).explanation().getDetails()[0].getDetails()[1].getValue(), equalTo(0.5f));
                    assertThat(searchResponse.getHits().getAt(i).explanation().getDetails()[1].getDetails()[1].getValue(), equalTo(0.4f));
                }
            }
        }

        String[] scoreModes = new String[]{ "max", "min", "avg", "total", "multiply", "" };
        String[] descriptionModes = new String[]{ "max of:", "min of:", "avg of:", "sum of:", "product of:", "sum of:" };
        for (int innerMode = 0; innerMode < scoreModes.length; innerMode++) {
            QueryRescorerBuilder innerRescoreQuery = queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f))
                    .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f);

            if (!"".equals(scoreModes[innerMode])) {
                innerRescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreModes[innerMode]));
            }

            SearchResponse searchResponse = client()
                    .prepareSearch()
                    .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                    .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
                    .setRescorer(innerRescoreQuery, 5).setExplain(true).execute()
                    .actionGet();
            assertHitCount(searchResponse, 3);
            assertFirstHit(searchResponse, hasId("1"));
            assertSecondHit(searchResponse, hasId("2"));
            assertThirdHit(searchResponse, hasId("3"));

            for (int j = 0; j < 3; j++) {
                assertThat(searchResponse.getHits().getAt(j).explanation().getDescription(), equalTo(descriptionModes[innerMode]));
            }

            for (int outerMode = 0; outerMode < scoreModes.length; outerMode++) {
                QueryRescorerBuilder outerRescoreQuery = queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f))
                        .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f);

                if (!"".equals(scoreModes[outerMode])) {
                    outerRescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreModes[outerMode]));
                }

                searchResponse = client()
                        .prepareSearch()
                        .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                        .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
                        .addRescorer(innerRescoreQuery, 5).addRescorer(outerRescoreQuery.windowSize(10))
                        .setExplain(true).get();
                assertHitCount(searchResponse, 3);
                assertFirstHit(searchResponse, hasId("1"));
                assertSecondHit(searchResponse, hasId("2"));
                assertThirdHit(searchResponse, hasId("3"));

                for (int j = 0; j < 3; j++) {
                    Explanation explanation = searchResponse.getHits().getAt(j).explanation();
                    // outer rescore wraps the inner one
                    assertThat(explanation.getDescription(), equalTo(descriptionModes[outerMode]));
                    assertThat(explanation.getDetails()[0].getDetails()[0].getDescription(), equalTo(descriptionModes[innerMode]));
                }
            }
        }
    }

    /**
     * Exercises every score mode with known per-term weights (query side: 2/3/5/0.2,
     * rescore side: 5/7/-/0) and asserts the exact combined score of each of the four
     * matching docs under query weight 1.1 and rescore weight 1.6.
     */
    public void testScoring() throws Exception {
        int numDocs = indexRandomNumbers("keyword");

        String[] scoreModes = new String[]{ "max", "min", "avg", "total", "multiply", "" };
        float primaryWeight = 1.1f;
        float secondaryWeight = 1.6f;

        for (String scoreMode : scoreModes) {
            for (int i = 0; i < numDocs - 4; i++) {
                String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1),
                        English.intToEnglish(i + 2), English.intToEnglish(i + 3) };

                QueryBuilder query = boolQuery().disableCoord(true)
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[0]), weightFactorFunction(2.0f)).boostMode(REPLACE))
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[1]), weightFactorFunction(3.0f)).boostMode(REPLACE))
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[2]), weightFactorFunction(5.0f)).boostMode(REPLACE))
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[3]), weightFactorFunction(0.2f)).boostMode(REPLACE));
                QueryRescorerBuilder rescoreQuery = queryRescorer(boolQuery().disableCoord(true)
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[0]), weightFactorFunction(5.0f)).boostMode(REPLACE))
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[1]), weightFactorFunction(7.0f)).boostMode(REPLACE))
                        .should(functionScoreQuery(termQuery("field1", intToEnglish[3]), weightFactorFunction(0.0f)).boostMode(REPLACE)));
                rescoreQuery.setQueryWeight(primaryWeight).setRescoreQueryWeight(secondaryWeight);

                if (!"".equals(scoreMode)) {
                    rescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreMode));
                }

                SearchResponse rescored = client().prepareSearch()
                        .setPreference("test") // ensure we hit the same shards for tie-breaking
                        .setFrom(0).setSize(10).setQuery(query).setRescorer(rescoreQuery, 50).get();

                assertHitCount(rescored, 4);

                if ("total".equals(scoreMode) || "".equals(scoreMode)) {
                    // "" falls back to the default mode, which sums the weighted scores
                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
                    assertSecondHit(rescored, hasId(String.valueOf(i)));
                    assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(3.0f * primaryWeight + 7.0f * secondaryWeight));
                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(2.0f * primaryWeight + 5.0f * secondaryWeight));
                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight + 0.0f * secondaryWeight));
                } else if ("max".equals(scoreMode)) {
                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
                    assertSecondHit(rescored, hasId(String.valueOf(i)));
                    assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(7.0f * secondaryWeight));
                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * secondaryWeight));
                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight));
                } else if ("min".equals(scoreMode)) {
                    assertFirstHit(rescored, hasId(String.valueOf(i + 2)));
                    assertSecondHit(rescored, hasId(String.valueOf(i + 1)));
                    assertThirdHit(rescored, hasId(String.valueOf(i)));
                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(5.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(3.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(2.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.0f * secondaryWeight));
                } else if ("avg".equals(scoreMode)) {
                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
                    assertSecondHit(rescored, hasId(String.valueOf(i + 2)));
                    assertThirdHit(rescored, hasId(String.valueOf(i)));
                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo((3.0f * primaryWeight + 7.0f * secondaryWeight) / 2.0f));
                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(5.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo((2.0f * primaryWeight + 5.0f * secondaryWeight) / 2.0f));
                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo((0.2f * primaryWeight) / 2.0f));
                } else if ("multiply".equals(scoreMode)) {
                    assertFirstHit(rescored, hasId(String.valueOf(i + 1)));
                    assertSecondHit(rescored, hasId(String.valueOf(i)));
                    assertThirdHit(rescored, hasId(String.valueOf(i + 2)));
                    assertThat(rescored.getHits().getHits()[0].getScore(), equalTo(3.0f * primaryWeight * 7.0f * secondaryWeight));
                    assertThat(rescored.getHits().getHits()[1].getScore(), equalTo(2.0f * primaryWeight * 5.0f * secondaryWeight));
                    assertThat(rescored.getHits().getHits()[2].getScore(), equalTo(5.0f * primaryWeight));
                    assertThat(rescored.getHits().getHits()[3].getScore(), equalTo(0.2f * primaryWeight * 0.0f * secondaryWeight));
                }
            }
        }
    }

    /**
     * Chains two rescorers and verifies that each one only sees hits inside its own
     * window, including using the first rescorer to pull a document into the second
     * rescorer's window.
     */
    public void testMultipleRescores() throws Exception {
        int numDocs = indexRandomNumbers("keyword", 1, true);
        QueryRescorerBuilder eightIsGreat = RescoreBuilder
                .queryRescorer(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)),
                        ScoreFunctionBuilders.weightFactorFunction(1000.0f)).boostMode(CombineFunction.REPLACE))
                .setScoreMode(QueryRescoreMode.Total);
        QueryRescorerBuilder sevenIsBetter = RescoreBuilder
                .queryRescorer(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)),
                        ScoreFunctionBuilders.weightFactorFunction(10000.0f)).boostMode(CombineFunction.REPLACE))
                .setScoreMode(QueryRescoreMode.Total);

        // First set the rescore window large enough that both rescores take effect
        SearchRequestBuilder request = client().prepareSearch();
        request.addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, numDocs);
        SearchResponse response = request.get();
        assertFirstHit(response, hasId("7"));
        assertSecondHit(response, hasId("8"));

        // Now squash the second rescore window so it never gets to see a seven
        response = request.setSize(1).clearRescorers().addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, 1).get();
        assertFirstHit(response, hasId("8"));
        // We have no idea what the second hit will be because we didn't get a chance to look for seven

        // Now use one rescore to drag the number we're looking for into the window of another
        QueryRescorerBuilder ninetyIsGood = RescoreBuilder.queryRescorer(QueryBuilders
                .functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f))
                .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
        QueryRescorerBuilder oneToo = RescoreBuilder.queryRescorer(QueryBuilders
                .functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f))
                .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
        request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10);
        response = request.setSize(2).get();
        assertFirstHit(response, hasId("91"));
        assertFirstHit(response, hasScore(2001.0f));
        assertSecondHit(response, hasScore(1001.0f)); // Not sure which one it is but it is ninety something
    }

    /** Convenience overload: one shard count left to the test infra, dummy docs enabled. */
    private int indexRandomNumbers(String analyzer) throws Exception {
        return indexRandomNumbers(analyzer, -1, true);
    }

    /**
     * Creates the "test" index with the given analyzer on {@code field1} (and the given
     * shard count when positive), indexes 100-150 docs whose {@code field1} is the
     * English spelling of the doc id, and returns the number of docs indexed.
     */
    private int indexRandomNumbers(String analyzer, int shards, boolean dummyDocs) throws Exception {
        Builder builder = Settings.builder().put(indexSettings());

        if (shards > 0) {
            builder.put(SETTING_NUMBER_OF_SHARDS, shards);
        }

        assertAcked(prepareCreate("test")
                .addMapping(
                        "type1",
                        jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
                                .field("analyzer", analyzer).field("type", "text").endObject().endObject().endObject().endObject())
                .setSettings(builder));
        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i));
        }

        indexRandom(true, dummyDocs, docs);
        ensureGreen();
        return numDocs;
    }

    // #11277
    public void testFromSize() throws Exception {
        Builder settings = Settings.builder();
        settings.put(SETTING_NUMBER_OF_SHARDS, 1);
        settings.put(SETTING_NUMBER_OF_REPLICAS, 0);
        assertAcked(prepareCreate("test").setSettings(settings));
        for(int i=0;i<5;i++) {
            client().prepareIndex("test", "type", ""+i).setSource("text", "hello world").get();
        }
        refresh();

        SearchRequestBuilder request = client().prepareSearch();
        request.setQuery(QueryBuilders.termQuery("text", "hello"));
        request.setFrom(1);
        request.setSize(4);
        request.addRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery()), 50);

        // from=1 over 5 matching docs must still return 4 hits after rescoring
        assertEquals(4, request.get().getHits().hits().length);
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v10/enums/lead_form_desired_intent.proto

package com.google.ads.googleads.v10.enums;

// NOTE(review): this file is protoc-generated; never hand-edit the logic —
// regenerate from lead_form_desired_intent.proto instead.
/**
 * <pre>
 * Describes the desired level of intent of generated leads.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum}
 */
public final class LeadFormDesiredIntentEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum)
    LeadFormDesiredIntentEnumOrBuilder {
private static final long serialVersionUID = 0L;
  // Use LeadFormDesiredIntentEnum.newBuilder() to construct.
  private LeadFormDesiredIntentEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private LeadFormDesiredIntentEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new LeadFormDesiredIntentEnum();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Parsing constructor: this wrapper message declares no fields, so every
  // tag read from the stream is preserved in unknownFields.
  private LeadFormDesiredIntentEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentProto.internal_static_google_ads_googleads_v10_enums_LeadFormDesiredIntentEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentProto.internal_static_google_ads_googleads_v10_enums_LeadFormDesiredIntentEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.class, com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.Builder.class);
  }

  /**
   * <pre>
   * Enum describing the desired level of intent of generated leads.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.LeadFormDesiredIntent}
   */
  public enum LeadFormDesiredIntent
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * Deliver more leads at a potentially lower quality.
     * </pre>
     *
     * <code>LOW_INTENT = 2;</code>
     */
    LOW_INTENT(2),
    /**
     * <pre>
     * Deliver leads that are more qualified.
     * </pre>
     *
     * <code>HIGH_INTENT = 3;</code>
     */
    HIGH_INTENT(3),
    UNRECOGNIZED(-1),
    ;

    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * Deliver more leads at a potentially lower quality.
     * </pre>
     *
     * <code>LOW_INTENT = 2;</code>
     */
    public static final int LOW_INTENT_VALUE = 2;
    /**
     * <pre>
     * Deliver leads that are more qualified.
     * </pre>
     *
     * <code>HIGH_INTENT = 3;</code>
     */
    public static final int HIGH_INTENT_VALUE = 3;


    public final int getNumber() {
      // UNRECOGNIZED carries no wire number; callers must not ask for one.
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static LeadFormDesiredIntent valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static LeadFormDesiredIntent forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return LOW_INTENT;
        case 3: return HIGH_INTENT;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<LeadFormDesiredIntent>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        LeadFormDesiredIntent> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<LeadFormDesiredIntent>() {
            public LeadFormDesiredIntent findValueByNumber(int number) {
              return LeadFormDesiredIntent.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      // UNRECOGNIZED has ordinal -1 and therefore no descriptor entry.
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.getDescriptor().getEnumTypes().get(0);
    }

    private static final LeadFormDesiredIntent[] VALUES = values();

    public static LeadFormDesiredIntent valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LeadFormDesiredIntent(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.LeadFormDesiredIntent)
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum other = (com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Describes the desired level of intent of generated leads.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum)
      com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentProto.internal_static_google_ads_googleads_v10_enums_LeadFormDesiredIntentEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentProto.internal_static_google_ads_googleads_v10_enums_LeadFormDesiredIntentEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.class, com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentProto.internal_static_google_ads_googleads_v10_enums_LeadFormDesiredIntentEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum build() {
      com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum buildPartial() {
      com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum result = new com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum) {
        return mergeFrom((com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum other) {
      if (other == com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum)
  private static final com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum();
  }

  public static com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<LeadFormDesiredIntentEnum>
      PARSER = new com.google.protobuf.AbstractParser<LeadFormDesiredIntentEnum>() {
    @java.lang.Override
    public LeadFormDesiredIntentEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new LeadFormDesiredIntentEnum(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<LeadFormDesiredIntentEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<LeadFormDesiredIntentEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v10.enums.LeadFormDesiredIntentEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/* * JBoss, Home of Professional Open Source * Copyright 2014, Red Hat, Inc. and/or its affiliates, and individual * contributors by the @authors tag. See the copyright.txt in the * distribution for a full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.aerogear.unifiedpush.quickstart.util; import android.util.Base64; import android.util.Log; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.util.EntityUtils; import org.jboss.aerogear.unifiedpush.quickstart.Constants; import org.jboss.aerogear.unifiedpush.quickstart.model.Contact; import org.jboss.aerogear.unifiedpush.quickstart.model.User; import java.util.ArrayList; import java.util.List; import java.util.Map; public final class WebClient { private final static String TAG = WebClient.class.getName(); private final static DefaultHttpClient httpClient; static { httpClient = new DefaultHttpClient(); } public boolean register(User user) { try { String registerURL = Constants.BASE_URL + "/rest/security/registration"; HttpPost post = new 
HttpPost(registerURL); post.setEntity(new StringEntity(new Gson().toJson(user))); post.setHeader("Accept", "application/json"); post.setHeader("Content-type", "application/json"); httpClient.execute(post); return true; } catch (Exception e) { Log.e(TAG, e.getMessage()); return false; } } public User authenticate(String username, String password) { try { String loginURL = Constants.BASE_URL + "/rest/security/user/info"; String credentials = username + ":" + password; String base64EncodedCredentials = Base64.encodeToString(credentials.getBytes(), Base64.NO_WRAP); HttpGet get = new HttpGet(loginURL); get.setHeader("Authorization", "Basic " + base64EncodedCredentials); get.setHeader("Accept", "application/json"); get.setHeader("Content-type", "application/json"); HttpResponse response = httpClient.execute(get); if (isStatusCodeOk(response)) { String responseData = EntityUtils.toString(response.getEntity()); Gson gson = new GsonBuilder().create(); Map<String, Object> rootNode = gson.fromJson(responseData, Map.class); String innerJson = gson.toJson(rootNode.get("account")); return gson.fromJson(innerJson, User.class); } else { return null; } } catch (Exception e) { Log.e(TAG, e.getMessage()); return null; } } public void logout() { try { String logoutURL = Constants.BASE_URL + "rest/security/logout"; HttpPost post = new HttpPost(logoutURL); post.setHeader("Accept", "application/json"); post.setHeader("Content-type", "application/json"); httpClient.execute(post); } catch (Exception e) { Log.e(TAG, e.getMessage()); } } public List<Contact> contacts() { try { String contactsURL = Constants.BASE_URL + "/rest/contacts"; HttpGet get = new HttpGet(contactsURL); get.setHeader("Accept", "application/json"); get.setHeader("Content-type", "application/json"); HttpResponse response = httpClient.execute(get); String responseData = EntityUtils.toString(response.getEntity()); return new Gson().fromJson(responseData, new TypeToken<List<Contact>>() { }.getType()); } catch (Exception e) { 
Log.e(TAG, e.getMessage()); return new ArrayList<Contact>(); } } public Boolean saveContact(Contact contact) { if (contact.getId() != null) { return updateContact(contact); } else { return newContact(contact); } } private Boolean newContact(Contact contact) { try { String contactsURL = Constants.BASE_URL + "/rest/contacts"; HttpPost post = new HttpPost(contactsURL); post.setEntity(new StringEntity(new Gson().toJson(contact))); post.setHeader("Accept", "application/json"); post.setHeader("Content-type", "application/json"); HttpResponse response = httpClient.execute(post); if (isStatusCodeOk(response)) { return true; } else { return false; } } catch (Exception e) { Log.e(TAG, e.getMessage()); return false; } } private Boolean updateContact(Contact contact) { try { String updateURL = Constants.BASE_URL + "/rest/contacts/" + String.valueOf(contact.getId()); HttpPut put = new HttpPut(updateURL); put.setEntity(new StringEntity(new Gson().toJson(contact))); put.setHeader("Accept", "application/json"); put.setHeader("Content-type", "application/json"); HttpResponse response = httpClient.execute(put); if (isStatusCodeOk(response)) { return true; } else { return false; } } catch (Exception e) { Log.e(TAG, e.getMessage()); return false; } } public boolean delete(Contact contact) { try { String deleteURL = Constants.BASE_URL + "/rest/contacts/" + String.valueOf(contact.getId()); HttpDelete delete = new HttpDelete(deleteURL); delete.setHeader("Accept", "application/json"); delete.setHeader("Content-type", "application/json"); HttpResponse response = httpClient.execute(delete); if (isStatusCodeOk(response)) { return true; } else { return false; } } catch (Exception e) { Log.e(TAG, e.getMessage()); return false; } } private boolean isStatusCodeOk(HttpResponse response) { return ((response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) || (response.getStatusLine().getStatusCode() == HttpStatus.SC_NO_CONTENT)); } }
package com.mcgowan.timetable.android; import android.app.ProgressDialog; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.support.design.widget.NavigationView; import android.support.design.widget.Snackbar; import android.support.design.widget.TabLayout; import android.support.v4.view.GravityCompat; import android.support.v4.view.ViewPager; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.TextView; import com.mcgowan.timetable.android.sync.TimetableSyncAdapter; import com.mcgowan.timetable.android.utility.Utility; public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener { public static final String TIMETABLE_URL = "https://itsligo.ie/student-hub/my-timetable/"; public static final String LABS_URL = "https://itsligo.ie/student-hub/computer-labs/"; public static final String LOG_TAG = MainActivity.class.getSimpleName(); public static final String SYNC_UPDATE = "SYNC_STATUS"; public static final int NEW_INSTALL = 1; public static final int UPDATE_VERSION = 2; private SyncReceiver mSyncReciever; private ProgressDialog mProgress; private IntentFilter mSyncFilter; private TabPagesAdapter mTabsPagesAdapter; private ViewPager mViewPager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); switch (AppVersionCheck.checkAppStart(this)) { // case NORMAL: // // No 
need to do anything // break; case FIRST_TIME_VERSION: displayUpdateMessage(UPDATE_VERSION); break; case FIRST_TIME: displayUpdateMessage(NEW_INSTALL); break; default: break; } TimetableSyncAdapter.initializeSyncAdapter(this); DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); ActionBarDrawerToggle toggle = new ActionBarDrawerToggle( this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawer.setDrawerListener(toggle); toggle.syncState(); NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); navigationView.setNavigationItemSelectedListener(this); mSyncReciever = new SyncReceiver(); mSyncFilter = new IntentFilter(TimetableSyncAdapter.INTENT_SYNC_ACTION); registerReceiver(mSyncReciever, mSyncFilter); } @Override protected void onResume() { super.onResume(); String studentId = Utility.getStudentId(this); if (studentId.equals("")) { showNoStudentIdDialog(); } else { mTabsPagesAdapter = new TabPagesAdapter(getSupportFragmentManager(), this); mViewPager = (ViewPager) findViewById(R.id.container); mViewPager.setAdapter(mTabsPagesAdapter); TabLayout tabLayout = (TabLayout) findViewById(R.id.tabbar); tabLayout.setupWithViewPager(mViewPager); } registerReceiver(mSyncReciever, mSyncFilter); } @Override protected void onPause() { super.onPause(); unregisterReceiver(mSyncReciever); } @Override public boolean onCreateOptionsMenu(Menu menu) { getSupportActionBar().setDisplayShowHomeEnabled(true); getSupportActionBar().setDisplayShowTitleEnabled(false); return true; } /** * launches settings as an intent * * @return */ public boolean openSettingsDetail() { Intent settingsIntent = new Intent(this, SettingsActivity.class); startActivity(settingsIntent); return true; } /** * No Student ID set dialog launcher */ private void showNoStudentIdDialog() { LayoutInflater inflater = LayoutInflater.from(this); View view = inflater.inflate(R.layout.dialog_main, null); AlertDialog.Builder builder = new 
AlertDialog.Builder(this); builder.setView(view).setTitle(getString(R.string.no_id_dialog_title)); builder.setCancelable(false); builder.setPositiveButton(getString(R.string.ok), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { openSettingsDetail(); } }); builder.create().show(); } @Override public boolean onNavigationItemSelected(MenuItem menuItem) { DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); int id = menuItem.getItemId(); switch (id) { case R.id.nav_settings_general: drawer.closeDrawer(GravityCompat.START); launchSettingsActivity(); break; case R.id.nav_settings_about: drawer.closeDrawer(GravityCompat.START); launchAboutActivity(); break; case R.id.nav_settings_version: displayVersion(); break; case R.id.action_refresh: TimetableSyncAdapter.syncImmediately(this); drawer.closeDrawer(GravityCompat.START); break; default: drawer.closeDrawer(GravityCompat.START); } return true; } private void launchSettingsActivity() { Intent intent = new Intent(this, SettingsActivity.class); startActivity(intent); } private void launchAboutActivity() { Intent intent = new Intent(this, AboutActivity.class); startActivity(intent); } private void displayVersion() { LayoutInflater inflater = LayoutInflater.from(this); View view = inflater.inflate(R.layout.dialog_main, null); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setView(view).setTitle(getString(R.string.app_version_title)); TextView content = (TextView) view.findViewById(R.id.dialog_main_text_view); content.setText(BuildConfig.VERSION_NAME); builder.setPositiveButton(getString(R.string.ok), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }); builder.create().show(); } private void displayLoading(String msg) { mProgress = ProgressDialog.show(this, "", msg, true); } private void dismissLoading(String status) { if (mProgress != null && 
mProgress.isShowing()) mProgress.dismiss(); Snackbar.make(findViewById(R.id.drawer_layout), status, Snackbar.LENGTH_LONG).show(); } private void displayUpdateMessage(int state) { LayoutInflater inflater = LayoutInflater.from(this); View view = inflater.inflate(R.layout.dialog_main, null); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setView(view).setTitle(getString(R.string.welcome_title)); TextView content = (TextView) view.findViewById(R.id.dialog_main_text_view); switch (state){ case NEW_INSTALL: content.setText(R.string.welcome_message); break; case UPDATE_VERSION: content.setText(R.string.update_message); break; default: content.setText(R.string.welcome_message); break; } builder.setPositiveButton(getString(R.string.ok), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }); builder.create().show(); } public class SyncReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { Bundle extras = intent.getExtras(); String status = extras.getString(SYNC_UPDATE); if (status.equals(TimetableSyncAdapter.LOADING_MESSAGE)) { displayLoading(status); } else { dismissLoading(status); } } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.plugins; import org.apache.lucene.search.Query; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import 
org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.rescore.RescorerBuilder;
import org.elasticsearch.search.rescore.Rescorer;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.BiConsumer;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;

/**
 * Plugin for extending search time behavior.
 */
public interface SearchPlugin {
    /**
     * The new {@link ScoreFunction}s defined by this plugin.
     */
    default List<ScoreFunctionSpec<?>> getScoreFunctions() {
        return emptyList();
    }
    /**
     * The new {@link SignificanceHeuristic}s defined by this plugin. {@linkplain SignificanceHeuristic}s are used by the
     * {@link SignificantTerms} aggregation to pick which terms are significant for a given query.
     */
    default List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
        return emptyList();
    }
    /**
     * The new {@link MovAvgModel}s defined by this plugin. {@linkplain MovAvgModel}s are used by the {@link MovAvgPipelineAggregator} to
     * model trends in data.
     */
    default List<SearchExtensionSpec<MovAvgModel, MovAvgModel.AbstractModelParser>> getMovingAverageModels() {
        return emptyList();
    }
    /**
     * The new {@link FetchSubPhase}s defined by this plugin.
     */
    default List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
        return emptyList();
    }
    /**
     * The new {@link SearchExtBuilder}s defined by this plugin.
     */
    default List<SearchExtSpec<?>> getSearchExts() {
        return emptyList();
    }
    /**
     * Get the {@link Highlighter}s defined by this plugin.
     */
    default Map<String, Highlighter> getHighlighters() {
        return emptyMap();
    }
    /**
     * The new {@link Suggester}s defined by this plugin.
     */
    default List<SuggesterSpec<?>> getSuggesters() {
        return emptyList();
    }
    /**
     * The new {@link Query}s defined by this plugin.
     */
    default List<QuerySpec<?>> getQueries() {
        return emptyList();
    }
    /**
     * The new {@link Aggregation}s added by this plugin.
     */
    default List<AggregationSpec> getAggregations() {
        return emptyList();
    }
    /**
     * The new {@link PipelineAggregator}s added by this plugin.
     */
    default List<PipelineAggregationSpec> getPipelineAggregations() {
        return emptyList();
    }
    /**
     * The new {@link Rescorer}s added by this plugin.
     */
    default List<RescorerSpec<?>> getRescorers() {
        return emptyList();
    }
    /**
     * The new search response listeners in the form of {@link BiConsumer}s added by this plugin.
     * The listeners are invoked on the coordinating node, at the very end of the search request.
     * This provides a convenient location if you wish to inspect/modify the final response (took time, etc).
     * The BiConsumers are passed the original {@link SearchRequest} and the final {@link SearchResponse}
     */
    default List<BiConsumer<SearchRequest, SearchResponse>> getSearchResponseListeners() {
        return emptyList();
    }

    /**
     * Specification of custom {@link ScoreFunction}.
     */
    class ScoreFunctionSpec<T extends ScoreFunctionBuilder<T>> extends SearchExtensionSpec<T, ScoreFunctionParser<T>> {
        public ScoreFunctionSpec(ParseField name, Writeable.Reader<T> reader, ScoreFunctionParser<T> parser) {
            super(name, reader, parser);
        }

        public ScoreFunctionSpec(String name, Writeable.Reader<T> reader, ScoreFunctionParser<T> parser) {
            super(name, reader, parser);
        }
    }

    /**
     * Specification for a {@link Suggester}.
     */
    class SuggesterSpec<T extends SuggestionBuilder<T>> extends SearchExtensionSpec<T, CheckedFunction<XContentParser, T, IOException>> {
        /**
         * Specification of custom {@link Suggester}.
         *
         * @param name holds the names by which this suggester might be parsed. The {@link ParseField#getPreferredName()} is special as it
         *        is the name under which the reader is registered. So it is the name that the query should use as its
         *        {@link NamedWriteable#getWriteableName()} too.
         * @param reader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param parser the parser that reads the query suggester from xcontent
         */
        public SuggesterSpec(ParseField name, Writeable.Reader<T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
            super(name, reader, parser);
        }

        /**
         * Specification of custom {@link Suggester}.
         *
         * @param name the name by which this suggester might be parsed or deserialized. Make sure that the query builder returns this name
         *        for {@link NamedWriteable#getWriteableName()}.
         * @param reader the reader registered for this suggester's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param parser the parser that reads the suggester builder from xcontent
         */
        public SuggesterSpec(String name, Writeable.Reader<T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
            super(name, reader, parser);
        }
    }

    /**
     * Specification of custom {@link Query}.
     */
    class QuerySpec<T extends QueryBuilder> extends SearchExtensionSpec<T, QueryParser<T>> {
        /**
         * Specification of custom {@link Query}.
         *
         * @param name holds the names by which this query might be parsed. The {@link ParseField#getPreferredName()} is special as it
         *        is the name under which the reader is registered. So it is the name that the query should use as its
         *        {@link NamedWriteable#getWriteableName()} too.
         * @param reader the reader registered for this query's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param parser the parser that reads the query builder from xcontent
         */
        public QuerySpec(ParseField name, Writeable.Reader<T> reader, QueryParser<T> parser) {
            super(name, reader, parser);
        }

        /**
         * Specification of custom {@link Query}.
         *
         * @param name the name by which this query might be parsed or deserialized. Make sure that the query builder returns this name for
         *        {@link NamedWriteable#getWriteableName()}.
         * @param reader the reader registered for this query's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param parser the parser that reads the query builder from xcontent
         */
        public QuerySpec(String name, Writeable.Reader<T> reader, QueryParser<T> parser) {
            super(name, reader, parser);
        }
    }

    /**
     * Specification for an {@link Aggregation}.
     */
    class AggregationSpec extends SearchExtensionSpec<AggregationBuilder, Aggregator.Parser> {
        // Maps writeable names to the readers for this aggregation's shard-level results.
        private final Map<String, Writeable.Reader<? extends InternalAggregation>> resultReaders = new TreeMap<>();

        /**
         * Specification for an {@link Aggregation}.
         *
         * @param name holds the names by which this aggregation might be parsed. The {@link ParseField#getPreferredName()} is special as it
         *        is the name under which the reader is registered. So it is the name that the {@link AggregationBuilder} should return
         *        from {@link NamedWriteable#getWriteableName()}.
         * @param reader the reader registered for this aggregation's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param parser the parser that reads the aggregation builder from xcontent
         */
        public AggregationSpec(ParseField name, Writeable.Reader<? extends AggregationBuilder> reader, Aggregator.Parser parser) {
            super(name, reader, parser);
        }

        /**
         * Specification for an {@link Aggregation}.
         *
         * @param name the name by which this aggregation might be parsed or deserialized. Make sure that the {@link AggregationBuilder}
         *        returns this from {@link NamedWriteable#getWriteableName()}.
         * @param reader the reader registered for this aggregation's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param parser the parser that reads the aggregation builder from xcontent
         */
        public AggregationSpec(String name, Writeable.Reader<? extends AggregationBuilder> reader, Aggregator.Parser parser) {
            super(name, reader, parser);
        }

        /**
         * Add a reader for the shard level results of the aggregation with {@linkplain #getName()}'s {@link ParseField#getPreferredName()} as
         * the {@link NamedWriteable#getWriteableName()}.
         */
        public AggregationSpec addResultReader(Writeable.Reader<? extends InternalAggregation> resultReader) {
            return addResultReader(getName().getPreferredName(), resultReader);
        }

        /**
         * Add a reader for the shard level results of the aggregation.
         */
        public AggregationSpec addResultReader(String writeableName, Writeable.Reader<? extends InternalAggregation> resultReader) {
            resultReaders.put(writeableName, resultReader);
            return this;
        }

        /**
         * Get the readers that must be registered for this aggregation's results.
         */
        public Map<String, Writeable.Reader<? extends InternalAggregation>> getResultReaders() {
            return resultReaders;
        }
    }

    /**
     * Specification for a {@link PipelineAggregator}.
     */
    class PipelineAggregationSpec extends SearchExtensionSpec<PipelineAggregationBuilder, PipelineAggregator.Parser> {
        // Maps writeable names to the readers for this aggregation's shard-level results.
        private final Map<String, Writeable.Reader<? extends InternalAggregation>> resultReaders = new TreeMap<>();
        private final Writeable.Reader<? extends PipelineAggregator> aggregatorReader;

        /**
         * Specification of a {@link PipelineAggregator}.
         *
         * @param name holds the names by which this aggregation might be parsed. The {@link ParseField#getPreferredName()} is special as it
         *        is the name under which the readers are registered. So it is the name that the {@link PipelineAggregationBuilder} and
         *        {@link PipelineAggregator} should return from {@link NamedWriteable#getWriteableName()}.
         * @param builderReader the reader registered for this aggregation's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param aggregatorReader reads the {@link PipelineAggregator} from a stream
         * @param parser reads the aggregation builder from XContent
         */
        public PipelineAggregationSpec(ParseField name,
                Writeable.Reader<? extends PipelineAggregationBuilder> builderReader,
                Writeable.Reader<? extends PipelineAggregator> aggregatorReader,
                PipelineAggregator.Parser parser) {
            super(name, builderReader, parser);
            this.aggregatorReader = aggregatorReader;
        }

        /**
         * Specification of a {@link PipelineAggregator}.
         *
         * @param name name by which this aggregation might be parsed or deserialized. Make sure it is the name that the
         *        {@link PipelineAggregationBuilder} and {@link PipelineAggregator} should return from
         *        {@link NamedWriteable#getWriteableName()}.
         * @param builderReader the reader registered for this aggregation's builder. Typically a reference to a constructor that takes a
         *        {@link StreamInput}
         * @param aggregatorReader reads the {@link PipelineAggregator} from a stream
         * @param parser reads the aggregation builder from XContent
         */
        public PipelineAggregationSpec(String name,
                Writeable.Reader<? extends PipelineAggregationBuilder> builderReader,
                Writeable.Reader<? extends PipelineAggregator> aggregatorReader,
                PipelineAggregator.Parser parser) {
            super(name, builderReader, parser);
            this.aggregatorReader = aggregatorReader;
        }

        /**
         * Add a reader for the shard level results of the aggregation with {@linkplain #getName()}'s {@link ParseField#getPreferredName()}
         * as the {@link NamedWriteable#getWriteableName()}.
         */
        public PipelineAggregationSpec addResultReader(Writeable.Reader<? extends InternalAggregation> resultReader) {
            return addResultReader(getName().getPreferredName(), resultReader);
        }

        /**
         * Add a reader for the shard level results of the aggregation.
         */
        public PipelineAggregationSpec addResultReader(String writeableName, Writeable.Reader<? extends InternalAggregation> resultReader) {
            resultReaders.put(writeableName, resultReader);
            return this;
        }

        /**
         * The reader for the {@link PipelineAggregator}.
         */
        public Writeable.Reader<? extends PipelineAggregator> getAggregatorReader() {
            return aggregatorReader;
        }

        /**
         * Get the readers that must be registered for this aggregation's results.
         */
        public Map<String, Writeable.Reader<? extends InternalAggregation>> getResultReaders() {
            return resultReaders;
        }
    }

    /**
     * Specification for a {@link SearchExtBuilder} which represents an additional section that can be
     * parsed in a search request (within the ext element).
     */
    class SearchExtSpec<T extends SearchExtBuilder> extends SearchExtensionSpec<T, CheckedFunction<XContentParser, T, IOException>> {
        public SearchExtSpec(ParseField name, Writeable.Reader<? extends T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
            super(name, reader, parser);
        }

        public SearchExtSpec(String name, Writeable.Reader<? extends T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
            super(name, reader, parser);
        }
    }

    /**
     * Specification for a {@link RescorerBuilder}.
     */
    class RescorerSpec<T extends RescorerBuilder<T>> extends SearchExtensionSpec<T, CheckedFunction<XContentParser, T, IOException>> {
        public RescorerSpec(ParseField name, Writeable.Reader<? extends T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
            super(name, reader, parser);
        }

        public RescorerSpec(String name, Writeable.Reader<? extends T> reader, CheckedFunction<XContentParser, T, IOException> parser) {
            super(name, reader, parser);
        }
    }

    /**
     * Specification of search time behavior extension like a custom {@link MovAvgModel} or {@link ScoreFunction}.
     *
     * @param <W> the type of the main {@link NamedWriteable} for this spec. All specs have this but it isn't always *for* the same thing
     *        though, usually it is some sort of builder sent from the coordinating node to the data nodes executing the behavior
     * @param <P> the type of the parser for this spec. The parser runs on the coordinating node, converting {@link XContent} into the
     *        behavior to execute
     */
    class SearchExtensionSpec<W extends NamedWriteable, P> {
        private final ParseField name;
        private final Writeable.Reader<? extends W> reader;
        private final P parser;

        /**
         * Build the spec with a {@linkplain ParseField}.
         *
         * @param name the name of the behavior as a {@linkplain ParseField}. The parser is registered under all names specified by the
         *        {@linkplain ParseField} but the reader is only registered under the {@link ParseField#getPreferredName()} so be sure that
         *        that is the name that W's {@link NamedWriteable#getWriteableName()} returns.
         * @param reader reader that reads the behavior from the internode protocol
         * @param parser parser that reads the behavior from a REST request
         */
        public SearchExtensionSpec(ParseField name, Writeable.Reader<? extends W> reader, P parser) {
            this.name = name;
            this.reader = reader;
            this.parser = parser;
        }

        /**
         * Build the spec with a String.
         *
         * @param name the name of the behavior. The parser and the reader are registered under this name so be sure that that is the
         *        name that W's {@link NamedWriteable#getWriteableName()} returns.
         * @param reader reader that reads the behavior from the internode protocol
         * @param parser parser that reads the behavior from a REST request
         */
        public SearchExtensionSpec(String name, Writeable.Reader<? extends W> reader, P parser) {
            this(new ParseField(name), reader, parser);
        }

        /**
         * The name of the thing being specified as a {@link ParseField}. This allows it to have deprecated names.
         */
        public ParseField getName() {
            return name;
        }

        /**
         * The reader responsible for reading the behavior from the internode protocol.
         */
        public Writeable.Reader<? extends W> getReader() {
            return reader;
        }

        /**
         * The parser responsible for converting {@link XContent} into the behavior.
         */
        public P getParser() {
            return parser;
        }
    }

    /**
     * Context available during fetch phase construction.
     */
    class FetchPhaseConstructionContext {
        private final Map<String, Highlighter> highlighters;

        public FetchPhaseConstructionContext(Map<String, Highlighter> highlighters) {
            this.highlighters = highlighters;
        }

        public Map<String, Highlighter> getHighlighters() {
            return highlighters;
        }
    }
}
package com.netcetera.trema.eclipse.editors; import java.io.IOException; import java.io.StringWriter; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.action.ToolBarManager; import org.eclipse.jface.commands.ActionHandler; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.text.DocumentEvent; import org.eclipse.jface.text.IDocumentListener; import org.eclipse.jface.viewers.CellEditor; import org.eclipse.jface.viewers.ColumnPixelData; import org.eclipse.jface.viewers.ColumnViewerEditor; import org.eclipse.jface.viewers.ColumnViewerEditorActivationEvent; import org.eclipse.jface.viewers.ColumnViewerEditorActivationStrategy; import org.eclipse.jface.viewers.ColumnViewerToolTipSupport; import org.eclipse.jface.viewers.ColumnWeightData; import org.eclipse.jface.viewers.ComboBoxCellEditor; import org.eclipse.jface.viewers.ICellEditorListener; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.TableLayout; import org.eclipse.jface.viewers.TextCellEditor; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.viewers.TreeViewerColumn; import org.eclipse.jface.viewers.TreeViewerEditor; import org.eclipse.jface.viewers.TreeViewerFocusCellManager; import org.eclipse.swt.SWT; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.layout.FillLayout; import 
org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeColumn; import org.eclipse.ui.IEditorActionBarContributor; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IEditorSite; import org.eclipse.ui.IFileEditorInput; import org.eclipse.ui.ISharedImages; import org.eclipse.ui.IWorkbenchActionConstants; import org.eclipse.ui.PartInitException; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.contexts.IContextService; import org.eclipse.ui.dialogs.FilteredTree; import org.eclipse.ui.dialogs.PatternFilter; import org.eclipse.ui.editors.text.TextEditor; import org.eclipse.ui.handlers.IHandlerService; import org.eclipse.ui.ide.FileStoreEditorInput; import org.eclipse.ui.ide.IGotoMarker; import org.eclipse.ui.part.MultiPageEditorPart; import org.eclipse.ui.texteditor.IStatusField; import org.eclipse.ui.texteditor.MarkerUtilities; import com.netcetera.trema.core.ParseException; import com.netcetera.trema.core.ParseWarning; import com.netcetera.trema.core.Status; import com.netcetera.trema.core.XMLDatabase; import com.netcetera.trema.core.api.IDatabase; import com.netcetera.trema.eclipse.TremaEclipseUtil; import com.netcetera.trema.eclipse.TremaPlugin; import com.netcetera.trema.eclipse.TremaUtilEclipse; import com.netcetera.trema.eclipse.actions.AddTextNodeAction; import com.netcetera.trema.eclipse.actions.AddValueNodeAction; import com.netcetera.trema.eclipse.actions.EditAction; import com.netcetera.trema.eclipse.actions.ExpandCollapseAction; import com.netcetera.trema.eclipse.actions.ExportAction; import com.netcetera.trema.eclipse.actions.ImportAction; import com.netcetera.trema.eclipse.actions.MoveDownAction; import com.netcetera.trema.eclipse.actions.MoveUpAction; import 
com.netcetera.trema.eclipse.actions.RemoveAction; import com.netcetera.trema.eclipse.actions.SelectAllAction; import com.netcetera.trema.eclipse.actions.TremaEditorAction; import com.netcetera.trema.eclipse.editors.xmleditor.XMLEditor; import com.netcetera.trema.eclipse.validators.LanguageValidator; /** * The Trema multipage editor. The first page shows a table tree view of a Trema * database, the second one consists of a nested text editor to display the raw * XML source. When the trema database cannot be successfully built from the * source file, then the general behaviour is to switch to the source view, so * that the error can be corrected there * */ public class TremaEditor extends MultiPageEditorPart implements IGotoMarker { /** Property for the key column. */ public static final String PROP_FIRST_COLUMN = "firstColumn"; /** Property for the value column. */ public static final String PROP_SECOND_COLUMN = "secondColumn"; /** Property for the status column. */ public static final String PROP_THIRD_COLUMN = "thirdColumn"; /** Status field key for the "size" status field. */ public static final String STATUS_FIELD_SIZE_KEY = "statusFieldSize"; /** * The database model. This is the only time the database object is * instanciated. */ private IDatabase db = new XMLDatabase(); /** Nested text editor for the source page. */ private TextEditor textEditor = null; /** The content provider and database listener. */ private TremaContentProvider contentProvider = null; /** The cell editor for the second column. */ private TextCellEditor secondColumnCellEditor = null; /** The validator for the master language. */ private LanguageValidator masterLanguageValidator = null; /** Table tree columns. */ private TreeColumn firstColumn = null; private TreeColumn secondColumn = null; private TreeColumn thirdColumn = null; /** * Index of the table tree viewer page, will be reassigned by 'addPage'. 
*/ private int tableTreeViewerPageIndex = 0; /** Index of the source page, will be reassigned by 'addPage'. */ private int sourcePageIndex = 0; /** Flag indicating modification in the table tree page. */ private boolean tableTreeModified = false; private TreeViewerFocusCellManager focusCellManager; /** * Flag indicating if the pages are out of sync. This flag has got nothing to * do with the dirty state of the editor, e.g. although the source page may * have been saved (i.e. not dirty), the table tree page might still be not * up-to-date. This flag is used during page changes. */ private boolean pagesOutOfSync = true; /** * Temporary storage to keep track of the expanded elements in the table tree * viewer on page changes. */ private Object[] expandedElements = null; private TreeViewer treeViewer; /** Central status field repository. */ private Map<String, IStatusField> statusFieldMap = new HashMap<String, IStatusField>(); /** Central Trema editor action repository. */ private Map<String, TremaEditorAction> actionMap = new HashMap<String, TremaEditorAction>(); /** {@inheritDoc} */ @Override public void init(IEditorSite site, IEditorInput editorInput) throws PartInitException { // if we open an external file or run the editor as RCP application, we get // a JavaFileEditorInput // fixme: YT 2004-09-17: handle JavaFileEditorInput correctly (see other // fix-me tasks) if (!(editorInput instanceof IFileEditorInput) && !(editorInput instanceof FileStoreEditorInput)) { throw new PartInitException("Invalid input: must be FileStoreEditorInput"); } super.init(site, editorInput); } /** {@inheritDoc} */ @Override protected void createPages() { activateContext(); createTableTreeViewerPage(); createSourcePage(); MenuManager menuManager = new MenuManager(); menuManager.setRemoveAllWhenShown(true); // create the context menu dynamically to allow for contributions menuManager.addMenuListener(new IMenuListener() { public void menuAboutToShow(IMenuManager menuManager) { 
fillContextMenu(menuManager); } }); Control tree= treeViewer.getTree(); Menu menu = menuManager.createContextMenu(tree); tree.setMenu(menu); getSite().registerContextMenu(menuManager, treeViewer); configureDatabase(); initTreeViewerInput(); updateTitle(); updateColumnTexts(); // register as selection provider in order for the handler // enablement/disablement to work } private void activateContext() { IContextService contextService = (IContextService) getSite().getService(IContextService.class); contextService.activateContext("com.netcetera.trema.eclipse.contexts.tremaContext"); } /** * Initializes the table tree viewer input. In case of an error while building * the database from the source, a switch to the sourceview is done. */ private void initTreeViewerInput() { try { updateTableTreeFromTextEditor(); treeViewer.setSelection(new StructuredSelection(db)); } catch (ParseException e) { setActivePage(sourcePageIndex); } } /** Configures the database. */ private void configureDatabase() { db.addListener(contentProvider); } /** Creates the table tree view page. */ private void createTableTreeViewerPage() { Control page = createTableTreeViewer(); tableTreeViewerPageIndex = addPage(page); setPageText(tableTreeViewerPageIndex, "Table Tree View"); } /** * Creates the table tree viewer. This is the only time the table tree viewer * object is instantiated. 
*/ private Control createTableTreeViewer() { Composite parent= new Composite(getContainer(), SWT.None); parent.setBackground(parent.getDisplay().getSystemColor(SWT.COLOR_WHITE)); parent.setLayout(new GridLayout()); PatternFilter filter = new TremaTreePatternFilter(); filter.setIncludeLeadingWildcard(true); Composite toolbarContainer= new Composite(parent, SWT.BORDER); toolbarContainer.setLayout(new FillLayout()); toolbarContainer.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); ToolBarManager toolbarManager = new ToolBarManager(SWT.FLAT | SWT.RIGHT); toolbarManager.createControl(toolbarContainer); //SWT.MULTI and FocusCellOwnerDrawHighlighter dont work together nicely, see //https://bugs.eclipse.org/bugs/show_bug.cgi?id=206692 //https://bugs.eclipse.org/bugs/show_bug.cgi?id=268135 //A patched version of FocusCellOwnerDrawHighlighter is used, which can probably be removed, as soon as there is a better solution FilteredTree tree = new FilteredTree(parent, SWT.FULL_SELECTION | SWT.BORDER | SWT.MULTI, filter, true); tree.setLayoutData(new GridData(GridData.FILL_BOTH)); treeViewer = tree.getViewer(); //treeViewer = new TreeViewer(getContainer(), SWT.FULL_SELECTION | SWT.MULTI); configureTable(treeViewer); addTableTreeViewerListeners(treeViewer); setCellEditorsAndCellModifier(treeViewer); // content provider and label provider contentProvider = new TremaContentProvider(this); ColumnViewerToolTipSupport.enableFor(treeViewer); treeViewer.setContentProvider(contentProvider); //treeViewer.setLabelProvider(new TremaLabelProvider()); // various properties treeViewer.setAutoExpandLevel(2); treeViewer.setUseHashlookup(true); getSite().setSelectionProvider(treeViewer); createActions(treeViewer); createToolbarManager(toolbarManager); return parent; } private void createToolbarManager(ToolBarManager toolbarManager) { toolbarManager.add(getAction(TremaEditorActionConstants.EXPAND)); toolbarManager.add(getAction(TremaEditorActionConstants.COLLAPSE)); toolbarManager.add(new 
Separator()); toolbarManager.add(getAction(TremaEditorActionConstants.EDIT)); toolbarManager.add(getAction(TremaEditorActionConstants.ADD_VALUE_NODE)); toolbarManager.add(getAction(TremaEditorActionConstants.MOVE_UP)); toolbarManager.add(getAction(TremaEditorActionConstants.MOVE_DOWN)); toolbarManager.add(getAction(TremaEditorActionConstants.REMOVE)); toolbarManager.add(new Separator()); toolbarManager.add(getAction(TremaEditorActionConstants.ADD_TEXT_NODE)); toolbarManager.add(getAction(TremaEditorActionConstants.IMPORT)); toolbarManager.add(getAction(TremaEditorActionConstants.EXPORT)); toolbarManager.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS)); toolbarManager.update(true); } /** * Configures the table of the table tree viewer. * * @param treeViewer */ private void configureTable(TreeViewer treeViewer) { Tree tree = treeViewer.getTree(); tree.setHeaderVisible(true); tree.setLinesVisible(true); TreeViewerColumn firstViewerColumn = new TreeViewerColumn(treeViewer, SWT.LEFT); firstViewerColumn.setLabelProvider(new TremaLabelProvider()); firstColumn = firstViewerColumn.getColumn(); // add the columns and set the column properties firstColumn.setText("Key"); TreeViewerColumn secondViewerColumn = new TreeViewerColumn(treeViewer, SWT.LEFT); secondViewerColumn.setLabelProvider(new TremaLabelProvider()); secondColumn = secondViewerColumn.getColumn(); secondColumn.setText("Context"); TreeViewerColumn thirdViewerColumn = new TreeViewerColumn(treeViewer, SWT.LEFT); thirdViewerColumn.setLabelProvider(new TremaLabelProvider()); thirdColumn = thirdViewerColumn.getColumn(); thirdColumn.setText(""); thirdColumn.setResizable(false); TableLayout tableLayout = new TableLayout(); tableLayout.addColumnData(new ColumnWeightData(30, true)); tableLayout.addColumnData(new ColumnWeightData(70, true)); tableLayout.addColumnData(new ColumnPixelData(90, false)); tree.setLayout(tableLayout); treeViewer.setColumnProperties(new String[]{PROP_FIRST_COLUMN, PROP_SECOND_COLUMN, 
PROP_THIRD_COLUMN}); // code below defines how cell editing is triggered, eg. by navigating into // a table cell with tabs etc. focusCellManager = new TreeViewerFocusCellManager(treeViewer, new FocusCellOwnerDrawHighlighterForMultiselection(treeViewer)); ColumnViewerEditorActivationStrategy actSupport = new ColumnViewerEditorActivationStrategy( treeViewer) { @Override protected boolean isEditorActivationEvent(ColumnViewerEditorActivationEvent event) { // Editing mode is activated when traversing with Tab (while already // editing), left mouse click into the cell, enter key or program return event.eventType == ColumnViewerEditorActivationEvent.TRAVERSAL || (event.eventType == ColumnViewerEditorActivationEvent.MOUSE_CLICK_SELECTION && ((MouseEvent) event.sourceEvent).button == 1) || (event.eventType == ColumnViewerEditorActivationEvent.KEY_PRESSED && event.keyCode == SWT.CR) || event.eventType == ColumnViewerEditorActivationEvent.PROGRAMMATIC; } }; TreeViewerEditor.create(treeViewer, focusCellManager, actSupport, ColumnViewerEditor.TABBING_HORIZONTAL | ColumnViewerEditor.TABBING_MOVE_TO_ROW_NEIGHBOR | ColumnViewerEditor.TABBING_VERTICAL | ColumnViewerEditor.KEYBOARD_ACTIVATION); } /** Hooks up event listeners to the table tree viewer. */ private void addTableTreeViewerListeners(final TreeViewer treeViewer) { treeViewer.addSelectionChangedListener(new ISelectionChangedListener() { public void selectionChanged(SelectionChangedEvent event) { if (event.getSource() == treeViewer) { updateColumnTexts(); updateActionEnablements(); updateCellEditorValidators(); } } }); } /** * Sets the cell editors and the cell modifier of the table tree viewer. 
* * @param treeViewer */ private void setCellEditorsAndCellModifier(TreeViewer treeViewer) { secondColumnCellEditor = new TextCellEditor(treeViewer.getTree()); secondColumnCellEditor.addListener(new ICellEditorListener() { // listener displaying error messages in the status bar public void applyEditorValue() { setErrorMessage(null); } public void cancelEditor() { setErrorMessage(null); } public void editorValueChanged(boolean oldValidState, boolean newValidState) { setErrorMessage(secondColumnCellEditor.getErrorMessage()); } private void setErrorMessage(String errorMessage) { getEditorSite().getActionBars().getStatusLineManager().setErrorMessage(errorMessage); } }); // the validator will be added dynamically if the master language is going // to be edited masterLanguageValidator = new LanguageValidator(); ComboBoxCellEditor statusEditor = new ComboBoxCellEditor(treeViewer.getTree(), Status.getAvailableStatusNames(), SWT.READ_ONLY); treeViewer.setCellEditors(new CellEditor[]{new TextCellEditor(treeViewer.getTree()), secondColumnCellEditor, statusEditor}); treeViewer.setCellModifier(new TremaCellModifier()); } /** Creates the Trema editor actions used by the table tree viewer page. 
   *
   * @param treeViewer the tree viewer the actions operate on
   */
  @SuppressWarnings("deprecation")
  private void createActions(TreeViewer treeViewer) {
    // getEditorSite().getKeyBindingService().registerAction(action) needs to be
    // called in order for the enablement / disablement to work in the toolbar

    // edit action
    ImageDescriptor image = TremaPlugin.getDefault().getImageDescriptor("icons/edit.gif");
    TremaEditorAction action = new EditAction(treeViewer, "&Edit...", image);
    registerAction(TremaEditorActionConstants.EDIT, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // remove action: wired through the handler service instead of the
    // deprecated key binding service (see the commented-out line below)
    image = PlatformUI.getWorkbench()
        .getSharedImages()
        .getImageDescriptor(ISharedImages.IMG_TOOL_DELETE);
    action = new RemoveAction(treeViewer, "&Remove", image);
    registerAction(TremaEditorActionConstants.REMOVE, action);
    // getEditorSite().getKeyBindingService().registerAction(action);
    IHandlerService handlerService = (IHandlerService) getSite().getService(IHandlerService.class);
    handlerService.activateHandler(action.getActionDefinitionId(), new ActionHandler(action));

    // add text node action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/add.gif");
    action = new AddTextNodeAction(treeViewer, "Add &Text Node...", image);
    registerAction(TremaEditorActionConstants.ADD_TEXT_NODE, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // add value node action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/add.gif");
    action = new AddValueNodeAction(treeViewer, "Add &Value Node...", image);
    registerAction(TremaEditorActionConstants.ADD_VALUE_NODE, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // move up action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/up.gif");
    action = new MoveUpAction(treeViewer, "Move &Up", image);
    registerAction(TremaEditorActionConstants.MOVE_UP, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // move down action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/down.gif");
    action = new MoveDownAction(treeViewer, "Move &Down", image);
    registerAction(TremaEditorActionConstants.MOVE_DOWN, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // select all action (note: not registered with the key binding service)
    action = new SelectAllAction(treeViewer, "Select &all", null);
    registerAction(TremaEditorActionConstants.SELECT_ALL, action);

    // expand action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/expand.gif");
    action = new ExpandCollapseAction(treeViewer, "E&xpand", image, true);
    registerAction(TremaEditorActionConstants.EXPAND, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // collapse action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/collapse.gif");
    action = new ExpandCollapseAction(treeViewer, "&Collapse", image, false);
    registerAction(TremaEditorActionConstants.COLLAPSE, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // import action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/import.gif");
    action = new ImportAction(getEditorSite().getWorkbenchWindow(), this, "&Import...", image);
    registerAction(TremaEditorActionConstants.IMPORT, action);
    getEditorSite().getKeyBindingService().registerAction(action);

    // export action
    image = TremaPlugin.getDefault().getImageDescriptor("icons/export.gif");
    action = new ExportAction(getEditorSite().getWorkbenchWindow(), this, "&Export...", image);
    registerAction(TremaEditorActionConstants.EXPORT, action);
    getEditorSite().getKeyBindingService().registerAction(action);
  }

  /**
   * Updates the column text headers according to the current selection.
   */
  protected final void updateColumnTexts() {
    // The headers depend on which node kinds are present in the selection;
    // the analysis result is a bit mask decoded by the hasJust* helpers.
    int result = TremaUtilEclipse.analyzeSelection((IStructuredSelection) treeViewer.getSelection());
    if (TremaUtilEclipse.hasJustDatabases(result)) {
      firstColumn.setText("");
      secondColumn.setText("Master Language");
      thirdColumn.setText("");
    } else if (TremaUtilEclipse.hasJustTextNodes(result)) {
      firstColumn.setText("Key");
      secondColumn.setText("Context");
      thirdColumn.setText("");
    } else if (TremaUtilEclipse.hasJustValueNodes(result)) {
      firstColumn.setText("Language");
      secondColumn.setText("Value");
      thirdColumn.setText("Status");
    } else if (TremaUtilEclipse.hasJustDatabasesAndTextNodes(result)) {
      firstColumn.setText("Key");
      secondColumn.setText("Master Language / Context");
      thirdColumn.setText("");
    } else if (TremaUtilEclipse.hasJustDatabasesAndValueNodes(result)) {
      firstColumn.setText("Language");
      secondColumn.setText("Master Language / Value");
      thirdColumn.setText("Status");
    } else if (TremaUtilEclipse.hasJustTextAndValueNodes(result)) {
      firstColumn.setText("Key / Language");
      secondColumn.setText("Context / Value");
      thirdColumn.setText("Status");
    } else {
      // mixed selection: show the most general headers
      firstColumn.setText("Key / Language");
      secondColumn.setText("Master Language / Context / Value");
      thirdColumn.setText("Status");
    }
  }

  /**
   * Sets the enablements of the registered Trema editor actions according to
   * the current selection.
   */
  protected void updateActionEnablements() {
    IStructuredSelection selection = (IStructuredSelection) treeViewer.getSelection();
    int analysisResult = TremaUtilEclipse.analyzeSelection(selection);
    // let every registered action decide its own enablement
    Iterator<TremaEditorAction> iterator = actionMap.values().iterator();
    while (iterator.hasNext()) {
      iterator.next().updateEnablement(selection, analysisResult);
    }
    // force the toolbar to repaint the (possibly changed) enablement states
    getEditorSite().getActionBars().getToolBarManager().update(true);
  }

  /**
   * Disables all registered Trema editor actions.
   */
  protected void disableActions() {
    Iterator<TremaEditorAction> iterator = actionMap.values().iterator();
    while (iterator.hasNext()) {
      TremaEditorAction action = iterator.next();
      action.setEnabled(false);
    }
  }

  /**
   * Adds the cell editor validators depending on the current selection.
   */
  protected void updateCellEditorValidators() {
    IStructuredSelection selection = (IStructuredSelection) treeViewer.getSelection();
    int result = TremaUtilEclipse.analyzeSelection(selection);
    // add a LanguageValidator only if editing the master language
    if (TremaUtilEclipse.hasJustDatabases(result)) {
      secondColumnCellEditor.setValidator(masterLanguageValidator);
    } else {
      secondColumnCellEditor.setValidator(null);
    }
  }

  /** Creates the source view page with a nested text editor. */
  private void createSourcePage() {
    try {
      textEditor = new XMLEditor();
      sourcePageIndex = addPage(textEditor, getEditorInput());
      setPageText(sourcePageIndex, "Source");
      IEditorInput editorInput = textEditor.getEditorInput();
      // fixme: YT 2004-09-16: ...getDocument(editorInput) returns null for a
      // JavaFileEditorInput
      textEditor.getDocumentProvider()
          .getDocument(editorInput)
          .addDocumentListener(new IDocumentListener() {
            public void documentAboutToBeChanged(DocumentEvent event) {
              // nothing to do
            }

            public void documentChanged(DocumentEvent event) {
              // any edit in the source page makes the pages diverge
              pagesOutOfSync = true; // set the flag
            }
          });
    } catch (PartInitException e) {
      TremaPlugin.logError(e);
    }
  }

  /**
   * This method should be called by the event listeners if any change is made
   * to the table tree.
   */
  public void tableTreeModified() {
    tableTreeModified(false);
  }

  /**
   * This method should be called by the event listeners if any change is made
   * to the table tree.
   *
   * @param sizeChanged true if the size of the database was changed
   */
  public void tableTreeModified(boolean sizeChanged) {
    tableTreeModified = true;
    pagesOutOfSync = true;
    // fire a property change event in case only the table tree has
    // changed (changes in the source page already trigger such an
    // event)
    if (!super.isDirty()) {
      firePropertyChange(IEditorPart.PROP_DIRTY);
    }
    updateActionEnablements();
    if (sizeChanged) {
      updateStatusField(STATUS_FIELD_SIZE_KEY);
    }
  }

  /** {@inheritDoc} */
  @Override
  public boolean isDirty() {
    // dirty when either the table tree page or the nested text editor changed
    return tableTreeModified || super.isDirty();
  }

  /** Updates the title to be displayed in the editor tab. */
  private void updateTitle() {
    IEditorInput input = getEditorInput();
    setPartName(input.getName());
    setTitleToolTip(input.getToolTipText());
  }

  /**
   * Updates the database model and consequently the table tree page from the
   * source page by re-parsing the XML code.
   *
   * @throws ParseException if any parse errors occur during parsing
   */
  private void updateTableTreeFromTextEditor() throws ParseException {
    // note that the text file encoding is automatically recognized by
    // the eclipse core for *.xml and *.trm files since they are
    // associated with the "org.eclipse.core.runtime.xml" content type
    String editorInputText = textEditor.getDocumentProvider()
        .getDocument(textEditor.getEditorInput())
        .get();
    try {
      deleteMarkers();
      ((XMLDatabase) db).build(editorInputText, false);
      pagesOutOfSync = false;
      treeViewer.setInput(new DatabaseContainer(db));
      addWarningMarkers(((XMLDatabase) db).getParseWarnings());
    } catch (ParseException e) {
      // invalid XML: clear the tree, flag the offending line and rethrow so
      // the caller can keep the user on the source page
      treeViewer.setInput(null);
      addMarker(e.getMessage(), e.getLineNumber(), IMarker.SEVERITY_ERROR);
      TremaPlugin.logError(e);
      throw e;
    } catch (IOException e) {
      treeViewer.setInput(null);
      // IOException is serious and will trigger platform error handling
      TremaPlugin.logError(e);
      throw new RuntimeException(e);
    }
  }

  /**
   * Updates the source page from the table tree page by generating XML code
   * from the database model.
   */
  private void updateTextEditorFromTableTree() {
    // only update the textEditor if the treeEditor contains valid data
    if (!((XMLDatabase) db).isXmlInternalized()) {
      return;
    }
    String encoding = null;
    try {
      IEditorInput editorInput = textEditor.getEditorInput();
      if (editorInput instanceof IFileEditorInput) {
        encoding = ((IFileEditorInput) textEditor.getEditorInput()).getFile().getCharset();
      } else if (editorInput instanceof FileStoreEditorInput) {
        encoding = "UTF-8";
      }
      // NOTE(review): for any other input type encoding stays null here and is
      // passed to writeXML below — confirm writeXML tolerates a null encoding.
    } catch (CoreException e) {
      TremaPlugin.logError(e.getMessage());
      encoding = "UTF-8";
    }
    String lineSeparator = TremaEclipseUtil.getDefaultLineSeparator(); // use the
                                                                      // default
                                                                      // platform line
                                                                      // separator
    StringWriter stringWriter = new StringWriter();
    try {
      ((XMLDatabase) db).writeXML(stringWriter, encoding, " ", lineSeparator);
      textEditor.getDocumentProvider()
          .getDocument(textEditor.getEditorInput())
          .set(stringWriter.toString());
      pagesOutOfSync = false;
    } catch (IOException e) {
      TremaPlugin.logError(e.getMessage());
    }
  }

  /** Deletes all problem markers from the editor's input file. */
  private void deleteMarkers() {
    try {
      getInputFile().deleteMarkers(IMarker.PROBLEM, true, IResource.DEPTH_INFINITE);
    } catch (CoreException e1) {
      throw new RuntimeException(e1);
    }
  }

  /**
   * Adds the ParseWarnings as Markers in the Editor.
   *
   * @param warnings the warnings to add
   */
  private void addWarningMarkers(ParseWarning[] warnings) {
    for (ParseWarning w : warnings) {
      addMarker(w.getMessage(), w.getLineNumber(), IMarker.SEVERITY_WARNING);
    }
  }

  /**
   * Convenience method to add a marker to the editor.
   *
   * @param message the message
   * @param linenumber the line number to attach the marker to
   * @param severity the severity
   */
  protected void addMarker(String message, int linenumber, int severity) {
    Map<Object, Object> map = new HashMap<Object, Object>();
    int displayedLineNumber = linenumber;
    if (displayedLineNumber == 0) { // marker on line 0 cannot be seen.
displayedLineNumber = 1; } MarkerUtilities.setLineNumber(map, displayedLineNumber); MarkerUtilities.setMessage(map, message); map.put(IMarker.MESSAGE, message); map.put(IMarker.SEVERITY, Integer.valueOf(severity)); try { MarkerUtilities.createMarker(getInputFile(), map, IMarker.PROBLEM); } catch (CoreException e) { throw new RuntimeException(e); } } /** * @return the Editor input file */ protected IFile getInputFile() { IFileEditorInput ife = (IFileEditorInput) textEditor.getEditorInput(); if (ife != null) { return ife.getFile(); } else { return null; } } @Override protected void setActivePage(int pageIndex) { if (pageIndex != getActivePage()) { super.setActivePage(pageIndex); } } /** {@inheritDoc} */ @Override protected void pageChange(int newPageIndex) { if (newPageIndex == tableTreeViewerPageIndex) { updateActionEnablements(); if (pagesOutOfSync || !isDirty()) { try { // if the source page contains errors, don't change the page updateTableTreeFromTextEditor(); if (expandedElements != null) { treeViewer.setExpandedElements(expandedElements); } } catch (ParseException e) { setActivePage(sourcePageIndex); return; // if the source page contains parse errorr, don't change the // page } } } else if (newPageIndex == sourcePageIndex) { disableActions(); // the table tree viewer actions should not be runnable // from within the source page expandedElements = treeViewer.getExpandedElements(); if (pagesOutOfSync) { updateTextEditorFromTableTree(); } } super.pageChange(newPageIndex); // since our contributor is not an instance of // MultiPageEditorActionContributor, // the pageChange method of the superclass will not call any contributor // method IEditorActionBarContributor contributor = getEditorSite().getActionBarContributor(); if (contributor instanceof TremaEditorContributor) { ((TremaEditorContributor) contributor).setActivePage(this, newPageIndex); } } /** * Registeres a status field with a given key. 
   *
   * @param key the key of the status field
   * @param statusField the status field
   */
  public void registerStatusField(String key, IStatusField statusField) {
    // a null status field unregisters the key
    if (statusField == null) {
      statusFieldMap.remove(key);
    } else {
      statusFieldMap.put(key, statusField);
    }
  }

  /**
   * Updates a status field with a given key.
   *
   * @param key the key for the status field to update
   */
  public void updateStatusField(String key) {
    if (!statusFieldMap.containsKey(key)) {
      return;
    }
    IStatusField statusField = statusFieldMap.get(key);
    if (key.equals(STATUS_FIELD_SIZE_KEY)) {
      int dbSize = db.getSize();
      statusField.setText(dbSize + ((dbSize == 1) ? " Record" : " Records"));
    }
  }

  /** {@inheritDoc} */
  @Override
  public void doSave(IProgressMonitor monitor) {
    // possibly update the text editor
    if (getActivePage() == tableTreeViewerPageIndex && pagesOutOfSync) {
      updateTextEditorFromTableTree();
    }
    tableTreeModified = false;
    // delegate to the text editor's doSave method
    textEditor.doSave(monitor);
    // if the user saved from the source page, the tree viewer model needs to be
    // updated now
    if (getActivePage() == sourcePageIndex) {
      try {
        // make sure that the tree viewer is in sync with the text editor after
        // saving
        initTreeViewerInput();
      } catch (RuntimeException e) {
        // ignore, it is ok to save even if the xml/trm file is invalid.
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  public void doSaveAs() {
    // possibly update the text editor
    if (getActivePage() == tableTreeViewerPageIndex && pagesOutOfSync) {
      updateTextEditorFromTableTree();
    }
    tableTreeModified = false;
    // delegate to the text editor's doSaveAs method
    textEditor.doSaveAs();
    // the new input chosen in the save-as dialog becomes this editor's input
    setInput(textEditor.getEditorInput());
    updateTitle();
    // if the user saved from the source page, the tree viewer model needs to be
    // updated now
    if (getActivePage() == sourcePageIndex) {
      try {
        // make sure that the tree viewer is in sync with the text editor after
        // saving
        initTreeViewerInput();
      } catch (RuntimeException e) {
        // ignore, it is ok to save even if the xml/trm file is invalid.
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  public boolean isSaveAsAllowed() {
    return true;
  }

  /** {@inheritDoc} */
  public void gotoMarker(IMarker marker) {
    // markers live on source lines, so switch to the source page first
    setActivePage(sourcePageIndex);
    IGotoMarker gotoMarker = (IGotoMarker) textEditor.getAdapter(IGotoMarker.class);
    if (gotoMarker != null) {
      gotoMarker.gotoMarker(marker);
    }
  }

  /**
   * Gets the index of the table tree viewer page.
   *
   * @return the index of the table tree viewer page.
   */
  public int getTableTreeViewerPageIndex() {
    return tableTreeViewerPageIndex;
  }

  /**
   * Gets the index of the source page.
   *
   * @return the index of the source page.
   */
  public int getSourcePageIndex() {
    return sourcePageIndex;
  }

  /**
   * Registers a trema editor action.
   *
   * @param id the action id
   * @param action the action
   */
  public void registerAction(String id, TremaEditorAction action) {
    actionMap.put(id, action);
  }

  /**
   * Gets a trema editor action to a given id.
   *
   * @param id the action id
   * @return the action or <code>null</code> if none found
   */
  public TremaEditorAction getAction(String id) {
    return actionMap.get(id);
  }

  /** {@inheritDoc} */
  @Override
  public int getActivePage() {
    // to increase the visibility
    return super.getActivePage();
  }

  /**
   * Gets the text editor from the source page.
   *
   * @return the text editor from the source page.
   */
  public TextEditor getTextEditor() {
    return textEditor;
  }

  /**
   * Gets the table tree viewer from the table tree viewer page.
   *
   * @return the table tree viewer from the table tree viewer page.
   */
  public TreeViewer getTreeViewer() {
    return treeViewer;
  }

  /** Populates the tree viewer's context menu with the registered actions. */
  private void fillContextMenu(IMenuManager menuManager) {
    menuManager.add(getAction(TremaEditorActionConstants.EXPAND));
    menuManager.add(getAction(TremaEditorActionConstants.COLLAPSE));
    menuManager.add(new Separator());
    menuManager.add(getAction(TremaEditorActionConstants.EDIT));
    menuManager.add(getAction(TremaEditorActionConstants.ADD_VALUE_NODE));
    menuManager.add(getAction(TremaEditorActionConstants.MOVE_UP));
    menuManager.add(getAction(TremaEditorActionConstants.MOVE_DOWN));
    menuManager.add(getAction(TremaEditorActionConstants.REMOVE));
    menuManager.add(new Separator());
    menuManager.add(getAction(TremaEditorActionConstants.ADD_TEXT_NODE));
    menuManager.add(getAction(TremaEditorActionConstants.IMPORT));
    menuManager.add(getAction(TremaEditorActionConstants.EXPORT));
    // placeholder for contributions from other plug-ins
    menuManager.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS));
  }
}
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.monitor.network;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.io.Serializable;

/**
 * A snapshot of node network statistics (currently TCP counters) that can be
 * streamed over the transport layer and rendered as XContent.
 *
 * <p>NOTE(review): implements {@link Serializable} without an explicit
 * serialVersionUID, so the id is derived from the class shape and may change
 * across refactorings — confirm whether Java serialization compatibility
 * matters here (the transport layer uses {@link Streamable} instead).
 */
public class NetworkStats implements Streamable, Serializable, ToXContent {

    // time the stats were captured; serialized via writeVLong/readVLong
    long timestamp;

    // TCP counters; null when not available (presence is encoded as a
    // boolean flag in the stream, see readFrom/writeTo)
    Tcp tcp = null;

    NetworkStats() {

    }

    /** XContent field name constants used by {@link #toXContent}. */
    static final class Fields {
        static final XContentBuilderString NETWORK = new XContentBuilderString("network");
        static final XContentBuilderString TCP = new XContentBuilderString("tcp");
        static final XContentBuilderString ACTIVE_OPENS = new XContentBuilderString("active_opens");
        static final XContentBuilderString PASSIVE_OPENS = new XContentBuilderString("passive_opens");
        static final XContentBuilderString CURR_ESTAB = new XContentBuilderString("curr_estab");
        static final XContentBuilderString IN_SEGS = new XContentBuilderString("in_segs");
        static final XContentBuilderString OUT_SEGS = new XContentBuilderString("out_segs");
        static final XContentBuilderString RETRANS_SEGS = new XContentBuilderString("retrans_segs");
        static final XContentBuilderString ESTAB_RESETS = new XContentBuilderString("estab_resets");
        static final XContentBuilderString ATTEMPT_FAILS = new XContentBuilderString("attempt_fails");
        static final XContentBuilderString IN_ERRS = new XContentBuilderString("in_errs");
        static final XContentBuilderString OUT_RSTS = new XContentBuilderString("out_rsts");
    }

    // Emits: "network": { "tcp": { ... } }; the tcp object is omitted
    // entirely when no TCP stats are available.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(Fields.NETWORK);
        if (tcp != null) {
            builder.startObject(Fields.TCP);
            builder.field(Fields.ACTIVE_OPENS, tcp.getActiveOpens());
            builder.field(Fields.PASSIVE_OPENS, tcp.getPassiveOpens());
            builder.field(Fields.CURR_ESTAB, tcp.getCurrEstab());
            builder.field(Fields.IN_SEGS, tcp.getInSegs());
            builder.field(Fields.OUT_SEGS, tcp.getOutSegs());
            builder.field(Fields.RETRANS_SEGS, tcp.getRetransSegs());
            builder.field(Fields.ESTAB_RESETS, tcp.getEstabResets());
            builder.field(Fields.ATTEMPT_FAILS, tcp.getAttemptFails());
            builder.field(Fields.IN_ERRS, tcp.getInErrs());
            builder.field(Fields.OUT_RSTS, tcp.getOutRsts());
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    /** Deserialization factory: reads a NetworkStats instance from the stream. */
    public static NetworkStats readNetworkStats(StreamInput in) throws IOException {
        NetworkStats stats = new NetworkStats();
        stats.readFrom(in);
        return stats;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        timestamp = in.readVLong();
        // boolean flag marks whether a Tcp section follows (mirrors writeTo)
        if (in.readBoolean()) {
            tcp = Tcp.readNetworkTcp(in);
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(timestamp);
        if (tcp == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            tcp.writeTo(out);
        }
    }

    public long timestamp() {
        return timestamp;
    }

    public long getTimestamp() {
        return timestamp();
    }

    public Tcp tcp() {
        return tcp;
    }

    public Tcp getTcp() {
        return tcp();
    }

    /**
     * TCP counters. The names presumably mirror the SNMP TCP MIB counters
     * (tcpActiveOpens, tcpPassiveOpens, ...) — confirm against the probe that
     * fills this in.
     */
    public static class Tcp implements Serializable, Streamable {

        long activeOpens;
        long passiveOpens;
        long attemptFails;
        long estabResets;
        long currEstab;
        long inSegs;
        long outSegs;
        long retransSegs;
        long inErrs;
        long outRsts;

        /** Deserialization factory: reads a Tcp instance from the stream. */
        public static Tcp readNetworkTcp(StreamInput in) throws IOException {
            Tcp tcp = new Tcp();
            tcp.readFrom(in);
            return tcp;
        }

        // NOTE: field order here must stay in lockstep with writeTo below.
        @Override
        public void readFrom(StreamInput in) throws IOException {
            activeOpens = in.readLong();
            passiveOpens = in.readLong();
            attemptFails = in.readLong();
            estabResets = in.readLong();
            currEstab = in.readLong();
            inSegs = in.readLong();
            outSegs = in.readLong();
            retransSegs = in.readLong();
            inErrs = in.readLong();
            outRsts = in.readLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeLong(activeOpens);
            out.writeLong(passiveOpens);
            out.writeLong(attemptFails);
            out.writeLong(estabResets);
            out.writeLong(currEstab);
            out.writeLong(inSegs);
            out.writeLong(outSegs);
            out.writeLong(retransSegs);
            out.writeLong(inErrs);
            out.writeLong(outRsts);
        }

        public long activeOpens() {
            return this.activeOpens;
        }

        public long getActiveOpens() {
            return activeOpens();
        }

        public long passiveOpens() {
            return passiveOpens;
        }

        public long getPassiveOpens() {
            return passiveOpens();
        }

        public long attemptFails() {
            return attemptFails;
        }

        public long getAttemptFails() {
            return attemptFails();
        }

        public long estabResets() {
            return estabResets;
        }

        public long getEstabResets() {
            return estabResets();
        }

        public long currEstab() {
            return currEstab;
        }

        public long getCurrEstab() {
            return currEstab();
        }

        public long inSegs() {
            return inSegs;
        }

        public long getInSegs() {
            return inSegs();
        }

        public long outSegs() {
            return outSegs;
        }

        public long getOutSegs() {
            return outSegs();
        }

        public long retransSegs() {
            return retransSegs;
        }

        public long getRetransSegs() {
            return retransSegs();
        }

        public long inErrs() {
            return inErrs;
        }

        public long getInErrs() {
            return inErrs();
        }

        public long outRsts() {
            return outRsts;
        }

        public long getOutRsts() {
            return outRsts();
        }
    }
}
/*
 * Copyright (C) 2014 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.javapoet;

import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.*;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import org.junit.Test;

/**
 * Shared test cases for converting {@code javax.lang.model} mirrors and plain
 * {@code Class} objects into JavaPoet {@code TypeName}s. Subclasses supply the
 * {@link Elements} and {@link Types} utilities (e.g. from a compilation test
 * rule), which is why this class is abstract.
 */
public abstract class AbstractTypesTest {
  /** @return the elements utility backing the tests */
  protected abstract Elements getElements();

  /** @return the types utility backing the tests */
  protected abstract Types getTypes();

  // Resolves a runtime class to its compile-time TypeElement.
  private TypeElement getElement(Class<?> clazz) {
    return getElements().getTypeElement(clazz.getCanonicalName());
  }

  // Resolves a runtime class to its TypeMirror.
  private TypeMirror getMirror(Class<?> clazz) {
    return getElement(clazz).asType();
  }

  @Test public void getBasicTypeMirror() {
    assertThat(TypeName.get(getMirror(Object.class)))
        .isEqualTo(ClassName.get(Object.class));
    assertThat(TypeName.get(getMirror(Charset.class)))
        .isEqualTo(ClassName.get(Charset.class));
    assertThat(TypeName.get(getMirror(AbstractTypesTest.class)))
        .isEqualTo(ClassName.get(AbstractTypesTest.class));
  }

  @Test public void getParameterizedTypeMirror() {
    DeclaredType setType =
        getTypes().getDeclaredType(getElement(Set.class), getMirror(Object.class));
    assertThat(TypeName.get(setType))
        .isEqualTo(ParameterizedTypeName.get(ClassName.get(Set.class), ClassName.OBJECT));
  }

  // Fixture covering every kind of type-variable bound a declaration can have.
  static class Parameterized<
      Simple,
      ExtendsClass extends Number,
      ExtendsInterface extends Runnable,
      ExtendsTypeVariable extends Simple,
      Intersection extends Number & Runnable,
      IntersectionOfInterfaces extends Runnable & Serializable> {}

  @Test public void getTypeVariableTypeMirror() {
    List<? extends TypeParameterElement> typeVariables =
        getElement(Parameterized.class).getTypeParameters();

    // Members of converted types use ClassName and not Class<?>.
    ClassName number = ClassName.get(Number.class);
    ClassName runnable = ClassName.get(Runnable.class);
    ClassName serializable = ClassName.get(Serializable.class);

    assertThat(TypeName.get(typeVariables.get(0).asType()))
        .isEqualTo(TypeVariableName.get("Simple"));
    assertThat(TypeName.get(typeVariables.get(1).asType()))
        .isEqualTo(TypeVariableName.get("ExtendsClass", number));
    assertThat(TypeName.get(typeVariables.get(2).asType()))
        .isEqualTo(TypeVariableName.get("ExtendsInterface", runnable));
    assertThat(TypeName.get(typeVariables.get(3).asType()))
        .isEqualTo(TypeVariableName.get("ExtendsTypeVariable", TypeVariableName.get("Simple")));
    assertThat(TypeName.get(typeVariables.get(4).asType()))
        .isEqualTo(TypeVariableName.get("Intersection", number, runnable));
    assertThat(TypeName.get(typeVariables.get(5).asType()))
        .isEqualTo(TypeVariableName.get("IntersectionOfInterfaces", runnable, serializable));
    // intersection bounds are preserved in declaration order
    assertThat(((TypeVariableName) TypeName.get(typeVariables.get(4).asType())).bounds)
        .containsExactly(number, runnable);
  }

  @Test public void getPrimitiveTypeMirror() {
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.BOOLEAN)))
        .isEqualTo(TypeName.BOOLEAN);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.BYTE)))
        .isEqualTo(TypeName.BYTE);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.SHORT)))
        .isEqualTo(TypeName.SHORT);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.INT)))
        .isEqualTo(TypeName.INT);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.LONG)))
        .isEqualTo(TypeName.LONG);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.CHAR)))
        .isEqualTo(TypeName.CHAR);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.FLOAT)))
        .isEqualTo(TypeName.FLOAT);
    assertThat(TypeName.get(getTypes().getPrimitiveType(TypeKind.DOUBLE)))
        .isEqualTo(TypeName.DOUBLE);
  }

  @Test public void getArrayTypeMirror() {
    assertThat(TypeName.get(getTypes().getArrayType(getMirror(Object.class))))
        .isEqualTo(ArrayTypeName.of(ClassName.OBJECT));
  }

  @Test public void getVoidTypeMirror() {
    assertThat(TypeName.get(getTypes().getNoType(TypeKind.VOID)))
        .isEqualTo(TypeName.VOID);
  }

  @Test public void getNullTypeMirror() {
    // the null type has no TypeName representation
    try {
      TypeName.get(getTypes().getNullType());
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  @Test public void parameterizedType() throws Exception {
    ParameterizedTypeName type = ParameterizedTypeName.get(Map.class, String.class, Long.class);
    assertThat(type.toString()).isEqualTo("java.util.Map<java.lang.String, java.lang.Long>");
  }

  @Test public void arrayType() throws Exception {
    ArrayTypeName type = ArrayTypeName.of(String.class);
    assertThat(type.toString()).isEqualTo("java.lang.String[]");
  }

  @Test public void wildcardExtendsType() throws Exception {
    WildcardTypeName type = WildcardTypeName.subtypeOf(CharSequence.class);
    assertThat(type.toString()).isEqualTo("? extends java.lang.CharSequence");
  }

  @Test public void wildcardExtendsObject() throws Exception {
    // "? extends Object" collapses to the unbounded wildcard
    WildcardTypeName type = WildcardTypeName.subtypeOf(Object.class);
    assertThat(type.toString()).isEqualTo("?");
  }

  @Test public void wildcardSuperType() throws Exception {
    WildcardTypeName type = WildcardTypeName.supertypeOf(String.class);
    assertThat(type.toString()).isEqualTo("? super java.lang.String");
  }

  @Test public void wildcardMirrorNoBounds() throws Exception {
    WildcardType wildcard = getTypes().getWildcardType(null, null);
    TypeName type = TypeName.get(wildcard);
    assertThat(type.toString()).isEqualTo("?");
  }

  @Test public void wildcardMirrorExtendsType() throws Exception {
    Types types = getTypes();
    Elements elements = getElements();
    TypeMirror charSequence = elements.getTypeElement(CharSequence.class.getName()).asType();
    WildcardType wildcard = types.getWildcardType(charSequence, null);
    TypeName type = TypeName.get(wildcard);
    assertThat(type.toString()).isEqualTo("? extends java.lang.CharSequence");
  }

  @Test public void wildcardMirrorSuperType() throws Exception {
    Types types = getTypes();
    Elements elements = getElements();
    TypeMirror string = elements.getTypeElement(String.class.getName()).asType();
    WildcardType wildcard = types.getWildcardType(null, string);
    TypeName type = TypeName.get(wildcard);
    assertThat(type.toString()).isEqualTo("? super java.lang.String");
  }

  @Test public void typeVariable() throws Exception {
    TypeVariableName type = TypeVariableName.get("T", CharSequence.class);
    assertThat(type.toString()).isEqualTo("T"); // (Bounds are only emitted in declaration.)
  }

  @Test public void box() throws Exception {
    assertThat(TypeName.INT.box()).isEqualTo(ClassName.get(Integer.class));
    assertThat(TypeName.VOID.box()).isEqualTo(ClassName.get(Void.class));
    // boxing an already-boxed or non-primitive type is a no-op
    assertThat(ClassName.get(Integer.class).box()).isEqualTo(ClassName.get(Integer.class));
    assertThat(ClassName.get(Void.class).box()).isEqualTo(ClassName.get(Void.class));
    assertThat(TypeName.OBJECT.box()).isEqualTo(TypeName.OBJECT);
    assertThat(ClassName.get(String.class).box()).isEqualTo(ClassName.get(String.class));
  }

  @Test public void unbox() throws Exception {
    assertThat(TypeName.INT).isEqualTo(TypeName.INT.unbox());
    assertThat(TypeName.VOID).isEqualTo(TypeName.VOID.unbox());
    assertThat(ClassName.get(Integer.class).unbox()).isEqualTo(TypeName.INT.unbox());
    assertThat(ClassName.get(Void.class).unbox()).isEqualTo(TypeName.VOID.unbox());
    // types without a primitive counterpart cannot be unboxed
    try {
      TypeName.OBJECT.unbox();
      fail();
    } catch (UnsupportedOperationException expected) {
    }
    try {
      ClassName.get(String.class).unbox();
      fail();
    } catch (UnsupportedOperationException expected) {
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// --------------------------------------------------------------
//  THIS IS A GENERATED SOURCE FILE. DO NOT EDIT!
//  GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator.
// --------------------------------------------------------------

package org.apache.flink.api.java.tuple;

import org.apache.flink.annotation.Public;
import org.apache.flink.util.StringUtils;

/**
 * A tuple with 19 fields. Tuples are strongly typed; each field may be of a separate type. The
 * fields of the tuple can be accessed directly as public fields (f0, f1, ...) or via their position
 * through the {@link #getField(int)} method. The tuple field positions start at zero.
 *
 * <p>Tuples are mutable types, meaning that their fields can be re-assigned. This allows functions
 * that work with Tuples to reuse objects in order to reduce pressure on the garbage collector.
 *
 * <p>Warning: If you subclass Tuple19, then be sure to either
 *
 * <ul>
 *   <li>not add any new fields, or
 *   <li>make it a POJO, and always declare the element type of your DataStreams/DataSets to your
 *       descendant type. (That is, if you have a "class Foo extends Tuple19", then don't use
 *       instances of Foo in a DataStream&lt;Tuple19&gt; / DataSet&lt;Tuple19&gt;, but declare it as
 *       DataStream&lt;Foo&gt; / DataSet&lt;Foo&gt;.)
 * </ul>
 *
 * @see Tuple
 * @param <T0> The type of field 0
 * @param <T1> The type of field 1
 * @param <T2> The type of field 2
 * @param <T3> The type of field 3
 * @param <T4> The type of field 4
 * @param <T5> The type of field 5
 * @param <T6> The type of field 6
 * @param <T7> The type of field 7
 * @param <T8> The type of field 8
 * @param <T9> The type of field 9
 * @param <T10> The type of field 10
 * @param <T11> The type of field 11
 * @param <T12> The type of field 12
 * @param <T13> The type of field 13
 * @param <T14> The type of field 14
 * @param <T15> The type of field 15
 * @param <T16> The type of field 16
 * @param <T17> The type of field 17
 * @param <T18> The type of field 18
 */
@Public
public class Tuple19<
                T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17,
                T18>
        extends Tuple {

    private static final long serialVersionUID = 1L;

    /** Field 0 of the tuple. */
    public T0 f0;
    /** Field 1 of the tuple. */
    public T1 f1;
    /** Field 2 of the tuple. */
    public T2 f2;
    /** Field 3 of the tuple. */
    public T3 f3;
    /** Field 4 of the tuple. */
    public T4 f4;
    /** Field 5 of the tuple. */
    public T5 f5;
    /** Field 6 of the tuple. */
    public T6 f6;
    /** Field 7 of the tuple. */
    public T7 f7;
    /** Field 8 of the tuple. */
    public T8 f8;
    /** Field 9 of the tuple. */
    public T9 f9;
    /** Field 10 of the tuple. */
    public T10 f10;
    /** Field 11 of the tuple. */
    public T11 f11;
    /** Field 12 of the tuple. */
    public T12 f12;
    /** Field 13 of the tuple. */
    public T13 f13;
    /** Field 14 of the tuple. */
    public T14 f14;
    /** Field 15 of the tuple. */
    public T15 f15;
    /** Field 16 of the tuple. */
    public T16 f16;
    /** Field 17 of the tuple. */
    public T17 f17;
    /** Field 18 of the tuple. */
    public T18 f18;

    /** Creates a new tuple where all fields are null. */
    public Tuple19() {}

    /**
     * Creates a new tuple and assigns the given values to the tuple's fields.
     *
     * @param value0 The value for field 0
     * @param value1 The value for field 1
     * @param value2 The value for field 2
     * @param value3 The value for field 3
     * @param value4 The value for field 4
     * @param value5 The value for field 5
     * @param value6 The value for field 6
     * @param value7 The value for field 7
     * @param value8 The value for field 8
     * @param value9 The value for field 9
     * @param value10 The value for field 10
     * @param value11 The value for field 11
     * @param value12 The value for field 12
     * @param value13 The value for field 13
     * @param value14 The value for field 14
     * @param value15 The value for field 15
     * @param value16 The value for field 16
     * @param value17 The value for field 17
     * @param value18 The value for field 18
     */
    public Tuple19(
            T0 value0,
            T1 value1,
            T2 value2,
            T3 value3,
            T4 value4,
            T5 value5,
            T6 value6,
            T7 value7,
            T8 value8,
            T9 value9,
            T10 value10,
            T11 value11,
            T12 value12,
            T13 value13,
            T14 value14,
            T15 value15,
            T16 value16,
            T17 value17,
            T18 value18) {
        this.f0 = value0;
        this.f1 = value1;
        this.f2 = value2;
        this.f3 = value3;
        this.f4 = value4;
        this.f5 = value5;
        this.f6 = value6;
        this.f7 = value7;
        this.f8 = value8;
        this.f9 = value9;
        this.f10 = value10;
        this.f11 = value11;
        this.f12 = value12;
        this.f13 = value13;
        this.f14 = value14;
        this.f15 = value15;
        this.f16 = value16;
        this.f17 = value17;
        this.f18 = value18;
    }

    @Override
    public int getArity() {
        return 19;
    }

    // Positional, untyped field access; the unchecked cast is inherent to the Tuple API.
    @Override
    @SuppressWarnings("unchecked")
    public <T> T getField(int pos) {
        switch (pos) {
            case 0:
                return (T) this.f0;
            case 1:
                return (T) this.f1;
            case 2:
                return (T) this.f2;
            case 3:
                return (T) this.f3;
            case 4:
                return (T) this.f4;
            case 5:
                return (T) this.f5;
            case 6:
                return (T) this.f6;
            case 7:
                return (T) this.f7;
            case 8:
                return (T) this.f8;
            case 9:
                return (T) this.f9;
            case 10:
                return (T) this.f10;
            case 11:
                return (T) this.f11;
            case 12:
                return (T) this.f12;
            case 13:
                return (T) this.f13;
            case 14:
                return (T) this.f14;
            case 15:
                return (T) this.f15;
            case 16:
                return (T) this.f16;
            case 17:
                return (T) this.f17;
            case 18:
                return (T) this.f18;
            default:
                throw new IndexOutOfBoundsException(String.valueOf(pos));
        }
    }

    // Positional, untyped field assignment; mirrors getField(int).
    @Override
    @SuppressWarnings("unchecked")
    public <T> void setField(T value, int pos) {
        switch (pos) {
            case 0:
                this.f0 = (T0) value;
                break;
            case 1:
                this.f1 = (T1) value;
                break;
            case 2:
                this.f2 = (T2) value;
                break;
            case 3:
                this.f3 = (T3) value;
                break;
            case 4:
                this.f4 = (T4) value;
                break;
            case 5:
                this.f5 = (T5) value;
                break;
            case 6:
                this.f6 = (T6) value;
                break;
            case 7:
                this.f7 = (T7) value;
                break;
            case 8:
                this.f8 = (T8) value;
                break;
            case 9:
                this.f9 = (T9) value;
                break;
            case 10:
                this.f10 = (T10) value;
                break;
            case 11:
                this.f11 = (T11) value;
                break;
            case 12:
                this.f12 = (T12) value;
                break;
            case 13:
                this.f13 = (T13) value;
                break;
            case 14:
                this.f14 = (T14) value;
                break;
            case 15:
                this.f15 = (T15) value;
                break;
            case 16:
                this.f16 = (T16) value;
                break;
            case 17:
                this.f17 = (T17) value;
                break;
            case 18:
                this.f18 = (T18) value;
                break;
            default:
                throw new IndexOutOfBoundsException(String.valueOf(pos));
        }
    }

    /**
     * Sets new values to all fields of the tuple.
     *
     * @param value0 The value for field 0
     * @param value1 The value for field 1
     * @param value2 The value for field 2
     * @param value3 The value for field 3
     * @param value4 The value for field 4
     * @param value5 The value for field 5
     * @param value6 The value for field 6
     * @param value7 The value for field 7
     * @param value8 The value for field 8
     * @param value9 The value for field 9
     * @param value10 The value for field 10
     * @param value11 The value for field 11
     * @param value12 The value for field 12
     * @param value13 The value for field 13
     * @param value14 The value for field 14
     * @param value15 The value for field 15
     * @param value16 The value for field 16
     * @param value17 The value for field 17
     * @param value18 The value for field 18
     */
    public void setFields(
            T0 value0,
            T1 value1,
            T2 value2,
            T3 value3,
            T4 value4,
            T5 value5,
            T6 value6,
            T7 value7,
            T8 value8,
            T9 value9,
            T10 value10,
            T11 value11,
            T12 value12,
            T13 value13,
            T14 value14,
            T15 value15,
            T16 value16,
            T17 value17,
            T18 value18) {
        this.f0 = value0;
        this.f1 = value1;
        this.f2 = value2;
        this.f3 = value3;
        this.f4 = value4;
        this.f5 = value5;
        this.f6 = value6;
        this.f7 = value7;
        this.f8 = value8;
        this.f9 = value9;
        this.f10 = value10;
        this.f11 = value11;
        this.f12 = value12;
        this.f13 = value13;
        this.f14 = value14;
        this.f15 = value15;
        this.f16 = value16;
        this.f17 = value17;
        this.f18 = value18;
    }

    // -------------------------------------------------------------------------------------------------
    // standard utilities
    // -------------------------------------------------------------------------------------------------

    /**
     * Creates a string representation of the tuple in the form (f0, f1, f2, f3, f4, f5, f6, f7, f8,
     * f9, f10, f11, f12, f13, f14, f15, f16, f17, f18), where the individual fields are the value
     * returned by calling {@link Object#toString} on that field.
     *
     * @return The string representation of the tuple.
     */
    @Override
    public String toString() {
        return "("
                + StringUtils.arrayAwareToString(this.f0)
                + ","
                + StringUtils.arrayAwareToString(this.f1)
                + ","
                + StringUtils.arrayAwareToString(this.f2)
                + ","
                + StringUtils.arrayAwareToString(this.f3)
                + ","
                + StringUtils.arrayAwareToString(this.f4)
                + ","
                + StringUtils.arrayAwareToString(this.f5)
                + ","
                + StringUtils.arrayAwareToString(this.f6)
                + ","
                + StringUtils.arrayAwareToString(this.f7)
                + ","
                + StringUtils.arrayAwareToString(this.f8)
                + ","
                + StringUtils.arrayAwareToString(this.f9)
                + ","
                + StringUtils.arrayAwareToString(this.f10)
                + ","
                + StringUtils.arrayAwareToString(this.f11)
                + ","
                + StringUtils.arrayAwareToString(this.f12)
                + ","
                + StringUtils.arrayAwareToString(this.f13)
                + ","
                + StringUtils.arrayAwareToString(this.f14)
                + ","
                + StringUtils.arrayAwareToString(this.f15)
                + ","
                + StringUtils.arrayAwareToString(this.f16)
                + ","
                + StringUtils.arrayAwareToString(this.f17)
                + ","
                + StringUtils.arrayAwareToString(this.f18)
                + ")";
    }

    /**
     * Deep equality for tuples by calling equals() on the tuple members.
     *
     * @param o the object checked for equality
     * @return true if this is equal to o.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Tuple19)) {
            return false;
        }
        @SuppressWarnings("rawtypes")
        Tuple19 tuple = (Tuple19) o;
        // Null-safe comparison for every field: both null, or equals() holds.
        if (f0 != null ? !f0.equals(tuple.f0) : tuple.f0 != null) {
            return false;
        }
        if (f1 != null ? !f1.equals(tuple.f1) : tuple.f1 != null) {
            return false;
        }
        if (f2 != null ? !f2.equals(tuple.f2) : tuple.f2 != null) {
            return false;
        }
        if (f3 != null ? !f3.equals(tuple.f3) : tuple.f3 != null) {
            return false;
        }
        if (f4 != null ? !f4.equals(tuple.f4) : tuple.f4 != null) {
            return false;
        }
        if (f5 != null ? !f5.equals(tuple.f5) : tuple.f5 != null) {
            return false;
        }
        if (f6 != null ? !f6.equals(tuple.f6) : tuple.f6 != null) {
            return false;
        }
        if (f7 != null ? !f7.equals(tuple.f7) : tuple.f7 != null) {
            return false;
        }
        if (f8 != null ? !f8.equals(tuple.f8) : tuple.f8 != null) {
            return false;
        }
        if (f9 != null ? !f9.equals(tuple.f9) : tuple.f9 != null) {
            return false;
        }
        if (f10 != null ? !f10.equals(tuple.f10) : tuple.f10 != null) {
            return false;
        }
        if (f11 != null ? !f11.equals(tuple.f11) : tuple.f11 != null) {
            return false;
        }
        if (f12 != null ? !f12.equals(tuple.f12) : tuple.f12 != null) {
            return false;
        }
        if (f13 != null ? !f13.equals(tuple.f13) : tuple.f13 != null) {
            return false;
        }
        if (f14 != null ? !f14.equals(tuple.f14) : tuple.f14 != null) {
            return false;
        }
        if (f15 != null ? !f15.equals(tuple.f15) : tuple.f15 != null) {
            return false;
        }
        if (f16 != null ? !f16.equals(tuple.f16) : tuple.f16 != null) {
            return false;
        }
        if (f17 != null ? !f17.equals(tuple.f17) : tuple.f17 != null) {
            return false;
        }
        if (f18 != null ? !f18.equals(tuple.f18) : tuple.f18 != null) {
            return false;
        }
        return true;
    }

    // Standard 31-multiplier hash accumulated over all 19 fields (null hashes to 0),
    // consistent with equals() above.
    @Override
    public int hashCode() {
        int result = f0 != null ? f0.hashCode() : 0;
        result = 31 * result + (f1 != null ? f1.hashCode() : 0);
        result = 31 * result + (f2 != null ? f2.hashCode() : 0);
        result = 31 * result + (f3 != null ? f3.hashCode() : 0);
        result = 31 * result + (f4 != null ? f4.hashCode() : 0);
        result = 31 * result + (f5 != null ? f5.hashCode() : 0);
        result = 31 * result + (f6 != null ? f6.hashCode() : 0);
        result = 31 * result + (f7 != null ? f7.hashCode() : 0);
        result = 31 * result + (f8 != null ? f8.hashCode() : 0);
        result = 31 * result + (f9 != null ? f9.hashCode() : 0);
        result = 31 * result + (f10 != null ? f10.hashCode() : 0);
        result = 31 * result + (f11 != null ? f11.hashCode() : 0);
        result = 31 * result + (f12 != null ? f12.hashCode() : 0);
        result = 31 * result + (f13 != null ? f13.hashCode() : 0);
        result = 31 * result + (f14 != null ? f14.hashCode() : 0);
        result = 31 * result + (f15 != null ? f15.hashCode() : 0);
        result = 31 * result + (f16 != null ? f16.hashCode() : 0);
        result = 31 * result + (f17 != null ? f17.hashCode() : 0);
        result = 31 * result + (f18 != null ? f18.hashCode() : 0);
        return result;
    }

    /**
     * Shallow tuple copy.
     *
     * @return A new Tuple with the same fields as this.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Tuple19<
                    T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17,
                    T18>
            copy() {
        return new Tuple19<>(
                this.f0, this.f1, this.f2, this.f3, this.f4, this.f5, this.f6, this.f7, this.f8,
                this.f9, this.f10, this.f11, this.f12, this.f13, this.f14, this.f15, this.f16,
                this.f17, this.f18);
    }

    /**
     * Creates a new tuple and assigns the given values to the tuple's fields. This is more
     * convenient than using the constructor, because the compiler can infer the generic type
     * arguments implicitly. For example: {@code Tuple3.of(n, x, s)} instead of {@code new
     * Tuple3<Integer, Double, String>(n, x, s)}
     */
    public static <
                    T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17,
                    T18>
            Tuple19<
                            T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15,
                            T16, T17, T18>
                    of(
                            T0 value0,
                            T1 value1,
                            T2 value2,
                            T3 value3,
                            T4 value4,
                            T5 value5,
                            T6 value6,
                            T7 value7,
                            T8 value8,
                            T9 value9,
                            T10 value10,
                            T11 value11,
                            T12 value12,
                            T13 value13,
                            T14 value14,
                            T15 value15,
                            T16 value16,
                            T17 value17,
                            T18 value18) {
        return new Tuple19<>(
                value0, value1, value2, value3, value4, value5, value6, value7, value8, value9,
                value10, value11, value12, value13, value14, value15, value16, value17, value18);
    }
}
package jarshrink;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipFile;

import visionCore.io.MultiOutputStream;
import visionCore.io.MultiPrintStream;
import visionCore.util.Files;
import visionCore.util.Jars;
import visionCore.util.Zipper;

/**
 * JarShrink's command-line entry point: parses arguments, locates the JDK,
 * and delegates the actual shrinking to {@link JarShrinker}.
 *
 * @author Deconimus
 */
public class Main {

    /** Directory of the running program, and the resolved JDK home. */
    public static String abspath, javaHome;

    /** Parsed command-line options: input jar, output jar, retained roots, temp dir. */
    public static String jar, out, keep[], tmpdir;

    /** Output verbosity flags (see -s/-status and -n/-nolist). */
    public static boolean printStatus, printDependencyList;

    static {
        printStatus = false;
        printDependencyList = true;
    }

    /**
     * Program entry point.
     *
     * @param args command-line arguments; the first is the jar file, the rest are options
     */
    public static void main(String[] args) {

        if (args.length == 0 || (args[0].trim().equals("-h") || args[0].trim().endsWith("-help"))) {
            help();
            return;
        }

        setAbspath();
        setJavaHome();

        parseArgs(args);

        if (jar == null) {
            System.out.println("Jar not specified. Run \"-help\" for more information.");
            return;
        }

        File jarFile = new File(jar);

        if (!jarFile.exists()) {
            System.out.println("Jar not found.");
            return;
        }

        JarShrinker shrinker = new JarShrinker(new File(tmpdir));
        shrinker.setPrintStatus(printStatus);
        shrinker.setPrintDependencyList(printDependencyList);

        try {
            // Mirror console output into log.txt next to the program.
            PrintStream logOut = new PrintStream(new FileOutputStream(abspath + File.separator + "log.txt"));
            PrintStream multiOut = new MultiPrintStream(System.out, logOut);
            shrinker.setPrintStream(multiOut);
        } catch (FileNotFoundException e) {
            // Logging is best-effort: keep going with console-only output, but say why.
            System.err.println("Warning: could not create log file, continuing without it: " + e.getMessage());
        }

        shrinker.shrink(jarFile, new File(out), keep);
    }

    /**
     * Parses command-line options into the static fields and applies defaults
     * for {@link #out} and {@link #tmpdir} when they were not given.
     *
     * @param args the raw command-line arguments
     */
    private static void parseArgs(String[] args) {

        if (args.length <= 0) {
            return;
        }

        jar = cleanArg(args[0]);

        List<String> keep = new ArrayList<String>();

        for (int i = 1; i < args.length; i++) {

            String arg = cleanArg(args[i]);
            // Peek at the following token as a potential option value; option-like
            // tokens (starting with '-') are never treated as values.
            String nextArg = (i < args.length - 1) ? cleanArg(args[i + 1]) : null;
            if (nextArg != null && nextArg.startsWith("-")) {
                nextArg = null;
            }

            if (arg.equals("-s") || arg.equals("-status")) {
                printStatus = true;
            } else if (arg.equals("-n") || arg.equals("-nolist")) {
                printDependencyList = false;
            } else if (nextArg != null) {

                if (arg.equals("-o") || arg.equals("-out")) {
                    out = nextArg;
                    i++;
                } else if (arg.equals("-k") || arg.equals("-keep")) {
                    // Normalize package globs: strip trailing '*' wildcards, then
                    // collapse a trailing ".." to "." so "com.foo.*" and "com.foo."
                    // are treated alike.
                    while (nextArg.endsWith("*")) {
                        nextArg = nextArg.substring(0, nextArg.length() - 1);
                    }
                    while (nextArg.endsWith("..")) {
                        nextArg = nextArg.substring(0, nextArg.length() - 1);
                    }
                    keep.add(nextArg);
                    i++;
                } else if (arg.equals("-t") || arg.equals("-tmp")) {
                    tmpdir = nextArg;
                    i++;
                }
            }
        }

        if (out == null || out.isEmpty()) {
            // Default output: "<input-without-extension>_shrunken.jar".
            out = jar.substring(0, Math.max(0, jar.lastIndexOf('.'))) + "_shrunken.jar";
        }
        if (tmpdir == null || tmpdir.isEmpty()) {
            tmpdir = abspath;
        }

        Main.keep = keep.toArray(new String[0]);
    }

    /**
     * Trims an argument and removes one pair of surrounding double quotes, if present.
     *
     * @param arg the raw argument
     * @return the cleaned argument
     */
    private static String cleanArg(String arg) {

        arg = arg.trim();
        // Length check prevents substring(1, 0) when the argument is a lone '"'.
        if (arg.length() > 1 && arg.startsWith("\"") && arg.endsWith("\"")) {
            arg = arg.substring(1, arg.length() - 1).trim();
        }
        return arg;
    }

    /** Prints usage information to stdout. */
    private static void help() {

        System.out.println("\nJarShrink by Deconimus\n");
        System.out.println("Grammar:\n");
        System.out.println("\tjarShrink <jarFile> [<argumentName> <argumentValue>]");
        System.out.println("\nArguments:\n");
        System.out.println("\t-o | -out\tSpecifies the output-file for the newly created jar.");
        System.out.println("\t-k | -keep\tSpecifies a package or class that will be retained together with it's");
        System.out.println("\t\t\tdependencies. Can be called multiple times.");
        System.out.println("\t-s | -status\tPrint status information.");
        System.out.println("\t-n | -nolist\tDon't print the dependency list.");
        System.out.println();
    }

    /**
     * Resolves {@link #abspath} to the program's install directory, unwrapping
     * an IDE "bin" folder or a packaged jar location.
     */
    private static void setAbspath() {

        try {
            abspath = new File(Main.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getAbsolutePath();

            if (abspath.endsWith(File.separator + "bin")) {
                abspath = abspath.substring(0, abspath.indexOf(File.separator + "bin"));
            }
            if (abspath.endsWith(".jar")) {
                abspath = new File(abspath).getParentFile().getAbsolutePath();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Resolves {@link #javaHome} to a JDK directory: prefers the parent "jdk"
     * of a running JRE, otherwise scans sibling directories for the highest
     * versioned "jdk*" folder.
     */
    private static void setJavaHome() {

        File home = new File(System.getProperty("java.home"));

        if (home.getName().equals("jre") && home.getParentFile().getName().startsWith("jdk")) {
            home = home.getParentFile();
        } else if (!home.getName().startsWith("jdk")) {

            File d = home.getParentFile();
            long ver = -1L;

            // listFiles() returns null on I/O error or if d is not a directory;
            // fall back to the original home in that case instead of throwing NPE.
            File[] siblings = (d != null) ? d.listFiles() : null;
            if (siblings != null) {
                for (File f : siblings) {
                    if (f.isDirectory() && f.getName().startsWith("jdk")) {
                        // Crude numeric ranking of the version suffix, e.g. "1.8.0_252" -> 180252.
                        long v = -1L;
                        try {
                            v = (long) Double.parseDouble(f.getName().substring(3).replace(".", "").replace("_", ""));
                        } catch (Exception ignored) {
                            // Non-numeric suffix: leave v at -1 so this candidate never wins.
                        }
                        if (v > ver) {
                            ver = v;
                            home = f;
                        }
                    }
                }
            }
        }

        javaHome = home.getAbsolutePath();
    }
}
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * ReportQuery.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.admanager.axis.v202108; /** * A {@code ReportQuery} object allows you to specify the selection * criteria for * generating a report. Only reports with at least one {@link * Column} are supported. */ public class ReportQuery implements java.io.Serializable { /* The list of break-down types being requested in the report. * The generated report will contain the dimensions in the same order * as requested. * This field is required. */ private com.google.api.ads.admanager.axis.v202108.Dimension[] dimensions; /* The ad unit view for the report. Defaults to {@link AdUnitView#TOP_LEVEL}. */ private com.google.api.ads.admanager.axis.v202108.ReportQueryAdUnitView adUnitView; /* The list of trafficking statistics and revenue information * being requested * in the report. The generated report will contain the * columns in the same * order as requested. This field is required. */ private com.google.api.ads.admanager.axis.v202108.Column[] columns; /* The list of break-down attributes being requested in this report. * Some * {@link DimensionAttribute} values can only be used * with certain * {@link Dimension} values that must be included in * the {@link #dimensions} * attribute. 
The generated report will contain the attributes * in the same * order as requested. */ private com.google.api.ads.admanager.axis.v202108.DimensionAttribute[] dimensionAttributes; /* The list of {@link CustomField#id} being requested in this * report. * To add a {@link CustomField} to the report, you must * include * its corresponding {@link Dimension}, determined by * the {@link CustomField#entityType}, * as a {@link #dimensions dimension}. * * <table> * <tr> * <th scope="col" colspan="2">{@link CustomFieldEntityType#entityType}</th> * </tr> * <tr> * <td>{@link CustomFieldEntityType#LINE_ITEM}</td><td>{@link * Dimension#LINE_ITEM_NAME}</td> * </tr> * <tr> * <td>{@link CustomFieldEntityType#ORDER}</td><td>{@link * Dimension#ORDER_NAME}</td> * </tr> * <tr> * <td>{@link CustomFieldEntityType#CREATIVE}</td><td>{@link * Dimension#CREATIVE_NAME}</td> * </tr> * </table> */ private long[] customFieldIds; /* The list of content CMS metadata key {@link CmsMetadataKey#id * IDs} * being requested in this report. Each of these IDs * must have been defined in the * {@link CmsMetadataKey CMS metadata key}. This will * include * dimensions in the form of {@code CMS_METADATA_KEY[id]_ID} * and * {@code CMS_METADATA_KEY[id]_VALUE} where where {@code * ID} is the ID of * the {@link CmsMetadataValue#id CMS metadata value} * and {@code VALUE} is the * {@link CmsMetadataValue#valueName name}. * <p> * To add {@link CmsMetadataKey#id IDs}, you must include * {@link Dimension#CMS_METADATA} * in {@link #dimensions}, and specify a non-empty list * of content CMS metadata key IDs. * The order of content CMS metadata columns in the report * correspond to the place of * {@link Dimension#CMS_METADATA} in {@link #dimensions}. 
* For example, if {@link #dimensions} contains the following dimensions * in the order: * {@link Dimension#ADVERTISER_NAME}, * {@link Dimension#CMS_METADATA} and {@link Dimension#COUNTRY_NAME}, * and * {@link #cmsMetadataKeyIds} contains the following * IDs in the * order: 1001 and 1002. The order of dimensions in the * report will be: * Dimension.ADVERTISER_NAME, * Dimension.CMS_METADATA_KEY[1001]_VALUE, * Dimension.CMS_METADATA_KEY[1002]_VALUE, * Dimension.COUNTRY_NAME, * Dimension.ADVERTISER_ID, * Dimension.CMS_METADATA_KEY[1001]_ID, * Dimension.CMS_METADATA_KEY[1002]_ID, * Dimension.COUNTRY_CRITERIA_ID */ private long[] cmsMetadataKeyIds; /* The list of custom dimension custom targeting key {@link CustomTargetingKey#id * IDs} being * requested in this report. This will include dimensions * in the form of * {@code TOP_LEVEL_DIMENSION_KEY[id]_ID} and {@code * TOP_LEVEL_DIMENSION_KEY[id]_VALUE} where * {@code ID} is the ID of the {@link CustomTargetingValue#id * custom targeting value} and * {@code VALUE} is the {@link CustomTargetingValue#name * name}. * * <p>To add {@link CustomTargetingKey#id IDs}, you must * include * {@link Dimension#CUSTOM_DIMENSION} in {@link #dimensions}, * and specify a non-empty list of * custom targeting key IDs. The order of cusotm dimension * columns in the report correspond to the * place of {@link Dimension#CUSTOM_DIMENSION} in {@link * #dimensions}. For example, if * {@link #dimensions} contains the following dimensions * in the order: * {@link Dimension#ADVERTISER_NAME}, {@link Dimension#CUSTOM_DIMENSION} * and * {@link Dimension#COUNTRY_NAME}, and {@link #customCriteriaCustomTargetingKeyIds} * contains the * following IDs in the order: 1001 and 1002. 
The order * of dimensions in the report will be: * Dimension.ADVERTISER_NAME, Dimension.TOP_LEVEL_DIMENSION_KEY[1001]_VALUE, * Dimension.TOP_LEVEL_DIMENSION_KEY[1002]_VALUE, Dimension.COUNTRY_NAME, * Dimension.ADVERTISER_ID, * Dimension.TOP_LEVEL_DIMENSION_KEY[1001]_ID, Dimension.TOP_LEVEL_DIMENSION_KEY[1002]_ID, * Dimension.COUNTRY_CRITERIA_ID. */ private long[] customDimensionKeyIds; /* The start date from which the reporting information is gathered. * The * {@code ReportQuery#dateRangeType} field must be set * to * {@link DateRangeType#CUSTOM_DATE} in order to use * this. */ private com.google.api.ads.admanager.axis.v202108.Date startDate; /* The end date upto which the reporting information is gathered. * The * {@code ReportQuery#dateRangeType} field must be set * to * {@link DateRangeType#CUSTOM_DATE} in order to use * this. */ private com.google.api.ads.admanager.axis.v202108.Date endDate; /* The period of time for which the reporting data is being generated. * In * order to define custom time periods, set this to * {@link DateRangeType#CUSTOM_DATE}. If set to {@link * DateRangeType#CUSTOM_DATE}, then * {@link ReportQuery#startDate} and {@link ReportQuery#endDate} * will be used. */ private com.google.api.ads.admanager.axis.v202108.DateRangeType dateRangeType; /* Specifies a filter to use for reporting on data. This filter * will be used * in conjunction (joined with an AND statement) with * the date range selected * through {@link #dateRangeType}, {@link #startDate}, * and {@link #endDate}. 
* * The syntax currently allowed for {@link Statement#query} * is<br> * <code> [WHERE <condition> {AND <condition> ...}]<code><br> * <p><condition></code><br> * &nbsp;&nbsp;&nbsp;&nbsp; <code>:= <property> = <value></code><br> * <code><condition></code><br> * &nbsp;&nbsp;&nbsp;&nbsp; <code>:= <property> = <bind * variable></code><br> * <code><condition> := <property> IN <list></code><br> * <code><bind variable> := :<name></code><br> * </code> where property is the enumeration name of * a {@link Dimension} that * can be filtered. * <p> * For example, the statement "WHERE LINE_ITEM_ID IN * (34344, 23235)" can be used * to generate a report for a specific set of line items * <p> * Filtering on IDs is highly recommended over filtering * on names, especially * for geographical entities. When filtering on names, * matching is case * sensitive. */ private com.google.api.ads.admanager.axis.v202108.Statement statement; /* The currency for Ad Exchange revenue metrics. This field is * only valid for Ad Exchange metrics, * and an exception will be thrown if this field is used * with non-Ad Exchange metrics. Defaults to * the network currency if left {@code null}. The supported * currency codes can be found in * <a href="https://support.google.com/adxseller/answer/6019533"> * this Help Center article.</a> */ private java.lang.String adxReportCurrency; /* Gets the {@link TimeZoneType} for this report, which determines * the time zone used for the * report's date range. Defaults to {@link TimeZoneType.PUBLISHER}. 
*/ private com.google.api.ads.admanager.axis.v202108.TimeZoneType timeZoneType; public ReportQuery() { } public ReportQuery( com.google.api.ads.admanager.axis.v202108.Dimension[] dimensions, com.google.api.ads.admanager.axis.v202108.ReportQueryAdUnitView adUnitView, com.google.api.ads.admanager.axis.v202108.Column[] columns, com.google.api.ads.admanager.axis.v202108.DimensionAttribute[] dimensionAttributes, long[] customFieldIds, long[] cmsMetadataKeyIds, long[] customDimensionKeyIds, com.google.api.ads.admanager.axis.v202108.Date startDate, com.google.api.ads.admanager.axis.v202108.Date endDate, com.google.api.ads.admanager.axis.v202108.DateRangeType dateRangeType, com.google.api.ads.admanager.axis.v202108.Statement statement, java.lang.String adxReportCurrency, com.google.api.ads.admanager.axis.v202108.TimeZoneType timeZoneType) { this.dimensions = dimensions; this.adUnitView = adUnitView; this.columns = columns; this.dimensionAttributes = dimensionAttributes; this.customFieldIds = customFieldIds; this.cmsMetadataKeyIds = cmsMetadataKeyIds; this.customDimensionKeyIds = customDimensionKeyIds; this.startDate = startDate; this.endDate = endDate; this.dateRangeType = dateRangeType; this.statement = statement; this.adxReportCurrency = adxReportCurrency; this.timeZoneType = timeZoneType; } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this.getClass()) .omitNullValues() .add("adUnitView", getAdUnitView()) .add("adxReportCurrency", getAdxReportCurrency()) .add("cmsMetadataKeyIds", getCmsMetadataKeyIds()) .add("columns", getColumns()) .add("customDimensionKeyIds", getCustomDimensionKeyIds()) .add("customFieldIds", getCustomFieldIds()) .add("dateRangeType", getDateRangeType()) .add("dimensionAttributes", getDimensionAttributes()) .add("dimensions", getDimensions()) .add("endDate", getEndDate()) .add("startDate", getStartDate()) .add("statement", getStatement()) .add("timeZoneType", getTimeZoneType()) .toString(); } /** * Gets 
the dimensions value for this ReportQuery. * * @return dimensions * The list of break-down types being requested in the report. * The generated report will contain the dimensions in the same order * as requested. * This field is required. */ public com.google.api.ads.admanager.axis.v202108.Dimension[] getDimensions() { return dimensions; } /** * Sets the dimensions value for this ReportQuery. * * @param dimensions * The list of break-down types being requested in the report. * The generated report will contain the dimensions in the same order * as requested. * This field is required. */ public void setDimensions(com.google.api.ads.admanager.axis.v202108.Dimension[] dimensions) { this.dimensions = dimensions; } public com.google.api.ads.admanager.axis.v202108.Dimension getDimensions(int i) { return this.dimensions[i]; } public void setDimensions(int i, com.google.api.ads.admanager.axis.v202108.Dimension _value) { this.dimensions[i] = _value; } /** * Gets the adUnitView value for this ReportQuery. * * @return adUnitView * The ad unit view for the report. Defaults to {@link AdUnitView#TOP_LEVEL}. */ public com.google.api.ads.admanager.axis.v202108.ReportQueryAdUnitView getAdUnitView() { return adUnitView; } /** * Sets the adUnitView value for this ReportQuery. * * @param adUnitView * The ad unit view for the report. Defaults to {@link AdUnitView#TOP_LEVEL}. */ public void setAdUnitView(com.google.api.ads.admanager.axis.v202108.ReportQueryAdUnitView adUnitView) { this.adUnitView = adUnitView; } /** * Gets the columns value for this ReportQuery. * * @return columns * The list of trafficking statistics and revenue information * being requested * in the report. The generated report will contain the * columns in the same * order as requested. This field is required. */ public com.google.api.ads.admanager.axis.v202108.Column[] getColumns() { return columns; } /** * Sets the columns value for this ReportQuery. 
     * @param columns
     *     The trafficking statistic and revenue columns being requested in
     *     the report. The generated report contains the columns in the
     *     requested order. This field is required.
     */
    public void setColumns(com.google.api.ads.admanager.axis.v202108.Column[] columns) {
        this.columns = columns;
    }

    /** Returns the requested report column at index {@code i}. */
    public com.google.api.ads.admanager.axis.v202108.Column getColumns(int i) {
        return this.columns[i];
    }

    /** Replaces the requested report column at index {@code i}. */
    public void setColumns(int i, com.google.api.ads.admanager.axis.v202108.Column _value) {
        this.columns[i] = _value;
    }


    /**
     * Gets the dimensionAttributes value for this ReportQuery.
     *
     * @return dimensionAttributes
     *     The break-down attributes requested in this report, in the
     *     requested order. Some {@link DimensionAttribute} values can only be
     *     used together with certain {@link Dimension} values, which must then
     *     be present in {@link #dimensions}.
     */
    public com.google.api.ads.admanager.axis.v202108.DimensionAttribute[] getDimensionAttributes() {
        return dimensionAttributes;
    }


    /**
     * Sets the dimensionAttributes value for this ReportQuery.
     *
     * @param dimensionAttributes
     *     The break-down attributes requested in this report, in the
     *     requested order. Some {@link DimensionAttribute} values can only be
     *     used together with certain {@link Dimension} values, which must then
     *     be present in {@link #dimensions}.
     */
    public void setDimensionAttributes(com.google.api.ads.admanager.axis.v202108.DimensionAttribute[] dimensionAttributes) {
        this.dimensionAttributes = dimensionAttributes;
    }

    /** Returns the requested dimension attribute at index {@code i}. */
    public com.google.api.ads.admanager.axis.v202108.DimensionAttribute getDimensionAttributes(int i) {
        return this.dimensionAttributes[i];
    }

    /** Replaces the requested dimension attribute at index {@code i}. */
    public void setDimensionAttributes(int i, com.google.api.ads.admanager.axis.v202108.DimensionAttribute _value) {
        this.dimensionAttributes[i] = _value;
    }


    /**
     * Gets the customFieldIds value for this ReportQuery.
     *
     * @return customFieldIds
     *     The {@link CustomField#id} values requested in this report. To add
     *     a {@link CustomField} to the report, {@link #dimensions} must also
     *     include the {@link Dimension} matching the field's
     *     {@link CustomField#entityType}:
     *     {@code LINE_ITEM} requires {@link Dimension#LINE_ITEM_NAME},
     *     {@code ORDER} requires {@link Dimension#ORDER_NAME}, and
     *     {@code CREATIVE} requires {@link Dimension#CREATIVE_NAME}.
     */
    public long[] getCustomFieldIds() {
        return customFieldIds;
    }


    /**
     * Sets the customFieldIds value for this ReportQuery.
     *
     * @param customFieldIds
     *     The {@link CustomField#id} values requested in this report. To add
     *     a {@link CustomField} to the report, {@link #dimensions} must also
     *     include the {@link Dimension} matching the field's
     *     {@link CustomField#entityType}:
     *     {@code LINE_ITEM} requires {@link Dimension#LINE_ITEM_NAME},
     *     {@code ORDER} requires {@link Dimension#ORDER_NAME}, and
     *     {@code CREATIVE} requires {@link Dimension#CREATIVE_NAME}.
     */
    public void setCustomFieldIds(long[] customFieldIds) {
        this.customFieldIds = customFieldIds;
    }

    /** Returns the requested custom field ID at index {@code i}. */
    public long getCustomFieldIds(int i) {
        return this.customFieldIds[i];
    }

    /** Replaces the requested custom field ID at index {@code i}. */
    public void setCustomFieldIds(int i, long _value) {
        this.customFieldIds[i] = _value;
    }


    /**
     * Gets the cmsMetadataKeyIds value for this ReportQuery.
     *
     * @return cmsMetadataKeyIds
     *     The content CMS metadata key {@link CmsMetadataKey#id IDs} requested
     *     in this report; each must be defined as a {@link CmsMetadataKey CMS
     *     metadata key}. Requesting them adds dimensions of the form
     *     {@code CMS_METADATA_KEY[id]_ID} and
     *     {@code CMS_METADATA_KEY[id]_VALUE}, where {@code ID} is the
     *     {@link CmsMetadataValue#id CMS metadata value ID} and {@code VALUE}
     *     is the {@link CmsMetadataValue#valueName name}.
     *     <p>
     *     To use this field, {@link Dimension#CMS_METADATA} must be present in
     *     {@link #dimensions} and this list must be non-empty. The position of
     *     the generated CMS metadata columns in the report corresponds to the
     *     position of {@link Dimension#CMS_METADATA} in {@link #dimensions}.
     */
    public long[] getCmsMetadataKeyIds() {
        return cmsMetadataKeyIds;
    }


    /**
     * Sets the cmsMetadataKeyIds value for this ReportQuery.
     *
     * @param cmsMetadataKeyIds
     *     The content CMS metadata key {@link CmsMetadataKey#id IDs} requested
     *     in this report; each must be defined as a {@link CmsMetadataKey CMS
     *     metadata key}. Requesting them adds dimensions of the form
     *     {@code CMS_METADATA_KEY[id]_ID} and
     *     {@code CMS_METADATA_KEY[id]_VALUE}, where {@code ID} is the
     *     {@link CmsMetadataValue#id CMS metadata value ID} and {@code VALUE}
     *     is the {@link CmsMetadataValue#valueName name}.
     *     <p>
     *     To use this field, {@link Dimension#CMS_METADATA} must be present in
     *     {@link #dimensions} and this list must be non-empty. The position of
     *     the generated CMS metadata columns in the report corresponds to the
     *     position of {@link Dimension#CMS_METADATA} in {@link #dimensions}.
     */
    public void setCmsMetadataKeyIds(long[] cmsMetadataKeyIds) {
        this.cmsMetadataKeyIds = cmsMetadataKeyIds;
    }

    /** Returns the requested CMS metadata key ID at index {@code i}. */
    public long getCmsMetadataKeyIds(int i) {
        return this.cmsMetadataKeyIds[i];
    }

    /** Replaces the requested CMS metadata key ID at index {@code i}. */
    public void setCmsMetadataKeyIds(int i, long _value) {
        this.cmsMetadataKeyIds[i] = _value;
    }


    /**
     * Gets the customDimensionKeyIds value for this ReportQuery.
     *
     * @return customDimensionKeyIds
     *     The custom dimension custom targeting key
     *     {@link CustomTargetingKey#id IDs} requested in this report.
     *     Requesting them adds dimensions of the form
     *     {@code TOP_LEVEL_DIMENSION_KEY[id]_ID} and
     *     {@code TOP_LEVEL_DIMENSION_KEY[id]_VALUE}, where {@code ID} is the
     *     {@link CustomTargetingValue#id custom targeting value ID} and
     *     {@code VALUE} is the {@link CustomTargetingValue#name name}.
     *     <p>
     *     To use this field, {@link Dimension#CUSTOM_DIMENSION} must be
     *     present in {@link #dimensions} and this list must be non-empty. The
     *     position of the generated custom dimension columns in the report
     *     corresponds to the position of {@link Dimension#CUSTOM_DIMENSION}
     *     in {@link #dimensions}.
     */
    public long[] getCustomDimensionKeyIds() {
        return customDimensionKeyIds;
    }


    /**
     * Sets the customDimensionKeyIds value for this ReportQuery.
     *
     * @param customDimensionKeyIds
     *     The custom dimension custom targeting key
     *     {@link CustomTargetingKey#id IDs} requested in this report.
     *     Requesting them adds dimensions of the form
     *     {@code TOP_LEVEL_DIMENSION_KEY[id]_ID} and
     *     {@code TOP_LEVEL_DIMENSION_KEY[id]_VALUE}, where {@code ID} is the
     *     {@link CustomTargetingValue#id custom targeting value ID} and
     *     {@code VALUE} is the {@link CustomTargetingValue#name name}.
     *     <p>
     *     To use this field, {@link Dimension#CUSTOM_DIMENSION} must be
     *     present in {@link #dimensions} and this list must be non-empty. The
     *     position of the generated custom dimension columns in the report
     *     corresponds to the position of {@link Dimension#CUSTOM_DIMENSION}
     *     in {@link #dimensions}.
     */
    public void setCustomDimensionKeyIds(long[] customDimensionKeyIds) {
        this.customDimensionKeyIds = customDimensionKeyIds;
    }

    /** Returns the requested custom dimension key ID at index {@code i}. */
    public long getCustomDimensionKeyIds(int i) {
        return this.customDimensionKeyIds[i];
    }

    /** Replaces the requested custom dimension key ID at index {@code i}. */
    public void setCustomDimensionKeyIds(int i, long _value) {
        this.customDimensionKeyIds[i] = _value;
    }


    /**
     * Gets the startDate value for this ReportQuery.
     *
     * @return startDate
     *     The start date from which the reporting information is gathered.
     *     Only used when {@code ReportQuery#dateRangeType} is set to
     *     {@link DateRangeType#CUSTOM_DATE}.
     */
    public com.google.api.ads.admanager.axis.v202108.Date getStartDate() {
        return startDate;
    }


    /**
     * Sets the startDate value for this ReportQuery.
     *
     * @param startDate
     *     The start date from which the reporting information is gathered.
     *     Only used when {@code ReportQuery#dateRangeType} is set to
     *     {@link DateRangeType#CUSTOM_DATE}.
     */
    public void setStartDate(com.google.api.ads.admanager.axis.v202108.Date startDate) {
        this.startDate = startDate;
    }


    /**
     * Gets the endDate value for this ReportQuery.
     *
     * @return endDate
     *     The end date up to which the reporting information is gathered.
     *     Only used when {@code ReportQuery#dateRangeType} is set to
     *     {@link DateRangeType#CUSTOM_DATE}.
     */
    public com.google.api.ads.admanager.axis.v202108.Date getEndDate() {
        return endDate;
    }


    /**
     * Sets the endDate value for this ReportQuery.
     *
     * @param endDate
     *     The end date up to which the reporting information is gathered.
     *     Only used when {@code ReportQuery#dateRangeType} is set to
     *     {@link DateRangeType#CUSTOM_DATE}.
     */
    public void setEndDate(com.google.api.ads.admanager.axis.v202108.Date endDate) {
        this.endDate = endDate;
    }


    /**
     * Gets the dateRangeType value for this ReportQuery.
     *
     * @return dateRangeType
     *     The period of time for which the reporting data is generated. Set
     *     this to {@link DateRangeType#CUSTOM_DATE} to define a custom period;
     *     {@link ReportQuery#startDate} and {@link ReportQuery#endDate} are
     *     then used.
     */
    public com.google.api.ads.admanager.axis.v202108.DateRangeType getDateRangeType() {
        return dateRangeType;
    }


    /**
     * Sets the dateRangeType value for this ReportQuery.
     *
     * @param dateRangeType
     *     The period of time for which the reporting data is generated. Set
     *     this to {@link DateRangeType#CUSTOM_DATE} to define a custom period;
     *     {@link ReportQuery#startDate} and {@link ReportQuery#endDate} are
     *     then used.
     */
    public void setDateRangeType(com.google.api.ads.admanager.axis.v202108.DateRangeType dateRangeType) {
        this.dateRangeType = dateRangeType;
    }


    /**
     * Gets the statement value for this ReportQuery.
     *
     * @return statement
     *     A filter applied to the report data, combined (AND) with the date
     *     range selected through {@link #dateRangeType}, {@link #startDate}
     *     and {@link #endDate}. The allowed {@link Statement#query} syntax is
     *     {@code [WHERE <condition> {AND <condition> ...}]} where a condition
     *     is {@code <property> = <value>}, {@code <property> = <bind
     *     variable>} (a bind variable is {@code :<name>}) or
     *     {@code <property> IN <list>}, and {@code property} is the
     *     enumeration name of a filterable {@link Dimension}.
     *     <p>
     *     For example, "WHERE LINE_ITEM_ID IN (34344, 23235)" generates a
     *     report for a specific set of line items.
     *     <p>
     *     Filtering on IDs is highly recommended over filtering on names,
     *     especially for geographical entities; name matching is case
     *     sensitive.
     */
    public com.google.api.ads.admanager.axis.v202108.Statement getStatement() {
        return statement;
    }


    /**
     * Sets the statement value for this ReportQuery.
     *
     * @param statement
     *     A filter applied to the report data, combined (AND) with the date
     *     range selected through {@link #dateRangeType}, {@link #startDate}
     *     and {@link #endDate}. The allowed {@link Statement#query} syntax is
     *     {@code [WHERE <condition> {AND <condition> ...}]} where a condition
     *     is {@code <property> = <value>}, {@code <property> = <bind
     *     variable>} (a bind variable is {@code :<name>}) or
     *     {@code <property> IN <list>}, and {@code property} is the
     *     enumeration name of a filterable {@link Dimension}.
     *     <p>
     *     For example, "WHERE LINE_ITEM_ID IN (34344, 23235)" generates a
     *     report for a specific set of line items.
     *     <p>
     *     Filtering on IDs is highly recommended over filtering on names,
     *     especially for geographical entities; name matching is case
     *     sensitive.
     */
    public void setStatement(com.google.api.ads.admanager.axis.v202108.Statement statement) {
        this.statement = statement;
    }


    /**
     * Gets the adxReportCurrency value for this ReportQuery.
     *
     * @return adxReportCurrency
     *     The currency for Ad Exchange revenue metrics. Only valid for Ad
     *     Exchange metrics; an exception is thrown if it is used with non-Ad
     *     Exchange metrics. Defaults to the network currency if left
     *     {@code null}. The supported currency codes are listed in
     *     <a href="https://support.google.com/adxseller/answer/6019533">this
     *     Help Center article</a>.
     */
    public java.lang.String getAdxReportCurrency() {
        return adxReportCurrency;
    }


    /**
     * Sets the adxReportCurrency value for this ReportQuery.
     *
     * @param adxReportCurrency
     *     The currency for Ad Exchange revenue metrics. Only valid for Ad
     *     Exchange metrics; an exception is thrown if it is used with non-Ad
     *     Exchange metrics. Defaults to the network currency if left
     *     {@code null}. The supported currency codes are listed in
     *     <a href="https://support.google.com/adxseller/answer/6019533">this
     *     Help Center article</a>.
     */
    public void setAdxReportCurrency(java.lang.String adxReportCurrency) {
        this.adxReportCurrency = adxReportCurrency;
    }


    /**
     * Gets the timeZoneType value for this ReportQuery.
     *
     * @return timeZoneType
     *     The {@link TimeZoneType} for this report, which determines the time
     *     zone used for the report's date range. Defaults to
     *     {@link TimeZoneType#PUBLISHER}.
     */
    public com.google.api.ads.admanager.axis.v202108.TimeZoneType getTimeZoneType() {
        return timeZoneType;
    }


    /**
     * Sets the timeZoneType value for this ReportQuery.
     *
     * @param timeZoneType
     *     The {@link TimeZoneType} for this report, which determines the time
     *     zone used for the report's date range. Defaults to
     *     {@link TimeZoneType#PUBLISHER}.
     */
    public void setTimeZoneType(com.google.api.ads.admanager.axis.v202108.TimeZoneType timeZoneType) {
        this.timeZoneType = timeZoneType;
    }

    // Cycle guard used by the generated equals() below: holds the object
    // currently being compared so self-referential object graphs terminate.
    private java.lang.Object __equalsCalc = null;

    /**
     * Generated field-by-field equality; array fields are compared with
     * {@link java.util.Arrays#equals}. Synchronized because the cycle-guard
     * field {@code __equalsCalc} is shared mutable state.
     */
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ReportQuery)) return false;
        ReportQuery other = (ReportQuery) obj;
        // NOTE(review): null/identity checks after the instanceof test are
        // redundant (instanceof is false for null) but are part of the
        // standard Axis-generated template; left unchanged.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Already inside a comparison against this object: treat it as
            // equal iff it is the same reference, breaking the cycle.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true && 
            ((this.dimensions==null && other.getDimensions()==null) || 
             (this.dimensions!=null &&
              java.util.Arrays.equals(this.dimensions, other.getDimensions()))) &&
            ((this.adUnitView==null && other.getAdUnitView()==null) || 
             (this.adUnitView!=null &&
              this.adUnitView.equals(other.getAdUnitView()))) &&
            ((this.columns==null && other.getColumns()==null) || 
             (this.columns!=null &&
              java.util.Arrays.equals(this.columns, other.getColumns()))) &&
            ((this.dimensionAttributes==null && other.getDimensionAttributes()==null) || 
             (this.dimensionAttributes!=null &&
              java.util.Arrays.equals(this.dimensionAttributes, other.getDimensionAttributes()))) &&
            ((this.customFieldIds==null && other.getCustomFieldIds()==null) || 
             (this.customFieldIds!=null &&
              java.util.Arrays.equals(this.customFieldIds, other.getCustomFieldIds()))) &&
            ((this.cmsMetadataKeyIds==null && other.getCmsMetadataKeyIds()==null) || 
             (this.cmsMetadataKeyIds!=null &&
              java.util.Arrays.equals(this.cmsMetadataKeyIds, other.getCmsMetadataKeyIds()))) &&
            ((this.customDimensionKeyIds==null && other.getCustomDimensionKeyIds()==null) || 
             (this.customDimensionKeyIds!=null &&
              java.util.Arrays.equals(this.customDimensionKeyIds, other.getCustomDimensionKeyIds()))) &&
            ((this.startDate==null && other.getStartDate()==null) || 
             (this.startDate!=null &&
              this.startDate.equals(other.getStartDate()))) &&
            ((this.endDate==null && other.getEndDate()==null) || 
             (this.endDate!=null &&
              this.endDate.equals(other.getEndDate()))) &&
            ((this.dateRangeType==null && other.getDateRangeType()==null) || 
             (this.dateRangeType!=null &&
              this.dateRangeType.equals(other.getDateRangeType()))) &&
            ((this.statement==null && other.getStatement()==null) || 
             (this.statement!=null &&
              this.statement.equals(other.getStatement()))) &&
            ((this.adxReportCurrency==null && other.getAdxReportCurrency()==null) || 
             (this.adxReportCurrency!=null &&
              this.adxReportCurrency.equals(other.getAdxReportCurrency()))) &&
            ((this.timeZoneType==null && other.getTimeZoneType()==null) || 
             (this.timeZoneType!=null &&
              this.timeZoneType.equals(other.getTimeZoneType())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard used by the generated hashCode() below: while true, this
    // object is already on the hash computation stack and contributes 0.
    private boolean __hashCodeCalc = false;

    /**
     * Generated hash code: sums the hash codes of all non-null, non-array
     * field elements (array fields are walked reflectively). Synchronized
     * because the cycle-guard field {@code __hashCodeCalc} is shared mutable
     * state.
     */
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getDimensions() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getDimensions());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getDimensions(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getAdUnitView() != null) {
            _hashCode += getAdUnitView().hashCode();
        }
        if (getColumns() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getColumns());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getColumns(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getDimensionAttributes() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getDimensionAttributes());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getDimensionAttributes(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getCustomFieldIds() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCustomFieldIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCustomFieldIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getCmsMetadataKeyIds() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCmsMetadataKeyIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCmsMetadataKeyIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getCustomDimensionKeyIds() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCustomDimensionKeyIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCustomDimensionKeyIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getStartDate() != null) {
            _hashCode += getStartDate().hashCode();
        }
        if (getEndDate() != null) {
            _hashCode += getEndDate().hashCode();
        }
        if (getDateRangeType() != null) {
            _hashCode += getDateRangeType().hashCode();
        }
        if (getStatement() != null) {
            _hashCode += getStatement().hashCode();
        }
        if (getAdxReportCurrency() != null) {
            _hashCode += getAdxReportCurrency().hashCode();
        }
        if (getTimeZoneType() != null) {
            _hashCode += getTimeZoneType().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata: maps each bean field to its XML element name and schema
    // type for Axis (de)serialization. Field order matters — it mirrors the
    // order of elements in the WSDL-defined ReportQuery complex type.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ReportQuery.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ReportQuery"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("dimensions");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "dimensions"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Dimension"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("adUnitView");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "adUnitView"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "ReportQuery.AdUnitView"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("columns");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "columns"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Column"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("dimensionAttributes");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "dimensionAttributes"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "DimensionAttribute"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("customFieldIds");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "customFieldIds"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("cmsMetadataKeyIds");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "cmsMetadataKeyIds"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("customDimensionKeyIds");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "customDimensionKeyIds"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("startDate");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "startDate"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Date"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("endDate");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "endDate"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Date"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("dateRangeType");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "dateRangeType"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "DateRangeType"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("statement");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "statement"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "Statement"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("adxReportCurrency");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "adxReportCurrency"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("timeZoneType");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "timeZoneType"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202108", "TimeZoneType"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType, 
           java.lang.Class _javaType,  
           javax.xml.namespace.QName _xmlType) {
        return 
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jasper.compiler;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.servlet.jsp.tagext.FunctionInfo;

import org.apache.jasper.JasperException;

/**
 * This class defines internal representation for an EL Expression
 *
 * It currently only defines functions.  It can be expanded to define
 * all the components of an EL expression, if need to.
 *
 * @author Kin-man Chung
 */
abstract class ELNode {

    /**
     * Dispatches this node to the matching {@code visit} overload of the
     * supplied visitor.
     *
     * @param v the visitor to accept
     * @throws JasperException propagated from the visitor
     */
    public abstract void accept(Visitor v) throws JasperException;


    /**
     * Child classes
     */


    /**
     * Represents an EL expression: anything in ${ and }.
     */
    public static class Root extends ELNode {

        // Immutable after construction: the parsed expression and the
        // delimiter type character.
        private final ELNode.Nodes expr;
        private final char type;

        Root(ELNode.Nodes expr, char type) {
            this.expr = expr;
            this.type = type;
        }

        @Override
        public void accept(Visitor v) throws JasperException {
            v.visit(this);
        }

        /** @return the nodes making up the expression body */
        public ELNode.Nodes getExpression() {
            return expr;
        }

        /** @return the expression delimiter type character */
        public char getType() {
            return type;
        }
    }

    /**
     * Represents text outside of EL expression.
     */
    public static class Text extends ELNode {

        private final String text;

        Text(String text) {
            this.text = text;
        }

        @Override
        public void accept(Visitor v) throws JasperException {
            v.visit(this);
        }

        public String getText() {
            return text;
        }
    }

    /**
     * Represents anything in EL expression, other than functions, including
     * function arguments etc
     */
    public static class ELText extends ELNode {

        private final String text;

        ELText(String text) {
            this.text = text;
        }

        @Override
        public void accept(Visitor v) throws JasperException {
            v.visit(this);
        }

        public String getText() {
            return text;
        }
    }

    /**
     * Represents a function
     * Currently only include the prefix and function name, but not its
     * arguments.
     */
    public static class Function extends ELNode {

        // Fixed at parse time.
        private final String prefix;
        private final String name;
        // Resolved later, during validation/translation, hence settable.
        private String uri;
        private FunctionInfo functionInfo;
        private String methodName;
        private String[] parameters;

        Function(String prefix, String name) {
            this.prefix = prefix;
            this.name = name;
        }

        @Override
        public void accept(Visitor v) throws JasperException {
            v.visit(this);
        }

        public String getPrefix() {
            return prefix;
        }

        public String getName() {
            return name;
        }

        public void setUri(String uri) {
            this.uri = uri;
        }

        public String getUri() {
            return uri;
        }

        public void setFunctionInfo(FunctionInfo f) {
            this.functionInfo = f;
        }

        public FunctionInfo getFunctionInfo() {
            return functionInfo;
        }

        public void setMethodName(String methodName) {
            this.methodName = methodName;
        }

        public String getMethodName() {
            return methodName;
        }

        public void setParameters(String[] parameters) {
            this.parameters = parameters;
        }

        public String[] getParameters() {
            return parameters;
        }
    }

    /**
     * An ordered list of ELNode.
     */
    public static class Nodes {

        /* Name used for creating a map for the functions in this
           EL expression, for communication to Generator.
         */
        String mapName = null;        // The function map associated this EL

        private final List<ELNode> list;

        public Nodes() {
            list = new ArrayList<ELNode>();
        }

        public void add(ELNode en) {
            list.add(en);
        }

        /**
         * Visit the nodes in the list with the supplied visitor
         * @param v The visitor used
         */
        public void visit(Visitor v) throws JasperException {
            for (ELNode n : list) {
                n.accept(v);
            }
        }

        public Iterator<ELNode> iterator() {
            return list.iterator();
        }

        public boolean isEmpty() {
            return list.isEmpty();
        }

        /**
         * @return true if the expression contains a ${...}
         */
        public boolean containsEL() {
            for (ELNode n : list) {
                if (n instanceof Root) {
                    return true;
                }
            }
            return false;
        }

        public void setMapName(String name) {
            this.mapName = name;
        }

        public String getMapName() {
            return mapName;
        }
    }

    /*
     * A visitor class for traversing ELNodes
     */
    public static class Visitor {

        public void visit(Root n) throws JasperException {
            n.getExpression().visit(this);
        }

        @SuppressWarnings("unused")
        public void visit(Function n) throws JasperException {
            // NOOP by default
        }

        @SuppressWarnings("unused")
        public void visit(Text n) throws JasperException {
            // NOOP by default
        }

        @SuppressWarnings("unused")
        public void visit(ELText n) throws JasperException {
            // NOOP by default
        }
    }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.monetdbbulkloader; import java.util.List; import java.util.Map; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Counter; import org.pentaho.di.core.SQLStatement; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.database.MonetDBDatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.Repository; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.shared.SharedObjectInterface; import org.pentaho.di.trans.DatabaseImpact; import org.pentaho.di.trans.Trans; 
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.core.ProvidesDatabaseConnectionInformation;
import org.w3c.dom.Node;

/**
 * Metadata (configuration) for the MonetDB bulk loader step: holds the target
 * connection/schema/table, the stream-to-table field mapping and the loader
 * options, and (de)serializes them to XML and to the repository.
 *
 * Created on 20-feb-2007
 *
 * @author Sven Boden
 */
public class MonetDBBulkLoaderMeta extends BaseStepMeta implements StepMetaInterface, ProvidesDatabaseConnectionInformation
{
    private static Class<?> PKG = MonetDBBulkLoaderMeta.class; // for i18n purposes, needed by Translator2!!   $NON-NLS-1$

    /** The database connection name **/
    private String dbConnectionName;

    /** what's the schema for the target? */
    private String schemaName;

    /** what's the table for the target? */
    private String tableName;

    /** Path to the mclient utility */
    private String mClientPath;

    /** Path to the log file */
    private String logFile;

    /** database connection */
    private DatabaseMeta databaseMeta;

    /** Field name of the target table (parallel to fieldStream/fieldFormatOk) */
    private String fieldTable[];

    /** Field name in the stream (parallel to fieldTable) */
    private String fieldStream[];

    /** flag to indicate that the format is OK for MonetDB */
    private boolean fieldFormatOk[];

    /** Encoding to use */
    private String encoding;

    /** Truncate table? */
    private boolean truncate = false;

    /** Auto adjust the table structure? */
    private boolean autoSchema = false;

    /** Auto adjust strings that are too long?
 */
    private boolean autoStringWidths = false;

    public boolean isAutoStringWidths() {
        return autoStringWidths;
    }

    public void setAutoStringWidths(boolean autoStringWidths) {
        this.autoStringWidths = autoStringWidths;
    }

    public boolean isTruncate() {
        return truncate;
    }

    public void setTruncate(boolean truncate) {
        this.truncate = truncate;
    }

    public boolean isAutoSchema() {
        return autoSchema;
    }

    public void setAutoSchema(boolean autoSchema) {
        this.autoSchema = autoSchema;
    }

    /** The number of rows to buffer before passing them over to MonetDB.
     * This number should be non-zero since we need to specify the number of rows we pass.
     * NOTE: stored as a String so it may contain a variable expression. */
    private String bufferSize;

    public MonetDBBulkLoaderMeta() {
        super();
    }

    /**
     * @return Returns the database.
     */
    public DatabaseMeta getDatabaseMeta() {
        return databaseMeta;
    }

    /**
     * @return Returns the database. The loader argument is currently unused.
     */
    public DatabaseMeta getDatabaseMeta(MonetDBBulkLoader loader) {
        return databaseMeta;
    }

    /**
     * @param database The database to set.
     */
    public void setDatabaseMeta(DatabaseMeta database) {
        this.databaseMeta = database;
    }

    /**
     * @return Returns the tableName.
     */
    public String getTableName() {
        return tableName;
    }

    /**
     * @param tableName The tableName to set.
     */
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    /**
     * @return Returns the fieldTable.
     */
    public String[] getFieldTable() {
        return fieldTable;
    }

    /**
     * @param fieldTable The fieldTable to set.
     */
    public void setFieldTable(String[] fieldTable) {
        this.fieldTable = fieldTable;
    }

    /**
     * @return Returns the fieldStream.
     */
    public String[] getFieldStream() {
        return fieldStream;
    }

    /**
     * @param fieldStream The fieldStream to set.
*/ public void setFieldStream(String[] fieldStream) { this.fieldStream = fieldStream; } public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException { readData(stepnode, databases); } public void allocate(int nrvalues) { fieldTable = new String[nrvalues]; fieldStream = new String[nrvalues]; fieldFormatOk = new boolean[nrvalues]; } public Object clone() { MonetDBBulkLoaderMeta retval = (MonetDBBulkLoaderMeta)super.clone(); int nrvalues = fieldTable.length; retval.allocate(nrvalues); for (int i=0;i<nrvalues;i++) { retval.fieldTable[i] = fieldTable[i]; retval.fieldStream[i] = fieldStream[i]; } return retval; } private void readData(Node stepnode, List<? extends SharedObjectInterface> databases) throws KettleXMLException { try { dbConnectionName = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$ databaseMeta = DatabaseMeta.findDatabase(databases, dbConnectionName); bufferSize = XMLHandler.getTagValue(stepnode, "buffer_size"); //$NON-NLS-1$ schemaName = XMLHandler.getTagValue(stepnode, "schema"); //$NON-NLS-1$ tableName = XMLHandler.getTagValue(stepnode, "table"); //$NON-NLS-1$ mClientPath = XMLHandler.getTagValue(stepnode, "mclient_path"); //$NON-NLS-1$ logFile = XMLHandler.getTagValue(stepnode, "log_file"); //$NON-NLS-1$ encoding = XMLHandler.getTagValue(stepnode, "encoding"); //$NON-NLS-1$ truncate = "Y".equals(XMLHandler.getTagValue(stepnode, "truncate")); //$NON-NLS-1$ autoSchema = "Y".equals(XMLHandler.getTagValue(stepnode, "auto_schema")); //$NON-NLS-1$ autoStringWidths = "Y".equals(XMLHandler.getTagValue(stepnode, "auto_string_widths")); //$NON-NLS-1$ int nrvalues = XMLHandler.countNodes(stepnode, "mapping"); //$NON-NLS-1$ allocate(nrvalues); for (int i=0;i<nrvalues;i++) { Node vnode = XMLHandler.getSubNodeByNr(stepnode, "mapping", i); //$NON-NLS-1$ fieldTable[i] = XMLHandler.getTagValue(vnode, "stream_name"); //$NON-NLS-1$ fieldStream[i] = XMLHandler.getTagValue(vnode, "field_name"); 
//$NON-NLS-1$
                if (fieldStream[i]==null) fieldStream[i]=fieldTable[i]; // default: the same name!
                fieldFormatOk[i] = "Y".equalsIgnoreCase(XMLHandler.getTagValue(vnode, "field_format_ok")); //$NON-NLS-1$
            }
        }
        catch(Exception e) {
            throw new KettleXMLException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.UnableToReadStepInfoFromXML"), e); //$NON-NLS-1$
        }
    }

    /** Reset all settings to their defaults (no mappings, no connection). */
    public void setDefault() {
        fieldTable = null;
        databaseMeta = null;
        bufferSize = "100000";
        schemaName = ""; //$NON-NLS-1$
        tableName = BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.DefaultTableName"); //$NON-NLS-1$
        mClientPath = "/usr/local/bin/mclient"; //$NON-NLS-1$
        logFile = ""; //$NON-NLS-1$
        encoding = ""; //$NON-NLS-1$
        truncate = false;
        autoSchema = false;
        autoStringWidths = false;

        allocate(0);
    }

    /** Serialize this step's settings to XML (the inverse of readData). */
    public String getXML() {
        StringBuffer retval = new StringBuffer(300);

        retval.append("    ").append(XMLHandler.addTagValue("connection", dbConnectionName)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        retval.append("    ").append(XMLHandler.addTagValue("buffer_size", bufferSize)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("schema", schemaName)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("table", tableName)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("mclient_path", mClientPath)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("log_file", logFile)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("encoding", encoding)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("truncate", truncate)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("auto_schema", autoSchema)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("    ").append(XMLHandler.addTagValue("auto_string_widths", autoStringWidths)); //$NON-NLS-1$ //$NON-NLS-2$

        // One <mapping> element per stream-field -> table-field pair.
        for (int i=0;i<fieldTable.length;i++) {
            retval.append("      <mapping>").append(Const.CR); //$NON-NLS-1$
            retval.append("        ").append(XMLHandler.addTagValue("stream_name", fieldTable[i])); //$NON-NLS-1$ //$NON-NLS-2$
            retval.append("        ").append(XMLHandler.addTagValue("field_name", fieldStream[i])); //$NON-NLS-1$ //$NON-NLS-2$
            retval.append("        ").append(XMLHandler.addTagValue("field_format_ok", fieldFormatOk[i])); //$NON-NLS-1$ //$NON-NLS-2$
            retval.append("      </mapping>").append(Const.CR); //$NON-NLS-1$
        }

        return retval.toString();
    }

    /** Load this step's settings from the Kettle repository. */
    public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException {
        try {
            databaseMeta = rep.loadDatabaseMetaFromStepAttribute(id_step, "id_connection", databases);
            bufferSize = rep.getStepAttributeString(id_step, "buffer_size"); //$NON-NLS-1$
            dbConnectionName = rep.getStepAttributeString(id_step, "db_connection_name"); //$NON-NLS-1$
            schemaName = rep.getStepAttributeString(id_step, "schema"); //$NON-NLS-1$
            tableName = rep.getStepAttributeString(id_step, "table"); //$NON-NLS-1$
            mClientPath = rep.getStepAttributeString(id_step, "mclient_path"); //$NON-NLS-1$
            logFile = rep.getStepAttributeString(id_step, "log_file"); //$NON-NLS-1$
            encoding = rep.getStepAttributeString(id_step, "encoding"); //$NON-NLS-1$
            truncate = Boolean.parseBoolean(rep.getStepAttributeString(id_step, "truncate")); //$NON-NLS-1$
            autoSchema = Boolean.parseBoolean(rep.getStepAttributeString(id_step, "auto_schema")); //$NON-NLS-1$
            autoStringWidths = Boolean.parseBoolean(rep.getStepAttributeString(id_step, "auto_string_widths")); //$NON-NLS-1$

            int nrvalues = rep.countNrStepAttributes(id_step, "stream_name"); //$NON-NLS-1$

            allocate(nrvalues);

            for (int i=0;i<nrvalues;i++) {
                fieldTable[i] = rep.getStepAttributeString(id_step, i, "stream_name"); //$NON-NLS-1$
                fieldStream[i] = rep.getStepAttributeString(id_step, i, "field_name"); //$NON-NLS-1$
                if (fieldStream[i]==null) fieldStream[i]=fieldTable[i]; // default: the same name
                fieldFormatOk[i] = rep.getStepAttributeBoolean(id_step, i, "field_format_ok"); //$NON-NLS-1$
            }
        }
        catch(Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository"), e); //$NON-NLS-1$
        }
    }

    /** Save this step's settings to the Kettle repository (inverse of readRep). */
    public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException {
        try {
            rep.saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", databaseMeta);
            rep.saveStepAttribute(id_transformation, id_step, "buffer_size", bufferSize); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "db_connection_name", dbConnectionName); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "schema", schemaName); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "table", tableName); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "mclient_path", mClientPath); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "log_file", logFile); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "encoding", encoding); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "truncate", truncate); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "auto_schema", autoSchema); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "auto_string_widths", autoStringWidths); //$NON-NLS-1$

            for (int i=0;i<fieldTable.length;i++) {
                rep.saveStepAttribute(id_transformation, id_step, i, "stream_name", fieldTable[i]); //$NON-NLS-1$
                rep.saveStepAttribute(id_transformation, id_step, i, "field_name", fieldStream[i]); //$NON-NLS-1$
                rep.saveStepAttribute(id_transformation, id_step, i, "field_format_ok", fieldFormatOk[i]); //$NON-NLS-1$
            }

            // Also, save the step-database relationship!
 if (databaseMeta!=null) rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getObjectId());
        }
        catch(Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.UnableToSaveStepInfoToRepository")+id_step, e); //$NON-NLS-1$
        }
    }

    /** This step does not alter the row layout. */
    public void getFields(RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException {
        // Default: nothing changes to rowMeta
    }

    /**
     * Validate the step configuration: connects to the database, verifies the
     * target table and its columns, checks the mapped input fields exist, and
     * reports each result as a CheckResult remark.
     */
    public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) {
        CheckResult cr;
        String error_message = ""; //$NON-NLS-1$

        if (databaseMeta!=null) {
            Database db = new Database(loggingObject, databaseMeta);
            db.shareVariablesWith(transMeta);
            try {
                db.connect();

                if (!Const.isEmpty(tableName)) {
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.TableNameOK"), stepMeta); //$NON-NLS-1$
                    remarks.add(cr);

                    boolean first=true;
                    boolean error_found=false;
                    error_message = ""; //$NON-NLS-1$

                    // Check fields in table
                    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
                                           transMeta.environmentSubstitute(schemaName),
                                           transMeta.environmentSubstitute(tableName));
                    RowMetaInterface r = db.getTableFields(schemaTable);
                    if (r!=null) {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.TableExists"), stepMeta); //$NON-NLS-1$
                        remarks.add(cr);

                        // How about the fields to insert/dateMask in the table?
                        first=true;
                        error_found=false;
                        error_message = ""; //$NON-NLS-1$

                        // Every mapped target field must exist in the table.
                        for (int i=0;i<fieldTable.length;i++) {
                            String field = fieldTable[i];

                            ValueMetaInterface v = r.searchValueMeta(field);
                            if (v==null) {
                                if (first) {
                                    first=false;
                                    error_message+=BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.MissingFieldsToLoadInTargetTable")+Const.CR; //$NON-NLS-1$
                                }
                                error_found=true;
                                error_message+="\t\t"+field+Const.CR; //$NON-NLS-1$
                            }
                        }
                        if (error_found) {
                            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                        } else {
                            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.AllFieldsFoundInTargetTable"), stepMeta); //$NON-NLS-1$
                        }
                        remarks.add(cr);
                    } else {
                        error_message=BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.CouldNotReadTableInfo"); //$NON-NLS-1$
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                        remarks.add(cr);
                    }
                }

                // Look up fields in the input stream <prev>
                if (prev!=null && prev.size()>0) {
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.StepReceivingDatas",prev.size()+""), stepMeta); //$NON-NLS-1$ //$NON-NLS-2$
                    remarks.add(cr);

                    boolean first=true;
                    error_message = ""; //$NON-NLS-1$
                    boolean error_found = false;

                    // Every mapped stream field must be present in the incoming row.
                    for (int i=0;i<fieldStream.length;i++) {
                        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
                        if (v==null) {
                            if (first) {
                                first=false;
                                error_message+=BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.MissingFieldsInInput")+Const.CR; //$NON-NLS-1$
                            }
                            error_found=true;
                            error_message+="\t\t"+fieldStream[i]+Const.CR; //$NON-NLS-1$
                        }
                    }
                    if (error_found) {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                    } else {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.AllFieldsFoundInInput"), stepMeta); //$NON-NLS-1$
                    }
                    remarks.add(cr);
                } else {
                    error_message=BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.MissingFieldsInInput3")+Const.CR; //$NON-NLS-1$
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                    remarks.add(cr);
                }
            }
            catch(KettleException e) {
                error_message = BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.DatabaseErrorOccurred")+e.getMessage(); //$NON-NLS-1$
                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                remarks.add(cr);
            }
            finally {
                db.disconnect();
            }
        } else {
            error_message = BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.InvalidConnection"); //$NON-NLS-1$
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }

        // See if we have input streams leading to this step!
        if (input.length>0) {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.StepReceivingInfoFromOtherSteps"), stepMeta); //$NON-NLS-1$
            remarks.add(cr);
        } else {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.CheckResult.NoInputError"), stepMeta); //$NON-NLS-1$
            remarks.add(cr);
        }
    }

    /**
     * Build the DDL for the target table based on the fields arriving at this
     * step (delegates to getSQLStatements).
     */
    public SQLStatement getTableDdl( TransMeta transMeta, String stepname, boolean autoSchema, MonetDBBulkLoaderData data, boolean safeMode ) throws KettleException {

        String name = stepname;  // new name might not yet be linked to other steps!
StepMeta stepMeta = new StepMeta(BaseMessages.getString(PKG, "MonetDBBulkLoaderDialog.StepMeta.Title"), name, this); //$NON-NLS-1$ RowMetaInterface prev = transMeta.getPrevStepFields(stepname); SQLStatement sql = getSQLStatements(transMeta, stepMeta, prev, autoSchema, data, safeMode); return sql; } public RowMetaInterface updateFields( TransMeta transMeta, String stepname, MonetDBBulkLoaderData data ) throws KettleStepException { RowMetaInterface prev = transMeta.getPrevStepFields(stepname); return updateFields( prev, data ); } public RowMetaInterface updateFields( RowMetaInterface prev, MonetDBBulkLoaderData data ) { // update the field table from the fields coming from the previous step RowMetaInterface tableFields = new RowMeta(); List<ValueMetaInterface> fields = prev.getValueMetaList(); fieldTable = new String[fields.size()]; fieldStream = new String[fields.size()]; fieldFormatOk = new boolean[fields.size()]; int idx = 0; for( ValueMetaInterface field: fields) { ValueMetaInterface tableField = field.clone(); tableFields.addValueMeta(tableField); fieldTable[idx] = field.getName(); fieldStream[idx] = field.getName(); fieldFormatOk[idx] = true; } data.keynrs = new int[getFieldStream().length]; for (int i=0;i<data.keynrs.length;i++) { data.keynrs[i] = i; } return tableFields; } public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, boolean autoSchema, MonetDBBulkLoaderData data, boolean safeMode) throws KettleStepException { SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do! 
        if (databaseMeta!=null) {
            if (prev!=null && prev.size()>0) {
                // Copy the row
                RowMetaInterface tableFields;

                if( autoSchema ) {
                    // Derive the target layout directly from the incoming fields.
                    tableFields = updateFields( prev, data );
                } else {
                    tableFields = new RowMeta();
                    // Now change the field names: rename each mapped stream field
                    // to its configured target-table name.
                    for (int i=0;i<fieldTable.length;i++) {
                        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
                        if (v!=null) {
                            ValueMetaInterface tableField = v.clone();
                            tableField.setName(fieldTable[i]);
                            tableFields.addValueMeta(tableField);
                        }
                    }
                }

                if (!Const.isEmpty(tableName)) {
                    Database db = new Database(loggingObject, databaseMeta);
                    db.shareVariablesWith(transMeta);
                    try {
                        db.connect();

                        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(transMeta.environmentSubstitute(schemaName), transMeta.environmentSubstitute(tableName));
                        // Thread-local flag read by the MonetDB dialect while generating DDL.
                        MonetDBDatabaseMeta.safeModeLocal.set(safeMode);
                        String cr_table = db.getDDL(schemaTable, tableFields, null, false, null, true );

                        String sql = cr_table;
                        if (sql.length()==0) retval.setSQL(null); else retval.setSQL(sql);
                    } catch(KettleException e) {
                        retval.setError(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.GetSQL.ErrorOccurred")+e.getMessage()); //$NON-NLS-1$
                    } finally {
                        db.disconnect();
                        // Always clear the thread-local to avoid leaking state.
                        MonetDBDatabaseMeta.safeModeLocal.remove();
                    }
                } else {
                    retval.setError(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection")); //$NON-NLS-1$
                }
            } else {
                retval.setError(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.GetSQL.NotReceivingAnyFields")); //$NON-NLS-1$
            }
        } else {
            retval.setError(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.GetSQL.NoConnectionDefined")); //$NON-NLS-1$
        }

        return retval;
    }

    /**
     * Report the database read/write impact of this step: one READ_WRITE entry
     * per mapped field.
     */
    public void analyseImpact(List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) throws KettleStepException {
        if (prev != null) {
            /* DEBUG CHECK THIS */
            // Insert dateMask fields : read/write
            for (int i = 0; i < fieldTable.length; i++) {
                ValueMetaInterface v =
prev.searchValueMeta(fieldStream[i]); DatabaseImpact ii = new DatabaseImpact(DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta .getDatabaseName(), transMeta.environmentSubstitute(tableName), fieldTable[i], fieldStream[i], v!=null?v.getOrigin():"?", "", "Type = " + v.toStringMeta()); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ impact.add(ii); } } } public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans) { return new MonetDBBulkLoader(stepMeta, stepDataInterface, cnr, transMeta, trans); } public StepDataInterface getStepData() { return new MonetDBBulkLoaderData(); } public DatabaseMeta[] getUsedDatabaseConnections() { if (databaseMeta!=null) { return new DatabaseMeta[] { databaseMeta }; } else { return super.getUsedDatabaseConnections(); } } public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException { String realTableName = space.environmentSubstitute(tableName); String realSchemaName = space.environmentSubstitute(schemaName); if (databaseMeta!=null) { Database db = new Database(loggingObject, databaseMeta); try { db.connect(); if (!Const.isEmpty(realTableName)) { String schemaTable = databaseMeta.getQuotedSchemaTableCombination(realSchemaName, realTableName); // Check if this table exists... 
 if (db.checkTableExists(schemaTable)) {
                    return db.getTableFields(schemaTable);
                } else {
                    throw new KettleException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.TableNotFound"));
                }
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.TableNotSpecified"));
            }
        } catch(Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.ErrorGettingFields"), e);
        } finally {
            db.disconnect();
        }
    } else {
        throw new KettleException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.ConnectionNotDefined"));
    }
    }

    /**
     * @return the schemaName
     */
    public String getSchemaName() {
        return schemaName;
    }

    /**
     * @param schemaName the schemaName to set
     */
    public void setSchemaName(String schemaName) {
        this.schemaName = schemaName;
    }

    public String getLogFile() {
        return logFile;
    }

    public void setLogFile(String logFile) {
        this.logFile = logFile;
    }

    public String getEncoding() {
        return encoding;
    }

    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    /** Field delimiter used in the generated bulk-load input (fixed). */
    public String getDelimiter() {
        return ",";
    }

    /** Enclosure character used in the generated bulk-load input (fixed). */
    public String getEnclosure() {
        return "\"";
    }

    /**
     * @return the bufferSize
     */
    public String getBufferSize() {
        return bufferSize;
    }

    /**
     * @param bufferSize the bufferSize to set
     */
    public void setBufferSize(String bufferSize) {
        this.bufferSize = bufferSize;
    }

    /**
     * @return the fieldFormatOk
     */
    public boolean[] getFieldFormatOk() {
        return fieldFormatOk;
    }

    /**
     * @param fieldFormatOk the fieldFormatOk to set
     */
    public void setFieldFormatOk(boolean[] fieldFormatOk) {
        this.fieldFormatOk = fieldFormatOk;
    }

    /**
     * @param clientPath the mClientPath to set
     */
    public void setMClientPath(String clientPath) {
        mClientPath = clientPath;
    }

    /**
     * @return the mClientPath
     */
    public String getMClientPath() {
        return mClientPath;
    }

    @Override
    public String getMissingDatabaseConnectionInformationMessage() {
        // TODO
        // NOTE(review): returning null presumably falls back to the interface's
        // default message — confirm against ProvidesDatabaseConnectionInformation
        // before supplying a custom one.
        return null;
    }

    /**
     * @param dbConnectionName database connection name to set
     */
    public void setDbConnectionName(String dbConnectionName) {
        this.dbConnectionName = dbConnectionName;
    }

    /**
     * @return the database connection name
     */
    public String getDbConnectionName() {
        return this.dbConnectionName;
    }
}
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.compiler.compiler; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import org.drools.core.common.EventFactHandle; import org.drools.core.definitions.impl.KnowledgePackageImpl; import org.drools.core.impl.InternalKnowledgeBase; import org.drools.core.impl.KnowledgeBaseFactory; import org.drools.core.rule.TypeDeclaration; import org.junit.Assert; import org.junit.Test; import org.kie.api.KieBase; import org.kie.api.KieServices; import org.kie.api.builder.KieBuilder; import org.kie.api.builder.KieFileSystem; import org.kie.api.builder.Message; import org.kie.api.builder.model.KieModuleModel; import org.kie.api.definition.KiePackage; import org.kie.api.definition.type.Annotation; import org.kie.api.definition.type.FactField; import org.kie.api.definition.type.FactType; import org.kie.api.definition.type.Role; import org.kie.api.io.KieResources; import org.kie.api.io.Resource; import org.kie.api.io.ResourceType; import org.kie.api.runtime.KieContainer; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.rule.FactHandle; import org.kie.internal.builder.KnowledgeBuilder; import org.kie.internal.builder.KnowledgeBuilderFactory; import 
org.kie.internal.builder.KnowledgeBuilderResults;
import org.kie.internal.builder.ResultSeverity;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.utils.KieHelper;

import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

public class TypeDeclarationTest {

    // A declared type may shadow a java.lang class name (Character) without
    // causing a compilation error.
    @Test
    public void testClassNameClashing() {
        String str = "";
        str += "package org.kie \n" +
               "declare org.kie.Character \n" +
               "    name : String \n" +
               "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
                      ResourceType.DRL );
        if ( kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }
    }

    // A second declaration of the same type may add annotations (@role,
    // @duration) without redefining the fields; both must end up merged
    // into a single type declaration.
    @Test
    public void testAnnotationReDefinition(){
        String str1 = "";
        str1 += "package org.kie \n" +
                "declare org.kie.EventA \n" +
                "    name : String \n" +
                "    duration : Long \n" +
                "end \n";

        String str2 = "";
        str2 += "package org.kie \n" +
                "declare org.kie.EventA \n" +
                "    @role (event) \n" +
                "    @duration (duration) \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ),
                      ResourceType.DRL );

        if (kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }

        //No Warnings
        KnowledgeBuilderResults warnings = kbuilder.getResults(ResultSeverity.WARNING);
        Assert.assertEquals(0, warnings.size());

        //just 1 package was created
        Assert.assertEquals(1, kbuilder.getKnowledgePackages().size());

        //Get the Fact Type for org.kie.EventA
        FactType factType = ((KnowledgePackageImpl)kbuilder.getKnowledgePackages().iterator().next()).getFactType("org.kie.EventA");
        assertNotNull( factType );

        //'name' field must still be there
        FactField field = factType.getField("name");
        assertNotNull( field );

        //'duration' field must still be there
        field = factType.getField("duration");
        assertNotNull( field );

        //New Annotations must be there too
        TypeDeclaration typeDeclaration = ((KnowledgePackageImpl)kbuilder.getKnowledgePackages().iterator().next()).getTypeDeclaration("EventA");
        assertEquals(Role.Type.EVENT, typeDeclaration.getRole());
        assertEquals("duration", typeDeclaration.getDurationAttribute());
    }

    // A re-declaration that both annotates AND adds a field must fail, and
    // on failure no package output should be produced at all.
    @Test
    public void testNoAnnotationUpdateIfError(){
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.EventA \n" +
                "    name : String \n" +
                "    duration : Long \n" +
                "end \n";

        String str2 = "";
        str2 += "package org.drools.compiler \n" +
                "declare org.drools.EventA \n" +
                "    @role (event) \n" +
                "    @duration (duration) \n" +
                "    anotherField : String \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str1.getBytes()),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ),
                      ResourceType.DRL );

        if (!kbuilder.hasErrors() ) {
           fail("Errors Expected");
        }

        //No Warnings
        KnowledgeBuilderResults warnings = kbuilder.getResults(ResultSeverity.WARNING);
        assertEquals(0, warnings.size());

        //no package was created because the build failed
        assertEquals(0, kbuilder.getKnowledgePackages().size());
    }

    /**
     * The same resource (containing a type declaration) is added twice in the
     * kbuilder.
     */
    @Test
    public void testDuplicatedTypeDeclarationWith2FieldsInSameResource() {
        //same package, same resource added twice
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    lastName : String \n" +
                "end \n";

        Resource resource = ResourceFactory.newByteArrayResource( str1.getBytes());
        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( resource, ResourceType.DRL );
        kbuilder.add( resource, ResourceType.DRL );
        if ( kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }
    }

    /**
     * 2 resources (containing the same type declaration) are added to the
     * kbuilder.
     * The expectation here is to silently discard the second type declaration.
     */
    @Test
    public void testDuplicatedTypeDeclarationInDifferentResources() {
        //same package, different resources
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        if ( kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }
    }

    /**
     * 2 resources (containing different declarations of the same type ) are added
     * to the kbuilder.
     * The expectation here is that compilation fails because we are changing
     * the type of a field
     */
    @Test
    public void testClashingTypeDeclarationInDifferentResources() {
        //same package, different resources
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    age : Integer \n" +
                "end \n";

        String str2 = "";
        str2 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    age : String \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ),
                      ResourceType.DRL );

        if (!kbuilder.hasErrors() ) {
            fail( "An error should have been generated, redefinition of ClassA is not allowed" );
        }
    }

    /**
     * 2 resources (containing different declarations of the same type ) are added
     * to the kbuilder.
     * The expectation here is to silently discard the second type declaration.
     * This is because the new definition has less fields that the original
     *
     * UPDATE : any use of the full-arg constructor in the second DRL will fail,
     * so we generate an error anyway
     */
    @Test
    public void testNotSoHarmlessTypeReDeclaration() {
        //same package, different resources
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    age : Integer \n" +
                "end \n";

        String str2 = "";
        str2 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ),
                      ResourceType.DRL );

        if ( ! kbuilder.hasErrors() ) {
            fail( "An error should have been generated, redefinition of ClassA is not allowed" );
        }

        /*
        //1 Warning
        KnowledgeBuilderResults warnings = kbuilder.getResults( ResultSeverity.WARNING );
        Assert.assertEquals(1, warnings.size());
        System.out.println(warnings.iterator().next().getMessage());

        //just 1 package was created
        Assert.assertEquals(1, kbuilder.getKnowledgePackages().size());

        //Get the Fact Type for org.drools.ClassA
        FactType factType = ((KnowledgePackageImp)kbuilder.getKnowledgePackages().iterator().next()).pkg.getFactType("org.drools.ClassA");
        Assert.assertNotNull(factType);

        //'age' field must still be there
        FactField field = factType.getField("age");
        Assert.assertNotNull(field);

        //Assert that the 'name' field must be String and not Long
        Assert.assertEquals(Integer.class, field.getType());
        */
    }

    /**
     * 2 resources (containing different declarations of the same type ) are added
     * to the kbuilder.
     * The expectation here is that the compilation fails because we are
     * adding a new field to the declared Type
     */
    @Test
    public void testTypeReDeclarationWithExtraField() {
        //same package, different resources
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    age : Integer \n" +
                "end \n";

        String str2 = "";
        str2 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    lastName : String \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ),
                      ResourceType.DRL );

        // NOTE(review): on this failure path getErrors() is empty, so the
        // failure message is blank — "errors expected" would be clearer.
        if ( ! kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }
    }

    /**
     * 2 resources (containing different declarations of the same type ) are added
     * to the kbuilder.
     * The expectation here is that the compilation fails because we are
     * trying to add an incompatible re-definition of the declared type:
     * it introduces a new field 'lastName'
     */
    @Test
    public void testTypeReDeclarationWithExtraField2() {
        //same package, different resources
        String str1 = "";
        str1 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    age : Integer \n" +
                "end \n";

        String str2 = "";
        str2 += "package org.drools.compiler \n" +
                "declare org.drools.ClassA \n" +
                "    name : String \n" +
                "    lastName : String \n" +
                "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ),
                      ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ),
                      ResourceType.DRL );

        // NOTE(review): same as above — getErrors() is empty when this fails.
        if (!kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }
    }

    // Two declarations of the same short name in the same DRL must be rejected.
    @Test
    public void testDuplicateDeclaration() {
        String str = "";
        str += "package org.drools.compiler \n" +
               "declare Bean \n" +
               "    name : String \n" +
               "end \n" +
               "declare Bean \n" +
               "    age : int \n" +
               "end \n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
                      ResourceType.DRL );

        if ( ! kbuilder.hasErrors() ) {
            fail( "Two definitions with the same name are not allowed, but it was not detected! " );
        }
    }

    @Retention(value = RetentionPolicy.RUNTIME)
    @Target(value = ElementType.TYPE)
    public static @interface KlassAnnotation {
        String value();
    }

    @Retention(value = RetentionPolicy.RUNTIME)
    @Target(value = ElementType.FIELD)
    public static @interface FieldAnnotation {
        String prop();
    }

    // Class- and field-level metadata declared in DRL must surface on the
    // generated FactType (role, expiry, custom Java annotations, @key field).
    @Test
    public void testTypeDeclarationMetadata() {
        String str = "";
        str += "package org.drools.compiler.test; \n" +
               "import org.drools.compiler.compiler.TypeDeclarationTest.KlassAnnotation; \n" +
               "import org.drools.compiler.compiler.TypeDeclarationTest.FieldAnnotation; \n" +
               "import org.drools.compiler.Person\n" +
               "\n" +
               "declare Bean \n" +
               "@role(event) \n" +
               "@expires( 1s ) \n" +
               "@KlassAnnotation( \"klass\" )" +
               "" +
               "  name : String @key @FieldAnnotation( prop = \"fld\" )\n" +
               "end \n" +
               "declare Person @role(event) end";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ),
                      ResourceType.DRL );
        System.err.println( kbuilder.getErrors() );
        assertFalse(kbuilder.hasErrors());

        InternalKnowledgeBase kBase = KnowledgeBaseFactory.newKnowledgeBase();
        kBase.addPackages( kbuilder.getKnowledgePackages() );

        FactType bean = kBase.getFactType( "org.drools.compiler.test", "Bean" );
        FactType pers = kBase.getFactType( "org.drools", "Person" );

        assertEquals( "org.drools.compiler.test.Bean", bean.getName() );
        assertEquals( "Bean", bean.getSimpleName() );
        assertEquals( "org.drools.compiler.test", bean.getPackageName() );

        // Annotation order is not guaranteed, so scan for KlassAnnotation.
        assertEquals( 3, bean.getClassAnnotations().size() );
        Annotation ann = bean.getClassAnnotations().get( 0 );
        if (!ann.getName().equals("org.drools.compiler.compiler.TypeDeclarationTest$KlassAnnotation")) {
            ann = bean.getClassAnnotations().get( 1 );
        }
        if (!ann.getName().equals("org.drools.compiler.compiler.TypeDeclarationTest$KlassAnnotation")) {
            ann = bean.getClassAnnotations().get( 2 );
        }
        assertEquals( "org.drools.compiler.compiler.TypeDeclarationTest$KlassAnnotation", ann.getName() );
assertEquals( "klass", ann.getPropertyValue( "value" ) ); assertEquals( String.class, ann.getPropertyType( "value" ) ); assertEquals( 2, bean.getMetaData().size() ); assertEquals( "event", bean.getMetaData().get( "role" ) ); FactField field = bean.getField( "name" ); assertNotNull( field ); assertEquals( 2, field.getFieldAnnotations().size() ); Annotation fnn = field.getFieldAnnotations().get( 0 ); if (!fnn.getName().equals("org.drools.compiler.compiler.TypeDeclarationTest$FieldAnnotation")) { fnn = field.getFieldAnnotations().get( 1 ); } assertEquals( "org.drools.compiler.compiler.TypeDeclarationTest$FieldAnnotation", fnn.getName() ); assertEquals( "fld", fnn.getPropertyValue( "prop" ) ); assertEquals( String.class, fnn.getPropertyType( "prop" ) ); assertEquals( 1, field.getMetaData().size() ); assertTrue( field.getMetaData().containsKey( "key" ) ); } public static class EventBar { public static class Foo { } } @Test public void testTypeDeclarationWithInnerClasses() { // DROOLS-150 String str = ""; str += "package org.drools.compiler;\n" + "\n" + "import org.drools.compiler.compiler.TypeDeclarationTest.EventBar.*;\n" + "" + "declare Foo\n" + " @role( event )\n" + "end\n" + "" + "rule R when Foo() then end"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); System.err.println( kbuilder.getErrors() ); assertFalse( kbuilder.hasErrors() ); InternalKnowledgeBase kBase = KnowledgeBaseFactory.newKnowledgeBase(); kBase.addPackages( kbuilder.getKnowledgePackages() ); KieSession knowledgeSession = kBase.newKieSession(); FactHandle handle = knowledgeSession.insert( new EventBar.Foo() ); assertTrue( handle instanceof EventFactHandle ); } @Test public void testTypeDeclarationWithInnerClassesImport() { // DROOLS-150 String str = ""; str += "package org.drools.compiler;\n" + "\n" + "import org.drools.compiler.compiler.TypeDeclarationTest.EventBar.Foo;\n" + "" + 
"declare Foo\n" + " @role( event )\n" + "end\n" + "" + "rule R when Foo() then end"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL ); System.err.println( kbuilder.getErrors() ); assertFalse( kbuilder.hasErrors() ); InternalKnowledgeBase kBase = KnowledgeBaseFactory.newKnowledgeBase(); kBase.addPackages( kbuilder.getKnowledgePackages() ); KieSession knowledgeSession = kBase.newKieSession(); FactHandle handle = knowledgeSession.insert( new EventBar.Foo() ); assertTrue( handle instanceof EventFactHandle ); } static class ClassC { private String name; private Integer age; public String getName() { return name; } public void setName( String name ) { this.name = name; } public Integer getAge() { return age; } public void setAge( Integer age ) { this.age = age; } } @Test public void testTypeReDeclarationPojo() { String str1 = "" + "package org.drools \n" + "import " + TypeDeclarationTest.class.getName() + ".ClassC; \n" + "" + "declare " + TypeDeclarationTest.class.getName() + ".ClassC \n" + " name : String \n" + " age : Integer \n" + "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } } @Test public void testTypeReDeclarationPojoMoreFields() { String str1 = "" + "package org.drools \n" + "import " + TypeDeclarationTest.class.getName() + ".ClassC; \n" + "" + "declare " + TypeDeclarationTest.class.getName() + ".ClassC \n" + " name : String \n" + " age : Integer \n" + " address : Objet \n" + "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ), ResourceType.DRL ); if ( ! 
kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } } @Test public void testTypeReDeclarationPojoLessFields() { String str1 = "" + "package org.drools \n" + "import " + TypeDeclarationTest.class.getName() + ".ClassC; \n" + "" + "declare " + TypeDeclarationTest.class.getName() + ".ClassC \n" + " name : String \n" + "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ), ResourceType.DRL ); if ( ! kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } } @Test public void testMultipleTypeReDeclaration() { //same package, different resources String str1 = ""; str1 += "package org.drools \n" + "declare org.drools.ClassC \n" + " name : String \n" + " age : Integer \n" + "end \n"; String str2 = ""; str2 += "package org.drools \n" + "declare org.drools.ClassC \n" + " name : String \n" + " age : Integer \n" + "end \n"; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ), ResourceType.DRL ); kbuilder.add( ResourceFactory.newByteArrayResource( str2.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } } @Test public void testDeclaresInForeignPackages() { String str1 = "" + "package org.drools \n" + "declare foreign.ClassC fld : foreign.ClassD end " + "declare foreign.ClassD end " + ""; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } } @Test public void testDeclareFieldArray() { String str1 = "" + "package org.drools " + "declare Test end " + "declare Pet " + " owners : Owner[] " + " twoDimArray : Foo[][] " + " friends : Pet[] " + " ages : int[] " + "end " + "declare Owner " + " name : String " + "end " + "declare Foo 
end " + ""; KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add( ResourceFactory.newByteArrayResource( str1.getBytes() ), ResourceType.DRL ); if ( kbuilder.hasErrors() ) { fail( kbuilder.getErrors().toString() ); } for( KiePackage kp : kbuilder.getKnowledgePackages() ) { if ( kp.getName().equals( "org.drools" ) ) { Collection<FactType> types = kp.getFactTypes(); for ( FactType type : types ) { if ( "org.drools.Pet".equals( type.getName() ) ) { assertEquals( 4, type.getFields().size() ); FactField owners = type.getField( "owners" ); assertTrue( owners != null && owners.getType().getSimpleName().equals( "Owner[]" ) && owners.getType().isArray() ); FactField twoDim = type.getField( "twoDimArray" ); assertTrue( twoDim != null && twoDim.getType().getSimpleName().equals( "Foo[][]" ) && twoDim.getType().isArray() ); FactField friends = type.getField( "friends" ); assertTrue( friends != null && friends.getType().getSimpleName().equals( "Pet[]" ) && friends.getType().isArray() ); FactField ages = type.getField( "ages" ); assertTrue( ages != null && ages.getType().getSimpleName().equals( "int[]" ) && ages.getType().isArray() ); } } } } } @Test( expected = UnsupportedOperationException.class ) public void testPreventReflectionAPIsOnJavaClasses() { String drl = "package org.test; " + // existing java class "declare org.drools.compiler.Person " + " @role(event) " + "end \n" + ""; KieBuilder kieBuilder = build(drl); assertFalse( kieBuilder.getResults().hasMessages( Message.Level.ERROR ) ); KieBase kieBase = KieServices.Factory.get().newKieContainer( kieBuilder.getKieModule().getReleaseId() ).getKieBase(); FactType type = kieBase.getFactType( "org.drools.compiler", "Person" ); } @Test public void testCrossPackageDeclares() { String pkg1 = "package org.drools.compiler.test1; " + "import org.drools.compiler.test2.GrandChild; " + "import org.drools.compiler.test2.Child; " + "import org.drools.compiler.test2.BarFuu; " + "declare FuBaz foo : String 
end " + "declare Parent " + " unknown : BarFuu " + "end " + "declare GreatChild extends GrandChild " + " father : Child " + "end " ; String pkg2 = "package org.drools.compiler.test2; " + "import org.drools.compiler.test1.Parent; " + "import org.drools.compiler.test1.FuBaz; " + "declare BarFuu " + " baz : FuBaz " + "end " + "declare Child extends Parent " + "end " + "declare GrandChild extends Child " + " notknown : FuBaz " + "end " ; KieServices ks = KieServices.Factory.get(); KieFileSystem kfs = ks.newKieFileSystem(); kfs.generateAndWritePomXML( ks.newReleaseId( "test", "foo", "1.0" ) ); KieModuleModel km = ks.newKieModuleModel(); km.newKieBaseModel( "rules" ) .addPackage( "org.drools.compiler.test2" ) .addPackage( "org.drools.compiler.test1" ); kfs.writeKModuleXML( km.toXML() ); KieResources kr = ks.getResources(); Resource r1 = kr.newByteArrayResource( pkg1.getBytes() ) .setResourceType( ResourceType.DRL ) .setSourcePath( "org/drools/compiler/test1/p1.drl" ); Resource r2 = kr.newByteArrayResource( pkg2.getBytes() ) .setResourceType( ResourceType.DRL ) .setSourcePath( "org/drools/compiler/test2/p2.drl" ); kfs.write( r1 ); kfs.write( r2 ); KieBuilder builder = ks.newKieBuilder( kfs ); builder.buildAll(); assertEquals( Collections.emptyList(), builder.getResults().getMessages( Message.Level.ERROR ) ); KieContainer kc = ks.newKieContainer(builder.getKieModule().getReleaseId()); FactType ft = kc.getKieBase( "rules" ).getFactType( "org.drools.compiler.test2", "Child" ); assertNotNull( ft ); assertNotNull( ft.getFactClass() ); assertEquals( "org.drools.compiler.test1.Parent", ft.getFactClass().getSuperclass().getName() ); } @Test public void testUnknownField() throws InstantiationException, IllegalAccessException { // DROOLS-546 String drl = "package org.test; " + "declare Pet" + " " + "end \n"; KieBuilder kieBuilder = build(drl); assertFalse( kieBuilder.getResults().hasMessages( Message.Level.ERROR ) ); KieBase kieBase = KieServices.Factory.get().newKieContainer( 
kieBuilder.getKieModule().getReleaseId() ).getKieBase(); FactType factType = kieBase.getFactType("org.test", "Pet"); Object instance = factType.newInstance(); factType.get(instance, "unknownField"); factType.set(instance, "unknownField", "myValue"); } @Test public void testPositionalArguments() throws InstantiationException, IllegalAccessException { String drl = "package org.test;\n" + "global java.util.List names;\n" + "declare Person\n" + " name : String\n" + " age : int\n" + "end\n" + "rule R when \n" + " $p : Person( \"Mark\", 37; )\n" + "then\n" + " names.add( $p.getName() );\n" + "end\n"; KieBuilder kieBuilder = build(drl); assertFalse(kieBuilder.getResults().hasMessages(Message.Level.ERROR)); KieBase kieBase = KieServices.Factory.get().newKieContainer( kieBuilder.getKieModule().getReleaseId() ).getKieBase(); FactType factType = kieBase.getFactType( "org.test", "Person" ); Object instance = factType.newInstance(); factType.set(instance, "name", "Mark"); factType.set(instance, "age", 37); List<String> names = new ArrayList<String>(); KieSession ksession = kieBase.newKieSession(); ksession.setGlobal("names", names); ksession.insert(instance); ksession.fireAllRules(); assertEquals( 1, names.size() ); assertEquals( "Mark", names.get( 0 ) ); } @Test public void testExplictPositionalArguments() throws InstantiationException, IllegalAccessException { String drl = "package org.test;\n" + "global java.util.List names;\n" + "declare Person\n" + " name : String @position(1)\n" + " age : int @position(0)\n" + "end\n" + "rule R when \n" + " $p : Person( 37, \"Mark\"; )\n" + "then\n" + " names.add( $p.getName() );\n" + "end\n"; KieBuilder kieBuilder = build(drl); assertFalse(kieBuilder.getResults().hasMessages(Message.Level.ERROR)); KieBase kieBase = KieServices.Factory.get().newKieContainer( kieBuilder.getKieModule().getReleaseId() ).getKieBase(); FactType factType = kieBase.getFactType("org.test", "Person"); Object instance = factType.newInstance(); 
factType.set(instance, "name", "Mark"); factType.set(instance, "age", 37); List<String> names = new ArrayList<String>(); KieSession ksession = kieBase.newKieSession(); ksession.setGlobal("names", names); ksession.insert(instance); ksession.fireAllRules(); assertEquals(1, names.size()); assertEquals("Mark", names.get(0)); } @Test public void testTooManyPositionalArguments() throws InstantiationException, IllegalAccessException { // DROOLS-559 String drl = "package org.test;\n" + "global java.util.List names;\n" + "declare Person\n" + " name : String\n" + " age : int\n" + "end\n" + "rule R when \n" + " $p : Person( \"Mark\", 37, 42; )\n" + "then\n" + " names.add( $p.getName() );\n" + "end\n"; KieBuilder kieBuilder = build(drl); assertTrue( kieBuilder.getResults().hasMessages( Message.Level.ERROR ) ); } @Test public void testOutOfRangePositions() throws InstantiationException, IllegalAccessException { // DROOLS-559 String drl = "package org.test;\n" + "global java.util.List names;\n" + "declare Person\n" + " name : String @position(3)\n" + " age : int @position(1)\n" + "end\n" + "rule R when \n" + " $p : Person( 37, \"Mark\"; )\n" + "then\n" + " names.add( $p.getName() );\n" + "end\n"; KieBuilder kieBuilder = build(drl); assertTrue( kieBuilder.getResults().hasMessages( Message.Level.ERROR ) ); } @Test public void testDuplicatedPositions() throws InstantiationException, IllegalAccessException { // DROOLS-559 String drl = "package org.test;\n" + "global java.util.List names;\n" + "declare Person\n" + " name : String @position(1)\n" + " age : int @position(1)\n" + "end\n" + "rule R when \n" + " $p : Person( 37, \"Mark\"; )\n" + "then\n" + " names.add( $p.getName() );\n" + "end\n"; KieBuilder kieBuilder = build(drl); assertTrue( kieBuilder.getResults().hasMessages( Message.Level.ERROR ) ); } private KieBuilder build(String drl) { KieServices kieServices = KieServices.Factory.get(); KieFileSystem kfs = kieServices.newKieFileSystem(); kfs.write( 
kieServices.getResources().newByteArrayResource( drl.getBytes() ) .setSourcePath( "test.drl" ) .setResourceType( ResourceType.DRL ) ); KieBuilder kieBuilder = kieServices.newKieBuilder( kfs ); kieBuilder.buildAll(); return kieBuilder; } @Test public void testMultipleAnnotationDeclarations() { String str1 = ""; str1 += "package org.kie1 " + "" + "declare Foo \n" + " name : String " + " age : int " + "end "; String str2 = ""; str2 += "package org.kie2 " + "" + "declare org.kie1.Foo " + " @role(event) " + "end "; String str3 = ""; str3 += "package org.kie3 " + "" + "declare org.kie1.Foo " + " @propertyReactive " + "end "; String str4 = "" + "package org.kie4; " + "import org.kie1.Foo; " + "" + "rule Check " + "when " + " $f : Foo( name == 'bar' ) " + "then " + " modify( $f ) { setAge( 99 ); } " + "end "; KieHelper helper = new KieHelper(); helper.addContent( str1, ResourceType.DRL ); helper.addContent( str2, ResourceType.DRL ); helper.addContent( str3, ResourceType.DRL ); helper.addContent( str4, ResourceType.DRL ); List<Message> msg = helper.verify().getMessages( Message.Level.ERROR ); System.out.println( msg ); assertEquals( 0, msg.size() ); KieBase kieBase = helper.build(); FactType type = kieBase.getFactType( "org.kie1", "Foo" ); assertEquals( 2, type.getFields().size() ); Object foo = null; try { foo = type.newInstance(); type.set( foo, "name", "bar" ); assertEquals( "bar", type.get( foo, "name" ) ); } catch ( InstantiationException e ) { fail( e.getMessage() ); } catch ( IllegalAccessException e ) { fail( e.getMessage() ); } KieSession session = kieBase.newKieSession(); FactHandle handle = session.insert( foo ); int n = session.fireAllRules( 5 ); assertTrue( handle instanceof EventFactHandle ); assertEquals( 1, n ); assertEquals( 99, type.get( foo, "age" ) ); } @Test() public void testTraitExtendPojo() { //DROOLS-697 final String s1 = "package test;\n" + "declare Poojo " + "end " + "declare trait Mask extends Poojo " + "end " + ""; KieHelper kh = new 
KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 1, kh.verify().getMessages( Message.Level.ERROR ).size() ); } @Test() public void testPojoExtendInterface() { // DROOLS-697 // It is now allowed for a declared type to extend an interface // The interface itself will be added to the implements part of the generated class final String s1 = "package test;\n" + "declare Poojo extends Mask " + "end " + "declare trait Mask " + "end " + ""; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 0, kh.verify().getMessages( Message.Level.ERROR ).size() ); } public static interface Base { public Object getFld(); public void setFld( Object x ); } public static interface Ext extends Base { public String getFld(); public void setFld( String s ); } @Test public void testRedeclareWithInterfaceExtensionAndOverride() { final String s1 = "package test;\n" + "declare trait " + Ext.class.getCanonicalName() + " extends " + Base.class.getCanonicalName() + " " + " fld : String " + "end " + "declare trait " + Base.class.getCanonicalName() + " " + "end " + ""; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 0, kh.verify().getMessages( Message.Level.ERROR ).size() ); } @Test public void testDeclareWithExtensionAndOverride() { final String s1 = "package test; " + "global java.util.List list; " + "declare Sub extends Sup " + " fld : String " + "end " + "declare Sup " + " fld : Object " + "end " + "rule Init when " + "then insert( new Sub( 'aa' ) ); end " + "rule CheckSup when " + " $s : Sup( $f : fld == 'aa' ) " + "then " + " list.add( \"Sup\" + $f ); " + "end " + "rule CheckSub when " + " $s : Sub( $f : fld == 'aa' ) " + "then " + " list.add( \"Sub\" + $f ); " + "end "; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 0, kh.verify().getMessages( Message.Level.ERROR ).size() ); assertEquals( 0, kh.verify().getMessages( Message.Level.WARNING ).size() ); KieSession ks = 
kh.build().newKieSession(); List list = new ArrayList(); ks.setGlobal( "list", list ); ks.fireAllRules(); assertEquals( 2, list.size() ); assertTrue( list.containsAll( asList("Supaa", "Subaa") ) ); FactType sup = ks.getKieBase().getFactType( "test", "Sup" ); FactType sub = ks.getKieBase().getFactType( "test", "Sub" ); try { Method m1 = sup.getFactClass().getMethod( "getFld" ); assertNotNull( m1 ); assertEquals( Object.class, m1.getReturnType() ); Method m2 = sub.getFactClass().getMethod( "getFld" ); assertNotNull( m2 ); assertEquals( String.class, m2.getReturnType() ); assertEquals( 0, sub.getFactClass().getFields().length ); assertEquals( 0, sub.getFactClass().getDeclaredFields().length ); assertEquals( 1, sup.getFactClass().getDeclaredFields().length ); } catch ( Exception e ) { e.printStackTrace(); fail( e.getMessage() ); } } public static class SomeClass {} @Test public void testRedeclareClassAsTrait() { final String s1 = "package test; " + "global java.util.List list; " + "declare trait " + SomeClass.class.getCanonicalName() + " end "; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 1, kh.verify().getMessages( Message.Level.ERROR ).size() ); } public static class BeanishClass { private int foo; public int getFoo() { return foo; } public void setFoo( int x ) { foo = x; } public void setFooAsString( String x ) { foo = Integer.parseInt( x ); } } @Test public void testDeclarationOfClassWithNonStandardSetter() { final String s1 = "package test; " + "import " + BeanishClass.class.getCanonicalName() + "; " + "declare " + BeanishClass.class.getSimpleName() + " @propertyReactive end " + "rule Check when BeanishClass() @Watch( foo ) then end "; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 0, kh.verify().getMessages( Message.Level.ERROR ).size() ); } @Test public void testDeclarationOfClassWithNonStandardSetterAndCanonicalName() { // DROOLS-815 final String s1 = "package test; " + "import " + 
BeanishClass.class.getCanonicalName() + "; " + "declare " + BeanishClass.class.getCanonicalName() + " @propertyReactive end " + "rule Check when BeanishClass() @Watch( foo ) then end "; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 0, kh.verify().getMessages( Message.Level.ERROR ).size() ); } @Test public void testDeclarationOfClassWithNonStandardSetterAndFulllName() { final String s1 = "package test; " + "import " + BeanishClass.class.getCanonicalName() + "; " + "declare " + BeanishClass.class.getName() + " @propertyReactive end " + "rule Check when BeanishClass() @watch( foo ) then end "; KieHelper kh = new KieHelper(); kh.addContent( s1, ResourceType.DRL ); assertEquals( 0, kh.verify().getMessages( Message.Level.ERROR ).size() ); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.file.OpenOption; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.managers.communication.GridIoMessage; import org.apache.ignite.internal.processors.cache.CacheGroupContext; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionSupplyMessage; import org.apache.ignite.internal.processors.cache.persistence.file.FileIO; import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory; 
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

/**
 * Tests automatic local WAL disabling while a node is rebalancing and its
 * re-enabling (with a checkpoint) once rebalance completes.
 */
public class LocalWalModeChangeDuringRebalancingSelfTest extends GridCommonAbstractTest {
    /** Controls IGNITE_DISABLE_WAL_DURING_REBALANCING for the next started grids; reset in afterTest. */
    private static boolean disableWalDuringRebalancing = true;

    /** Latch used to stall supply messages so an exchange can run mid-rebalance. */
    private static final AtomicReference<CountDownLatch> supplyMessageLatch = new AtomicReference<>();

    /** Latch used to stall file I/O so an exchange can run mid-checkpoint. */
    private static final AtomicReference<CountDownLatch> fileIOLatch = new AtomicReference<>();

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setDataStorageConfiguration(
            new DataStorageConfiguration()
                .setDefaultDataRegionConfiguration(
                    new DataRegionConfiguration()
                        .setPersistenceEnabled(true)
                        .setMaxSize(200 * 1024 * 1024)
                        .setInitialSize(200 * 1024 * 1024)
                )
                // Test verifies checkpoint count, so it is essential that no checkpoint is triggered by timeout
                .setCheckpointFrequency(999_999_999_999L)
                .setFileIOFactory(new TestFileIOFactory(new DataStorageConfiguration().getFileIOFactory()))
        );

        cfg.setCacheConfiguration(
            new CacheConfiguration(DEFAULT_CACHE_NAME)
                // Test checks internal state before and after rebalance, so it is configured to be triggered manually
                .setRebalanceDelay(-1)
        );

        cfg.setCommunicationSpi(new TcpCommunicationSpi() {
            // Both sendMessage overloads block supply messages for the default
            // cache group while supplyMessageLatch is armed.
            @Override public void sendMessage(ClusterNode node, Message msg) throws IgniteSpiException {
                if (msg instanceof GridIoMessage && ((GridIoMessage)msg).message() instanceof GridDhtPartitionSupplyMessage) {
                    int grpId = ((GridDhtPartitionSupplyMessage)((GridIoMessage)msg).message()).groupId();

                    if (grpId == CU.cacheId(DEFAULT_CACHE_NAME)) {
                        CountDownLatch latch0 = supplyMessageLatch.get();

                        if (latch0 != null)
                            try {
                                latch0.await();
                            }
                            catch (InterruptedException ex) {
                                throw new IgniteException(ex);
                            }
                    }
                }

                super.sendMessage(node, msg);
            }

            @Override public void sendMessage(ClusterNode node, Message msg,
                IgniteInClosure<IgniteException> ackC) throws IgniteSpiException {
                if (msg instanceof GridIoMessage && ((GridIoMessage)msg).message() instanceof GridDhtPartitionSupplyMessage) {
                    int grpId = ((GridDhtPartitionSupplyMessage)((GridIoMessage)msg).message()).groupId();

                    if (grpId == CU.cacheId(DEFAULT_CACHE_NAME)) {
                        CountDownLatch latch0 = supplyMessageLatch.get();

                        if (latch0 != null)
                            try {
                                latch0.await();
                            }
                            catch (InterruptedException ex) {
                                throw new IgniteException(ex);
                            }
                    }
                }

                super.sendMessage(node, msg, ackC);
            }
        });

        cfg.setConsistentId(igniteInstanceName);

        System.setProperty(IgniteSystemProperties.IGNITE_DISABLE_WAL_DURING_REBALANCING,
            Boolean.toString(disableWalDuringRebalancing));

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        super.afterTest();

        // Release any armed latches so blocked sender/IO threads can finish
        // before grids are stopped.
        CountDownLatch msgLatch = supplyMessageLatch.get();

        if (msgLatch != null) {
            while (msgLatch.getCount() > 0)
                msgLatch.countDown();

            supplyMessageLatch.set(null);
        }

        CountDownLatch fileLatch = fileIOLatch.get();

        if (fileLatch != null) {
            while (fileLatch.getCount() > 0)
                fileLatch.countDown();

            fileIOLatch.set(null);
        }

        stopAllGrids();

        cleanPersistenceDir();

        disableWalDuringRebalancing = true;
    }

    /**
     * @throws Exception If failed.
     */
    public void testWalDisabledDuringRebalancing() throws Exception {
        doTestSimple();
    }

    /**
     * @throws Exception If failed.
     */
    public void testWalNotDisabledIfParameterSetToFalse() throws Exception {
        disableWalDuringRebalancing = false;

        doTestSimple();
    }

    /**
     * Shared body for the two tests above: starts a 4th node, triggers
     * rebalance, and verifies the WAL state plus the exact checkpoint counts
     * before node start / before rebalance / after rebalance.
     *
     * @throws Exception If failed.
     */
    private void doTestSimple() throws Exception {
        Ignite ignite = startGrids(3);

        ignite.cluster().active(true);

        IgniteCache<Integer, Integer> cache = ignite.cache(DEFAULT_CACHE_NAME);

        for (int k = 0; k < 10_000; k++)
            cache.put(k, k);

        IgniteEx newIgnite = startGrid(3);

        final GridCacheDatabaseSharedManager.CheckpointHistory cpHistory =
            ((GridCacheDatabaseSharedManager)newIgnite.context().cache().context().database()).checkpointHistory();

        // Wait for the start-up checkpoint to be recorded.
        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                return !cpHistory.checkpoints().isEmpty();
            }
        }, 10_000);

        U.sleep(10); // To ensure timestamp granularity.

        long newIgniteStartedTimestamp = System.currentTimeMillis();

        ignite.cluster().setBaselineTopology(4);

        CacheGroupContext grpCtx = newIgnite.cachex(DEFAULT_CACHE_NAME).context().group();

        // WAL must be off on the joining node iff automatic disabling is on.
        assertEquals(!disableWalDuringRebalancing, grpCtx.walEnabled());

        U.sleep(10); // To ensure timestamp granularity.

        long rebalanceStartedTimestamp = System.currentTimeMillis();

        for (Ignite g : G.allGrids())
            g.cache(DEFAULT_CACHE_NAME).rebalance();

        awaitPartitionMapExchange();

        assertTrue(grpCtx.walEnabled());

        U.sleep(10); // To ensure timestamp granularity.

        long rebalanceFinishedTimestamp = System.currentTimeMillis();

        for (Integer k = 0; k < 1000; k++)
            assertEquals("k=" + k, k, cache.get(k));

        int checkpointsBeforeNodeStarted = 0;
        int checkpointsBeforeRebalance = 0;
        int checkpointsAfterRebalance = 0;

        // Bucket recorded checkpoint timestamps into the three test phases.
        for (Long timestamp : cpHistory.checkpoints()) {
            if (timestamp < newIgniteStartedTimestamp)
                checkpointsBeforeNodeStarted++;
            else if (timestamp >= newIgniteStartedTimestamp && timestamp < rebalanceStartedTimestamp)
                checkpointsBeforeRebalance++;
            else if (timestamp >= rebalanceStartedTimestamp && timestamp <= rebalanceFinishedTimestamp)
                checkpointsAfterRebalance++;
        }

        assertEquals(1, checkpointsBeforeNodeStarted); // checkpoint on start
        assertEquals(0, checkpointsBeforeRebalance);
        assertEquals(disableWalDuringRebalancing ? 1 : 0, checkpointsAfterRebalance); // checkpoint if WAL was re-activated
    }

    /**
     * Verifies interaction between automatic local WAL disabling and the
     * cluster-wide WAL enable/disable API for the same cache.
     *
     * @throws Exception If failed.
     */
    public void testLocalAndGlobalWalStateInterdependence() throws Exception {
        Ignite ignite = startGrids(3);

        ignite.cluster().active(true);

        IgniteCache<Integer, Integer> cache = ignite.cache(DEFAULT_CACHE_NAME);

        for (int k = 0; k < 10_000; k++)
            cache.put(k, k);

        IgniteEx newIgnite = startGrid(3);

        ignite.cluster().setBaselineTopology(ignite.cluster().nodes());

        CacheGroupContext grpCtx = newIgnite.cachex(DEFAULT_CACHE_NAME).context().group();

        assertFalse(grpCtx.walEnabled());

        ignite.cluster().disableWal(DEFAULT_CACHE_NAME);

        for (Ignite g : G.allGrids())
            g.cache(DEFAULT_CACHE_NAME).rebalance();

        awaitPartitionMapExchange();

        assertFalse(grpCtx.walEnabled()); // WAL is globally disabled

        ignite.cluster().enableWal(DEFAULT_CACHE_NAME);

        assertTrue(grpCtx.walEnabled());
    }

    /**
     * @throws Exception If failed.
     */
    public void testParallelExchangeDuringRebalance() throws Exception {
        doTestParallelExchange(supplyMessageLatch);
    }

    /**
     * @throws Exception If failed.
     */
    public void testParallelExchangeDuringCheckpoint() throws Exception {
        doTestParallelExchange(fileIOLatch);
    }

    /**
     * Shared body: arms the given latch to stall rebalance (or checkpoint I/O),
     * triggers a concurrent exchange, and verifies local WAL stays disabled
     * until rebalance actually completes.
     *
     * @throws Exception If failed.
     */
    private void doTestParallelExchange(AtomicReference<CountDownLatch> latchRef) throws Exception {
        Ignite ignite = startGrids(3);

        ignite.cluster().active(true);

        IgniteCache<Integer, Integer> cache = ignite.cache(DEFAULT_CACHE_NAME);

        for (int k = 0; k < 10_000; k++)
            cache.put(k, k);

        IgniteEx newIgnite = startGrid(3);

        CacheGroupContext grpCtx = newIgnite.cachex(DEFAULT_CACHE_NAME).context().group();

        CountDownLatch latch = new CountDownLatch(1);

        latchRef.set(latch);

        ignite.cluster().setBaselineTopology(ignite.cluster().nodes());

        for (Ignite g : G.allGrids())
            g.cache(DEFAULT_CACHE_NAME).rebalance();

        assertFalse(grpCtx.walEnabled());

        // TODO : test with client node as well
        startGrid(4); // Trigger exchange

        assertFalse(grpCtx.walEnabled());

        latch.countDown();

        assertFalse(grpCtx.walEnabled());

        for (Ignite g : G.allGrids())
            g.cache(DEFAULT_CACHE_NAME).rebalance();

        awaitPartitionMapExchange();

        assertTrue(grpCtx.walEnabled());
    }

    /**
     * Data received while local WAL was disabled must not survive a restart:
     * after restarting the node that rebalanced with WAL off, its partitions
     * must come back empty.
     *
     * @throws Exception If failed.
     */
    public void testDataClearedAfterRestartWithDisabledWal() throws Exception {
        Ignite ignite = startGrid(0);

        ignite.cluster().active(true);

        IgniteCache<Integer, Integer> cache = ignite.cache(DEFAULT_CACHE_NAME);

        for (int k = 0; k < 10_000; k++)
            cache.put(k, k);

        IgniteEx newIgnite = startGrid(1);

        ignite.cluster().setBaselineTopology(2);

        CacheGroupContext grpCtx = newIgnite.cachex(DEFAULT_CACHE_NAME).context().group();

        assertFalse(grpCtx.localWalEnabled());

        // Stop mid-state (WAL still disabled on node 1), then restart only node 1.
        stopGrid(1);

        stopGrid(0);

        newIgnite = startGrid(1);

        newIgnite.cluster().active(true);

        newIgnite.cluster().setBaselineTopology(newIgnite.cluster().nodes());

        cache = newIgnite.cache(DEFAULT_CACHE_NAME);

        for (int k = 0; k < 10_000; k++)
            assertFalse("k=" + k +", v=" + cache.get(k), cache.containsKey(k));
    }

    /**
     * Shrinking the baseline (node leaves) redistributes partitions but must
     * NOT disable WAL on the surviving nodes.
     *
     * @throws Exception If failed.
     */
    public void testWalNotDisabledAfterShrinkingBaselineTopology() throws Exception {
        Ignite ignite = startGrids(4);

        ignite.cluster().active(true);

        IgniteCache<Integer, Integer> cache = ignite.cache(DEFAULT_CACHE_NAME);

        for (int k = 0; k < 10_000; k++)
            cache.put(k, k);

        for (Ignite g : G.allGrids()) {
            CacheGroupContext grpCtx = ((IgniteEx)g).cachex(DEFAULT_CACHE_NAME).context().group();

            assertTrue(grpCtx.walEnabled());
        }

        stopGrid(2);

        ignite.cluster().setBaselineTopology(5);

        for (Ignite g : G.allGrids()) {
            CacheGroupContext grpCtx = ((IgniteEx)g).cachex(DEFAULT_CACHE_NAME).context().group();

            assertTrue(grpCtx.walEnabled());

            g.cache(DEFAULT_CACHE_NAME).rebalance();
        }

        awaitPartitionMapExchange();

        for (Ignite g : G.allGrids()) {
            CacheGroupContext grpCtx = ((IgniteEx)g).cachex(DEFAULT_CACHE_NAME).context().group();

            assertTrue(grpCtx.walEnabled());
        }
    }

    /**
     * FileIOFactory decorator that wraps every created FileIO in a TestFileIO,
     * allowing checkpoint I/O to be stalled via fileIOLatch.
     */
    private static class TestFileIOFactory implements FileIOFactory {
        /** Real factory all creation calls are delegated to. */
        private final FileIOFactory delegate;

        /**
         * @param delegate Delegate.
         */
        TestFileIOFactory(FileIOFactory delegate) {
            this.delegate = delegate;
        }

        /** {@inheritDoc} */
        @Override public FileIO create(File file) throws IOException {
            return new TestFileIO(delegate.create(file));
        }

        /** {@inheritDoc} */
        @Override public FileIO create(File file, OpenOption... modes) throws IOException {
            return new TestFileIO(delegate.create(file, modes));
        }
    }

    /**
     * FileIO decorator used by TestFileIOFactory; continues past the visible
     * region of this file.
     */
    private static class TestFileIO implements FileIO {
        /** Wrapped FileIO all operations are delegated to. */
        private final FileIO delegate;

        /**
         * @param delegate Delegate.
*/ TestFileIO(FileIO delegate) { this.delegate = delegate; } /** {@inheritDoc} */ @Override public long position() throws IOException { return delegate.position(); } /** {@inheritDoc} */ @Override public void position(long newPosition) throws IOException { delegate.position(newPosition); } /** {@inheritDoc} */ @Override public int read(ByteBuffer destBuf) throws IOException { return delegate.read(destBuf); } /** {@inheritDoc} */ @Override public int read(ByteBuffer destBuf, long position) throws IOException { return delegate.read(destBuf, position); } /** {@inheritDoc} */ @Override public int read(byte[] buf, int off, int len) throws IOException { return delegate.read(buf, off, len); } /** {@inheritDoc} */ @Override public int write(ByteBuffer srcBuf) throws IOException { CountDownLatch latch = fileIOLatch.get(); if (latch != null && Thread.currentThread().getName().contains("checkpoint")) try { latch.await(); } catch (InterruptedException ex) { throw new IgniteException(ex); } return delegate.write(srcBuf); } /** {@inheritDoc} */ @Override public int write(ByteBuffer srcBuf, long position) throws IOException { CountDownLatch latch = fileIOLatch.get(); if (latch != null && Thread.currentThread().getName().contains("checkpoint")) try { latch.await(); } catch (InterruptedException ex) { throw new IgniteException(ex); } return delegate.write(srcBuf, position); } /** {@inheritDoc} */ @Override public void write(byte[] buf, int off, int len) throws IOException { CountDownLatch latch = fileIOLatch.get(); if (latch != null && Thread.currentThread().getName().contains("checkpoint")) try { latch.await(); } catch (InterruptedException ex) { throw new IgniteException(ex); } delegate.write(buf, off, len); } /** {@inheritDoc} */ @Override public MappedByteBuffer map(int maxWalSegmentSize) throws IOException { return delegate.map(maxWalSegmentSize); } /** {@inheritDoc} */ @Override public void force() throws IOException { delegate.force(); } /** {@inheritDoc} */ @Override public 
void force(boolean withMetadata) throws IOException { delegate.force(withMetadata); } /** {@inheritDoc} */ @Override public long size() throws IOException { return delegate.size(); } /** {@inheritDoc} */ @Override public void clear() throws IOException { delegate.clear(); } /** {@inheritDoc} */ @Override public void close() throws IOException { delegate.close(); } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.indices.recovery.RecoveryState; import 
org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster.RestartCallback;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.elasticsearch.test.store.MockFSIndexStore;

import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.IntStream;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Integration tests for recovering indices from the on-disk gateway after full cluster restarts:
 * data survival, primary-term monotonicity, metadata (mappings/templates/aliases) recovery and
 * file reuse during peer recovery.
 */
@ClusterScope(numDataNodes = 0, scope = Scope.TEST)
public class RecoveryFromGatewayIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(MockFSIndexStore.TestPlugin.class);
    }

    /** Single node: indexed + refreshed docs must survive two full restarts. */
    public void testOneNodeRecoverFromGateway() throws Exception {
        internalCluster().startNode();

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("appAccountIds").field("type", "text").endObject().endObject()
            .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));

        client().prepareIndex("test", "type1", "10990239").setSource(jsonBuilder().startObject()
            .startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "10990473").setSource(jsonBuilder().startObject()
            .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "10990513").setSource(jsonBuilder().startObject()
            .startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "10990695").setSource(jsonBuilder().startObject()
            .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "11026351").setSource(jsonBuilder().startObject()
            .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();

        refresh();
        assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)).execute().actionGet(), 2);
        ensureYellow("test"); // wait for primary allocations here otherwise if we have a lot of shards we might have a
        // shard that is still in post recovery when we restart and the ensureYellow() below will timeout

        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        client().admin().indices().prepareRefresh().execute().actionGet();
        assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)).execute().actionGet(), 2);

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        client().admin().indices().prepareRefresh().execute().actionGet();
        assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)).execute().actionGet(), 2);
    }

    /**
     * Captures the current primary term per index/shard; when {@code previousTerms} is given,
     * also asserts every term strictly increased (i.e. the restart bumped primary terms).
     */
    private Map<String, long[]> assertAndCapturePrimaryTerms(Map<String, long[]> previousTerms) {
        if (previousTerms == null) {
            previousTerms = new HashMap<>();
        }
        final Map<String, long[]> result = new HashMap<>();
        final ClusterState state = client().admin().cluster().prepareState().get().getState();
        for (ObjectCursor<IndexMetaData> cursor : state.metaData().indices().values()) {
            final IndexMetaData indexMetaData = cursor.value;
            final String index = indexMetaData.getIndex().getName();
            final long[] previous = previousTerms.get(index);
            final long[] current = IntStream.range(0, indexMetaData.getNumberOfShards()).mapToLong(indexMetaData::primaryTerm).toArray();
            if (previous == null) {
                result.put(index, current);
            } else {
                assertThat("number of terms changed for index [" + index + "]", current.length, equalTo(previous.length));
                for (int shard = 0; shard < current.length; shard++) {
                    assertThat("primary term didn't increase for [" + index + "][" + shard + "]", current[shard], greaterThan(previous[shard]));
                }
                result.put(index, current);
            }
        }
        return result;
    }

    /** Single node, no explicit flush: translog replay must restore all docs across restarts. */
    public void testSingleNodeNoFlush() throws Exception {
        internalCluster().startNode();

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("field").field("type", "text").endObject().startObject("num").field("type", "integer").endObject().endObject()
            .endObject().endObject().string();
        // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test.
        int numberOfShards = numberOfShards();
        assertAcked(prepareCreate("test").setSettings(
            SETTING_NUMBER_OF_SHARDS, numberOfShards(),
            SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)
        ).addMapping("type1", mapping));

        int value1Docs;
        int value2Docs;
        boolean indexToAllShards = randomBoolean();

        if (indexToAllShards) {
            // insert enough docs so all shards will have a doc
            value1Docs = randomIntBetween(numberOfShards * 10, numberOfShards * 20);
            value2Docs = randomIntBetween(numberOfShards * 10, numberOfShards * 20);
        } else {
            // insert a two docs, some shards will not have anything
            value1Docs = 1;
            value2Docs = 1;
        }

        // Overwrite the same ids repeatedly to exercise translog replay of updates.
        for (int i = 0; i < 1 + randomInt(100); i++) {
            for (int id = 0; id < Math.max(value1Docs, value2Docs); id++) {
                if (id < value1Docs) {
                    index("test", "type1", "1_" + id,
                        jsonBuilder().startObject().field("field", "value1").startArray("num").value(14).value(179).endArray().endObject()
                    );
                }
                if (id < value2Docs) {
                    index("test", "type1", "2_" + id,
                        jsonBuilder().startObject().field("field", "value2").startArray("num").value(14).endArray().endObject()
                    );
                }
            }
        }

        refresh();

        for (int i = 0; i <= randomInt(10); i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), value1Docs + value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")).get(), value1Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")).get(), value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)).get(), value1Docs);
        }

        if (!indexToAllShards) {
            // we have to verify primaries are started for them to be restored
            logger.info("Ensure all primaries have been started");
            ensureYellow();
        }

        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        for (int i = 0; i <= randomInt(10); i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), value1Docs + value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")).get(), value1Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")).get(), value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)).get(), value1Docs);
        }

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        for (int i = 0; i <= randomInt(10); i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), value1Docs + value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")).get(), value1Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")).get(), value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)).get(), value1Docs);
        }
    }

    /** Single node, mixed flushed/unflushed docs: both must survive two restarts. */
    public void testSingleNodeWithFlush() throws Exception {
        internalCluster().startNode();
        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).execute().actionGet();
        flush();
        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).execute().actionGet();
        refresh();

        assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);

        ensureYellow("test"); // wait for primary allocations here otherwise if we have a lot of shards we might have a
        // shard that is still in post recovery when we restart and the ensureYellow() below will timeout

        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }
    }

    /** Two nodes; first node's data dir is wiped on restart — replica copy must restore it. */
    public void testTwoNodeFirstNodeCleared() throws Exception {
        final String firstNode = internalCluster().startNode();
        internalCluster().startNode();

        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).execute().actionGet();
        flush();
        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).execute().actionGet();
        refresh();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureGreen();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }

        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);

        internalCluster().fullRestart(new RestartCallback() {
            @Override
            public Settings onNodeStopped(String nodeName) throws Exception {
                return Settings.builder().put("gateway.recover_after_nodes", 2).build();
            }

            @Override
            public boolean clearData(String nodeName) {
                return firstNode.equals(nodeName);
            }
        });

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureGreen();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }
    }

    /** Writes extra data/metadata while one node is down; the restarted cluster must pick the latest version. */
    public void testLatestVersionLoaded() throws Exception {
        // clean two nodes
        internalCluster().startNodesAsync(2, Settings.builder().put("gateway.recover_after_nodes", 2).build()).get();

        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).execute().actionGet();
        client().admin().indices().prepareFlush().execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).execute().actionGet();
        client().admin().indices().prepareRefresh().execute().actionGet();

        logger.info("--> running cluster_health (wait for the shards to startup)");
        ensureGreen();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }

        String metaDataUuid = client().admin().cluster().prepareState().execute().get().getState().getMetaData().clusterUUID();
        assertThat(metaDataUuid, not(equalTo("_na_")));

        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);

        logger.info("--> closing first node, and indexing more data to the second node");
        internalCluster().fullRestart(new RestartCallback() {

            @Override
            public void doAfterNodes(int numNodes, Client client) throws Exception {
                if (numNodes == 1) {
                    logger.info("--> one node is closed - start indexing data into the second one");
                    client.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().field("field", "value3").endObject()).execute().actionGet();
                    // TODO: remove once refresh doesn't fail immediately if there a master block:
                    // https://github.com/elastic/elasticsearch/issues/9997
                    client.admin().cluster().prepareHealth("test").setWaitForYellowStatus().get();
                    client.admin().indices().prepareRefresh().execute().actionGet();

                    for (int i = 0; i < 10; i++) {
                        assertHitCount(client.prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 3);
                    }

                    logger.info("--> add some metadata, additional type and template");
                    client.admin().indices().preparePutMapping("test").setType("type2")
                        .setSource(jsonBuilder().startObject().startObject("type2").endObject().endObject())
                        .execute().actionGet();
                    client.admin().indices().preparePutTemplate("template_1")
                        .setTemplate("te*")
                        .setOrder(0)
                        .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                            .startObject("field1").field("type", "text").field("store", true).endObject()
                            .startObject("field2").field("type", "keyword").field("store", true).endObject()
                            .endObject().endObject().endObject())
                        .execute().actionGet();
                    client.admin().indices().prepareAliases().addAlias("test", "test_alias", QueryBuilders.termQuery("field", "value")).execute().actionGet();
                    logger.info("--> starting two nodes back, verifying we got the latest version");
                }

            }
        });

        logger.info("--> running cluster_health (wait for the shards to startup)");
        ensureGreen();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        assertThat(client().admin().cluster().prepareState().execute().get().getState().getMetaData().clusterUUID(), equalTo(metaDataUuid));

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 3);
        }

        ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState();
        assertThat(state.metaData().index("test").mapping("type2"), notNullValue());

        assertThat(state.metaData().templates().get("template_1").template(), equalTo("te*"));
        assertThat(state.metaData().index("test").getAliases().get("test_alias"), notNullValue());
        assertThat(state.metaData().index("test").getAliases().get("test_alias").filter(), notNullValue());
    }

    /** Verifies replica peer recovery reuses local files (or sync ids) instead of copying everything. */
    public void testReusePeerRecovery() throws Exception {
        final Settings settings = Settings.builder()
            .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false)
            .put("gateway.recover_after_nodes", 4)
            .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 4)
            .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 4)
            .put(MockFSDirectoryService.CRASH_INDEX_SETTING.getKey(), false).build();

        internalCluster().startNodesAsync(4, settings).get();
        // prevent any rebalance actions during the peer recovery
        // if we run into a relocation the reuse count will be 0 and this fails the test. We are testing here if
        // we reuse the files on disk after full restarts for replicas.
        assertAcked(prepareCreate("test").setSettings(Settings.builder()
            .put(indexSettings())
            .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)));
        ensureGreen();
        logger.info("--> indexing docs");
        for (int i = 0; i < 1000; i++) {
            client().prepareIndex("test", "type").setSource("field", "value").execute().actionGet();
            if ((i % 200) == 0) {
                client().admin().indices().prepareFlush().execute().actionGet();
            }
        }
        if (randomBoolean()) {
            client().admin().indices().prepareFlush().execute().actionGet();
        }
        logger.info("Running Cluster Health");
        ensureGreen();
        client().admin().indices().prepareForceMerge("test").setMaxNumSegments(100).get(); // just wait for merges
        client().admin().indices().prepareFlush().setWaitIfOngoing(true).setForce(true).get();

        boolean useSyncIds = randomBoolean();
        if (useSyncIds == false) {
            logger.info("--> disabling allocation while the cluster is shut down");

            // Disable allocations while we are closing nodes
            client().admin().cluster().prepareUpdateSettings()
                .setTransientSettings(Settings.builder()
                    .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE))
                .get();
            logger.info("--> full cluster restart");
            internalCluster().fullRestart();

            logger.info("--> waiting for cluster to return to green after first shutdown");
            ensureGreen();
        } else {
            logger.info("--> trying to sync flush");
            assertEquals(client().admin().indices().prepareSyncedFlush("test").get().failedShards(), 0);
            assertSyncIdsNotNull();
        }

        logger.info("--> disabling allocation while the cluster is shut down{}", useSyncIds ? "" : " a second time");
        // Disable allocations while we are closing nodes
        client().admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Settings.builder()
                .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE))
            .get();

        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);

        logger.info("--> full cluster restart");
        internalCluster().fullRestart();

        logger.info("--> waiting for cluster to return to green after {}shutdown", useSyncIds ? "" : "second ");
        ensureGreen();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);

        if (useSyncIds) {
            assertSyncIdsNotNull();
        }
        RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").get();
        for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) {
            long recovered = 0;
            // Only the segments_N file is expected to be copied; everything else should be reused.
            for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) {
                if (file.name().startsWith("segments")) {
                    recovered += file.length();
                }
            }
            if (!recoveryState.getPrimary() && (useSyncIds == false)) {
                logger.info("--> replica shard {} recovered from {} to {}, recovered {}, reuse {}",
                    recoveryState.getShardId().getId(), recoveryState.getSourceNode().getName(), recoveryState.getTargetNode().getName(),
                    recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
                assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered));
                assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0L));
                // we have to recover the segments file since we commit the translog ID on engine startup
                assertThat("all bytes should be reused except of the segments file", recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes() - recovered));
                assertThat("no files should be recovered except of the segments file", recoveryState.getIndex().recoveredFileCount(), equalTo(1));
                assertThat("all files should be reused except of the segments file", recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount() - 1));
                assertThat("> 0 files should be reused", recoveryState.getIndex().reusedFileCount(), greaterThan(0));
            } else {
                if (useSyncIds && !recoveryState.getPrimary()) {
                    logger.info("--> replica shard {} recovered from {} to {} using sync id, recovered {}, reuse {}",
                        recoveryState.getShardId().getId(), recoveryState.getSourceNode().getName(), recoveryState.getTargetNode().getName(),
                        recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
                }
                assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0L));
                assertThat(recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes()));
                assertThat(recoveryState.getIndex().recoveredFileCount(), equalTo(0));
                assertThat(recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount()));
            }
        }
    }

    /** Asserts every shard commit of index "test" carries a sync id. */
    public void assertSyncIdsNotNull() {
        IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
        for (ShardStats shardStats : indexStats.getShards()) {
            assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
        }
    }

    /** Restarts only the second (initially empty) node and checks the index is still found. */
    public void testRecoveryDifferentNodeOrderStartup() throws Exception {
        // we need different data paths so we make sure we start the second node fresh
        final String node_1 = internalCluster().startNode(Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build());
        client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet();
        internalCluster().startNode(Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build());
        ensureGreen();
        Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null);
        internalCluster().fullRestart(new RestartCallback() {

            @Override
            public boolean doRestart(String nodeName) {
                return !node_1.equals(nodeName);
            }
        });
        ensureYellow();
        primaryTerms = assertAndCapturePrimaryTerms(primaryTerms);
        assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(true));
        assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet(), 1);
    }

    /** A node must report locally started shards even before the recovered cluster state is processed. */
    public void testStartedShardFoundIfStateNotYetProcessed() throws Exception {
        // nodes may need to report the shards they processed the initial recovered cluster state from the master
        final String nodeName = internalCluster().startNode();
        assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1));
        final Index index = resolveIndex("test");
        final ShardId shardId = new ShardId(index, 0);
        index("test", "type", "1");
        flush("test");

        final boolean corrupt = randomBoolean();

        internalCluster().fullRestart(new RestartCallback() {
            @Override
            public Settings onNodeStopped(String nodeName) throws Exception {
                // make sure state is not recovered
                return Settings.builder().put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 2).build();
            }
        });

        if (corrupt) {
            for (Path path : internalCluster().getInstance(NodeEnvironment.class, nodeName).availableShardPaths(shardId)) {
                final Path indexPath = path.resolve(ShardPath.INDEX_FOLDER_NAME);
                if (Files.exists(indexPath)) { // multi data path might only have one path in use
                    try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) {
                        for (Path item : stream) {
                            if (item.getFileName().toString().startsWith("segments_")) {
                                logger.debug("--> deleting [{}]", item);
                                Files.delete(item);
                            }
                        }
                    }
                }

            }
        }

        DiscoveryNode node = internalCluster().getInstance(ClusterService.class, nodeName).localNode();

        TransportNodesListGatewayStartedShards.NodesGatewayStartedShards response;
        response = internalCluster().getInstance(TransportNodesListGatewayStartedShards.class)
            .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new DiscoveryNode[]{node}))
            .get();

        assertThat(response.getNodes(), hasSize(1));
        assertThat(response.getNodes().get(0).allocationId(), notNullValue());
        if (corrupt) {
            assertThat(response.getNodes().get(0).storeException(), notNullValue());
        } else {
            assertThat(response.getNodes().get(0).storeException(), nullValue());
        }

        // start another node so cluster consistency checks won't time out due to the lack of state
        internalCluster().startNode();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.jms.tests; import javax.jms.Connection; import javax.jms.ConnectionFactory; import javax.jms.Destination; import javax.jms.JMSContext; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.Queue; import javax.jms.Session; import javax.jms.Topic; import javax.jms.TopicConnectionFactory; import javax.jms.XAConnectionFactory; import javax.naming.InitialContext; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.jms.JMSFactoryType; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.postoffice.impl.LocalQueueBinding; import org.apache.activemq.artemis.core.security.Role; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.jms.tests.tools.ServerManagement; import org.apache.activemq.artemis.jms.tests.tools.container.Server; import 
org.apache.activemq.artemis.jms.tests.util.ProxyAssertSupport; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.Rule; import org.junit.rules.TestRule; import org.junit.rules.TestWatcher; import org.junit.runner.Description; /** * @deprecated this infrastructure should not be used for new code. New tests should go into * org.apache.activemq.tests.integration.jms at the integration-tests project. */ @Deprecated public abstract class ActiveMQServerTestCase { public static final int MAX_TIMEOUT = 1000 * 10 /* seconds */; public static final int MIN_TIMEOUT = 1000 * 1 /* seconds */; private static final int DRAIN_WAIT_TIME = 250; protected final JmsTestLogger log = JmsTestLogger.LOGGER; /** * Some testcases are time sensitive, and we need to make sure a GC would happen before certain scenarios */ public static void forceGC() { WeakReference<Object> dumbReference = new WeakReference<>(new Object()); // A loop that will wait GC, using the minimal time as possible while (dumbReference.get() != null) { System.gc(); try { Thread.sleep(500); } catch (InterruptedException e) { } } } protected static List<Server> servers = new ArrayList<>(); protected static Topic topic1; protected static Topic topic2; protected static Topic topic3; protected Queue queue1; protected Queue queue2; protected Queue queue3; protected Queue queue4; private final Set<Connection> connectionsSet = new HashSet<>(); private final Set<JMSContext> contextSet = new HashSet<>(); @Rule public TestRule watcher = new TestWatcher() { @Override protected void starting(Description description) { log.info(String.format("#*#*# Starting test: %s()...", description.getMethodName())); } @Override protected void finished(Description description) { log.info(String.format("#*#*# Finished test: %s()...", description.getMethodName())); } @Override protected void failed(Throwable e, Description description) { ActiveMQServerTestCase.tearDownAllServers(); } }; @Before public void 
setUp() throws Exception { System.setProperty("java.naming.factory.initial", getContextFactory()); try { // create any new server we need ActiveMQServerTestCase.servers.add(ServerManagement.create()); // start the servers if needed if (!ActiveMQServerTestCase.servers.get(0).isStarted()) { ActiveMQServerTestCase.servers.get(0).start(getConfiguration(), true); } // deploy the objects for this test deployAdministeredObjects(); lookUp(); } catch (Exception e) { // if we get here we need to clean up for the next test e.printStackTrace(); ActiveMQServerTestCase.servers.get(0).stop(); throw e; } // empty the queues checkEmpty(queue1); checkEmpty(queue2); checkEmpty(queue3); checkEmpty(queue4); // Check no subscriptions left lying around checkNoSubscriptions(topic1); checkNoSubscriptions(topic2); checkNoSubscriptions(topic3); } @After public void tearDown() throws Exception { for (JMSContext context : contextSet) { context.close(); } contextSet.clear(); for (Connection localConn : connectionsSet) { localConn.close(); } connectionsSet.clear(); } public void stop() throws Exception { ActiveMQServerTestCase.servers.get(0).stop(); } public String getContextFactory() { return org.apache.activemq.artemis.jms.tests.tools.container.InVMInitialContextFactory.class.getCanonicalName(); } public void start() throws Exception { System.setProperty("java.naming.factory.initial", getContextFactory()); ActiveMQServerTestCase.servers.get(0).start(getConfiguration(), false); } public void startNoDelete() throws Exception { System.setProperty("java.naming.factory.initial", getContextFactory()); ActiveMQServerTestCase.servers.get(0).start(getConfiguration(), false); } public void stopServerPeer() throws Exception { ActiveMQServerTestCase.servers.get(0).stopServerPeer(); } public void startServerPeer() throws Exception { System.setProperty("java.naming.factory.initial", getContextFactory()); ActiveMQServerTestCase.servers.get(0).startServerPeer(); } protected HashMap<String, Object> 
getConfiguration() { return new HashMap<>(); } protected void deployAndLookupAdministeredObjects() throws Exception { createTopic("Topic1"); createTopic("Topic2"); createTopic("Topic3"); createQueue("Queue1"); createQueue("Queue2"); createQueue("Queue3"); createQueue("Queue4"); lookUp(); } protected void deployAdministeredObjects() throws Exception { // set DLA and expiry to avoid spamming the log with warnings getJmsServer().getAddressSettingsRepository().addMatch("#", new AddressSettings().setDeadLetterAddress(SimpleString.toSimpleString("DLA")).setExpiryAddress(SimpleString.toSimpleString("Expiry"))); createTopic("Topic1"); createTopic("Topic2"); createTopic("Topic3"); createQueue("Queue1"); createQueue("Queue2"); createQueue("Queue3"); createQueue("Queue4"); deployConnectionFactory(0, JMSFactoryType.CF, "ConnectionFactory", "/ConnectionFactory"); deployConnectionFactory(0, JMSFactoryType.TOPIC_CF, "CF_TOPIC", "/CF_TOPIC"); deployConnectionFactory(0, JMSFactoryType.XA_CF, "CF_XA_TRUE", "/CF_XA_TRUE"); } private void lookUp() throws Exception { InitialContext ic = getInitialContext(); ActiveMQServerTestCase.topic1 = (Topic) ic.lookup("/topic/Topic1"); ActiveMQServerTestCase.topic2 = (Topic) ic.lookup("/topic/Topic2"); ActiveMQServerTestCase.topic3 = (Topic) ic.lookup("/topic/Topic3"); queue1 = (Queue) ic.lookup("/queue/Queue1"); queue2 = (Queue) ic.lookup("/queue/Queue2"); queue3 = (Queue) ic.lookup("/queue/Queue3"); queue4 = (Queue) ic.lookup("/queue/Queue4"); } @AfterClass public static final void tearDownAllServers() { for (Server s : servers) { try { s.stop(); } catch (Exception cause) { // ignore } } servers.clear(); } protected ActiveMQServer getJmsServer() throws Exception { return ActiveMQServerTestCase.servers.get(0).getActiveMQServer(); } protected void checkNoSubscriptions(final Topic topic) throws Exception { } protected void drainDestination(final ConnectionFactory cf, final Destination dest) throws JMSException { Connection conn = null; try { conn = 
cf.createConnection(); Session sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE); MessageConsumer cons = sess.createConsumer(dest); Message m = null; conn.start(); log.trace("Draining messages from " + dest); while (true) { m = cons.receive(DRAIN_WAIT_TIME); if (m == null) { break; } log.trace("Drained message"); } } finally { if (conn != null) { conn.close(); } } } public InitialContext getInitialContext() throws Exception { return new InitialContext(ServerManagement.getJNDIEnvironment(0)); } public ConnectionFactory getConnectionFactory() throws Exception { return (ConnectionFactory) getInitialContext().lookup("/ConnectionFactory"); } public TopicConnectionFactory getTopicConnectionFactory() throws Exception { return (TopicConnectionFactory) getInitialContext().lookup("/CF_TOPIC"); } public XAConnectionFactory getXAConnectionFactory() throws Exception { return (XAConnectionFactory) getInitialContext().lookup("/CF_XA_TRUE"); } public void createQueue(final String name) throws Exception { ActiveMQServerTestCase.servers.get(0).createQueue(name, null); } public void createTopic(final String name) throws Exception { ActiveMQServerTestCase.servers.get(0).createTopic(name, null); } public void destroyQueue(final String name) throws Exception { ActiveMQServerTestCase.servers.get(0).destroyQueue(name, null); } public void destroyTopic(final String name) throws Exception { ActiveMQServerTestCase.servers.get(0).destroyTopic(name, null); } public void createQueue(final String name, final int i) throws Exception { ActiveMQServerTestCase.servers.get(i).createQueue(name, null); } public void createTopic(final String name, final int i) throws Exception { ActiveMQServerTestCase.servers.get(i).createTopic(name, null); } public void destroyQueue(final String name, final int i) throws Exception { ActiveMQServerTestCase.servers.get(i).destroyQueue(name, null); } public boolean checkNoMessageData() { return false; } public boolean checkEmpty(final Queue queue) throws 
Exception { Long messageCount = ActiveMQServerTestCase.servers.get(0).getMessageCountForQueue(queue.getQueueName()); if (messageCount > 0) { removeAllMessages(queue.getQueueName(), true); } return true; } public boolean checkEmpty(final Queue queue, final int i) { return true; } public boolean checkEmpty(final Topic topic) { return true; } protected void removeAllMessages(final String destName, final boolean isQueue) throws Exception { ActiveMQServerTestCase.servers.get(0).removeAllMessages(destName); } protected boolean assertRemainingMessages(final int expected) throws Exception { String queueName = "Queue1"; Binding binding = servers.get(0).getActiveMQServer().getPostOffice().getBinding(SimpleString.toSimpleString(queueName)); if (binding != null && binding instanceof LocalQueueBinding) { ((LocalQueueBinding) binding).getQueue().flushExecutor(); } Long messageCount = null; for (int i = 0; i < 10; i++) { messageCount = servers.get(0).getMessageCountForQueue(queueName); if (messageCount.longValue() == expected) { break; } else { Thread.sleep(100); } } ProxyAssertSupport.assertEquals(expected, messageCount.intValue()); return expected == messageCount.intValue(); } protected static void assertActiveConnectionsOnTheServer(final int expectedSize) throws Exception { ProxyAssertSupport.assertEquals(expectedSize, ActiveMQServerTestCase.servers.get(0).getActiveMQServer().getActiveMQServerControl().getConnectionCount()); } public static void deployConnectionFactory(final String clientId, final String objectName, final String... jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(0).deployConnectionFactory(clientId, objectName, jndiBindings); } public static void deployConnectionFactory(final String objectName, final int prefetchSize, final String... 
jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(0).deployConnectionFactory(objectName, prefetchSize, jndiBindings); } public static void deployConnectionFactory(final int server, final String objectName, final int prefetchSize, final String... jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(server).deployConnectionFactory(objectName, prefetchSize, jndiBindings); } public static void deployConnectionFactory(final int server, final String objectName, final String... jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(server).deployConnectionFactory(objectName, jndiBindings); } public static void deployConnectionFactory(final int server, JMSFactoryType type, final String objectName, final String... jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(server).deployConnectionFactory(objectName, type, jndiBindings); } public void deployConnectionFactory(final String clientId, final String objectName, final int prefetchSize, final int defaultTempQueueFullSize, final int defaultTempQueuePageSize, final int defaultTempQueueDownCacheSize, final boolean supportsFailover, final boolean supportsLoadBalancing, final int dupsOkBatchSize, final boolean blockOnAcknowledge, final String... jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(0).deployConnectionFactory(clientId, JMSFactoryType.CF, objectName, prefetchSize, defaultTempQueueFullSize, defaultTempQueuePageSize, defaultTempQueueDownCacheSize, supportsFailover, supportsLoadBalancing, dupsOkBatchSize, blockOnAcknowledge, jndiBindings); } public static void deployConnectionFactory(final String objectName, final int prefetchSize, final int defaultTempQueueFullSize, final int defaultTempQueuePageSize, final int defaultTempQueueDownCacheSize, final String... 
jndiBindings) throws Exception { ActiveMQServerTestCase.servers.get(0).deployConnectionFactory(objectName, prefetchSize, defaultTempQueueFullSize, defaultTempQueuePageSize, defaultTempQueueDownCacheSize, jndiBindings); } public static void undeployConnectionFactory(final String objectName) throws Exception { ActiveMQServerTestCase.servers.get(0).undeployConnectionFactory(objectName); } protected List<String> listAllSubscribersForTopic(final String s) throws Exception { return ActiveMQServerTestCase.servers.get(0).listAllSubscribersForTopic(s); } protected Long getMessageCountForQueue(final String s) throws Exception { return ActiveMQServerTestCase.servers.get(0).getMessageCountForQueue(s); } protected Set<Role> getSecurityConfig() throws Exception { return ActiveMQServerTestCase.servers.get(0).getSecurityConfig(); } protected void setSecurityConfig(final Set<Role> defConfig) throws Exception { ActiveMQServerTestCase.servers.get(0).setSecurityConfig(defConfig); } protected void setSecurityConfigOnManager(final String destination, final boolean isQueue, final Set<Role> roles) throws Exception { ActiveMQServerTestCase.servers.get(0).configureSecurityForDestination(destination, isQueue, roles); } protected final JMSContext addContext(JMSContext createContext) { contextSet.add(createContext); return createContext; } protected final Connection addConnection(Connection conn) { connectionsSet.add(conn); return conn; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.managedidentities.v1alpha1.model; /** * Represents a Managed Microsoft Identities Peering. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Managed Service for Microsoft Active Directory API. * For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Peering extends com.google.api.client.json.GenericJson { /** * Required. The full names of the Google Compute Engine [networks](/compute/docs/networks-and- * firewalls#networks) to which the instance is connected. Caller needs to make sure that CIDR * subnets do not overlap between networks, else peering creation will fail. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String authorizedNetwork; /** * Output only. The time the instance was created. * The value may be {@code null}. */ @com.google.api.client.util.Key private String createTime; /** * Required. Full domain resource path for the Managed AD Domain involved in peering. 
The resource * path should be in the form: `projects/{project_id}/locations/global/domains/{domain_name}` * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String domainResource; /** * Optional. Resource labels to represent user provided metadata. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> labels; /** * Output only. Unique name of the peering in this scope including projects and location using the * form: `projects/{project_id}/locations/global/peerings/{peering_id}`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * Output only. The current state of this Peering. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String state; /** * Output only. Additional information about the current status of this peering, if available. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String statusMessage; /** * Output only. Last update time. * The value may be {@code null}. */ @com.google.api.client.util.Key private String updateTime; /** * Required. The full names of the Google Compute Engine [networks](/compute/docs/networks-and- * firewalls#networks) to which the instance is connected. Caller needs to make sure that CIDR * subnets do not overlap between networks, else peering creation will fail. * @return value or {@code null} for none */ public java.lang.String getAuthorizedNetwork() { return authorizedNetwork; } /** * Required. The full names of the Google Compute Engine [networks](/compute/docs/networks-and- * firewalls#networks) to which the instance is connected. Caller needs to make sure that CIDR * subnets do not overlap between networks, else peering creation will fail. 
* @param authorizedNetwork authorizedNetwork or {@code null} for none */ public Peering setAuthorizedNetwork(java.lang.String authorizedNetwork) { this.authorizedNetwork = authorizedNetwork; return this; } /** * Output only. The time the instance was created. * @return value or {@code null} for none */ public String getCreateTime() { return createTime; } /** * Output only. The time the instance was created. * @param createTime createTime or {@code null} for none */ public Peering setCreateTime(String createTime) { this.createTime = createTime; return this; } /** * Required. Full domain resource path for the Managed AD Domain involved in peering. The resource * path should be in the form: `projects/{project_id}/locations/global/domains/{domain_name}` * @return value or {@code null} for none */ public java.lang.String getDomainResource() { return domainResource; } /** * Required. Full domain resource path for the Managed AD Domain involved in peering. The resource * path should be in the form: `projects/{project_id}/locations/global/domains/{domain_name}` * @param domainResource domainResource or {@code null} for none */ public Peering setDomainResource(java.lang.String domainResource) { this.domainResource = domainResource; return this; } /** * Optional. Resource labels to represent user provided metadata. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getLabels() { return labels; } /** * Optional. Resource labels to represent user provided metadata. * @param labels labels or {@code null} for none */ public Peering setLabels(java.util.Map<String, java.lang.String> labels) { this.labels = labels; return this; } /** * Output only. Unique name of the peering in this scope including projects and location using the * form: `projects/{project_id}/locations/global/peerings/{peering_id}`. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Output only. 
Unique name of the peering in this scope including projects and location using the * form: `projects/{project_id}/locations/global/peerings/{peering_id}`. * @param name name or {@code null} for none */ public Peering setName(java.lang.String name) { this.name = name; return this; } /** * Output only. The current state of this Peering. * @return value or {@code null} for none */ public java.lang.String getState() { return state; } /** * Output only. The current state of this Peering. * @param state state or {@code null} for none */ public Peering setState(java.lang.String state) { this.state = state; return this; } /** * Output only. Additional information about the current status of this peering, if available. * @return value or {@code null} for none */ public java.lang.String getStatusMessage() { return statusMessage; } /** * Output only. Additional information about the current status of this peering, if available. * @param statusMessage statusMessage or {@code null} for none */ public Peering setStatusMessage(java.lang.String statusMessage) { this.statusMessage = statusMessage; return this; } /** * Output only. Last update time. * @return value or {@code null} for none */ public String getUpdateTime() { return updateTime; } /** * Output only. Last update time. * @param updateTime updateTime or {@code null} for none */ public Peering setUpdateTime(String updateTime) { this.updateTime = updateTime; return this; } @Override public Peering set(String fieldName, Object value) { return (Peering) super.set(fieldName, value); } @Override public Peering clone() { return (Peering) super.clone(); } }
// ========================================================================
// Copyright (c) 2004-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================

package org.eclipse.jetty.io.nio;

import java.io.IOException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;

import org.eclipse.jetty.io.AsyncEndPoint;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.ConnectedEndPoint;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EofException;
import org.eclipse.jetty.io.nio.SelectorManager.SelectSet;
import org.eclipse.jetty.util.log.Log;

/* ------------------------------------------------------------ */
/**
 * An Endpoint that can be scheduled by {@link SelectorManager}.
 */
public class SelectChannelEndPoint extends ChannelEndPoint implements AsyncEndPoint, ConnectedEndPoint
{
    // select set and manager this endpoint is registered with
    private final SelectorManager.SelectSet _selectSet;
    private final SelectorManager _manager;
    // task handed to the manager's dispatcher; runs handle() on a pooled thread
    private final Runnable _handler = new Runnable()
        {
            public void run() { handle(); }
        };

    // current connection; may be swapped by handle() when the protocol upgrades
    private volatile Connection _connection;
    // true while a thread is dispatched to this endpoint (guarded by 'this')
    private boolean _dispatched = false;
    // set when a second dispatch is requested while already dispatched
    private boolean _redispatched = false;
    // false when a flush could not write everything; cleared by the selector
    private volatile boolean _writable = true;

    private  SelectionKey _key;
    // interest ops to be applied to _key by doUpdateKey()
    private int _interestOps;
    // true while a thread is blocked in blockReadable()/blockWritable()
    private boolean _readBlocked;
    private boolean _writeBlocked;
    private boolean _open;
    // last activity timestamp for idle expiry; 0 means idle checking cancelled
    private volatile long _idleTimestamp;

    /* ------------------------------------------------------------ */
    public SelectChannelEndPoint(SocketChannel channel, SelectSet selectSet, SelectionKey key, int maxIdleTime)
        throws IOException
    {
        super(channel, maxIdleTime);

        _manager = selectSet.getManager();
        _selectSet = selectSet;
        _dispatched = false;
        _redispatched = false;
        _open=true;
        _key = key;

        _connection = _manager.newConnection(channel,this);

        scheduleIdle();
    }

    /* ------------------------------------------------------------ */
    public SelectChannelEndPoint(SocketChannel channel, SelectSet selectSet, SelectionKey key)
        throws IOException
    {
        super(channel);

        _manager = selectSet.getManager();
        _selectSet = selectSet;
        _dispatched = false;
        _redispatched = false;
        _open=true;
        _key = key;

        _connection = _manager.newConnection(channel,this);

        scheduleIdle();
    }

    /* ------------------------------------------------------------ */
    public SelectionKey getSelectionKey()
    {
        synchronized (this)
        {
            return _key;
        }
    }

    /* ------------------------------------------------------------ */
    public SelectorManager getSelectManager()
    {
        return _manager;
    }

    /* ------------------------------------------------------------ */
    public Connection getConnection()
    {
        return _connection;
    }

    /* ------------------------------------------------------------ */
    // replaces the connection (e.g. protocol upgrade) and notifies the manager
    public void setConnection(Connection connection)
    {
        Connection old=_connection;
        _connection=connection;
        _manager.endPointUpgraded(this,old);
    }

    /* ------------------------------------------------------------ */
    /** Called by selectSet to schedule handling
     *
     * <p>Runs on the selector thread when this endpoint's key is selected.
     * Either wakes threads blocked in blockReadable()/blockWritable(), or
     * dispatches a handler thread; in both cases interest ops are zeroed so
     * the selector does not spin on the still-ready key.
     */
    public void schedule()
    {
        synchronized (this)
        {
            // If there is no key, then do nothing
            if (_key == null || !_key.isValid())
            {
                _readBlocked=false;
                _writeBlocked=false;
                this.notifyAll();
                return;
            }

            // If there are threads dispatched reading and writing
            if (_readBlocked || _writeBlocked)
            {
                // assert _dispatched;
                if (_readBlocked && _key.isReadable())
                    _readBlocked=false;
                if (_writeBlocked && _key.isWritable())
                    _writeBlocked=false;

                // wake them up is as good as a dispatched.
                this.notifyAll();

                // we are not interested in further selecting
                _key.interestOps(0);
                return;
            }

            // Otherwise if we are still dispatched
            if (!isReadyForDispatch())
            {
                // we are not interested in further selecting
                _key.interestOps(0);
                return;
            }

            // Remove writeable op
            if ((_key.readyOps() & SelectionKey.OP_WRITE) == SelectionKey.OP_WRITE &&
                (_key.interestOps() & SelectionKey.OP_WRITE) == SelectionKey.OP_WRITE)
            {
                // Remove writeable op
                _interestOps = _key.interestOps() & ~SelectionKey.OP_WRITE;
                _key.interestOps(_interestOps);
                _writable = true; // Once writable is in ops, only removed with dispatch.
            }

            if (_dispatched)
                _key.interestOps(0);
            else
                dispatch();
        }
    }

    /* ------------------------------------------------------------ */
    // dispatches _handler to the manager's thread pool; if already dispatched,
    // records a redispatch request instead so handle() loops once more
    public void dispatch()
    {
        synchronized(this)
        {
            if (_dispatched)
                _redispatched=true;
            else
            {
                _dispatched = _manager.dispatch(_handler);
                if(!_dispatched)
                {
                    Log.warn("Dispatched Failed!");
                    updateKey();
                }
            }
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Called when a dispatched thread is no longer handling the endpoint.
     * The selection key operations are updated.
     * @return If false is returned, the endpoint has been redispatched and
     * thread must keep handling the endpoint.
     */
    private boolean undispatch()
    {
        synchronized (this)
        {
            if (_redispatched)
            {
                _redispatched=false;
                return false;
            }
            _dispatched = false;
            updateKey();
        }
        return true;
    }

    /* ------------------------------------------------------------ */
    // marks now as the last-activity time for idle expiry
    public void scheduleIdle()
    {
        _idleTimestamp=System.currentTimeMillis();
    }

    /* ------------------------------------------------------------ */
    // disables idle expiry until scheduleIdle() is called again
    public void cancelIdle()
    {
        _idleTimestamp=0;
    }

    /* ------------------------------------------------------------ */
    // called periodically by the select set; expires the endpoint when idle
    public void checkIdleTimestamp(long now)
    {
        if (_idleTimestamp!=0 && _maxIdleTime!=0 && now>(_idleTimestamp+_maxIdleTime))
        {
            idleExpired();
        }
    }

    /* ------------------------------------------------------------ */
    protected void idleExpired()
    {
        try
        {
            close();
        }
        catch (IOException e)
        {
            Log.ignore(e);
        }
    }

    /* ------------------------------------------------------------ */
    /*
     * Gathering flush; if nothing could be written, records non-writable and
     * (when no thread is dispatched) updates the key to select for OP_WRITE.
     */
    @Override
    public int flush(Buffer header, Buffer buffer, Buffer trailer) throws IOException
    {
        int l = super.flush(header, buffer, trailer);
        if (!(_writable=l!=0))
        {
            synchronized (this)
            {
                if (!_dispatched)
                    updateKey();
            }
        }
        return l;
    }

    /* ------------------------------------------------------------ */
    /*
     * Single-buffer flush; same non-writable bookkeeping as the gathering form.
     */
    @Override
    public int flush(Buffer buffer) throws IOException
    {
        int l = super.flush(buffer);
        if (!(_writable=l!=0))
        {
            synchronized (this)
            {
                if (!_dispatched)
                    updateKey();
            }
        }
        return l;
    }

    /* ------------------------------------------------------------ */
    // ready for a new dispatch only when no thread holds the endpoint and the
    // connection has not suspended itself
    public boolean isReadyForDispatch()
    {
        synchronized (this)
        {
            return !(_dispatched || getConnection().isSuspended());
        }
    }

    /* ------------------------------------------------------------ */
    /*
     * Allows thread to block waiting for further events.
     */
    @Override
    public boolean blockReadable(long timeoutMs) throws IOException
    {
        synchronized (this)
        {
            long start=_selectSet.getNow();
            try
            {
                _readBlocked=true;
                while (isOpen() && _readBlocked)
                {
                    try
                    {
                        updateKey();
                        this.wait(timeoutMs);

                        timeoutMs -= _selectSet.getNow()-start;
                        if (_readBlocked && timeoutMs<=0)
                            return false;
                    }
                    catch (InterruptedException e)
                    {
                        Log.warn(e);
                    }
                }
            }
            finally
            {
                _readBlocked=false;
            }
        }
        return true;
    }

    /* ------------------------------------------------------------ */
    /*
     * Allows thread to block waiting for further events.
     */
    @Override
    public boolean blockWritable(long timeoutMs) throws IOException
    {
        synchronized (this)
        {
            long start=_selectSet.getNow();
            try
            {
                _writeBlocked=true;
                while (isOpen() && _writeBlocked)
                {
                    try
                    {
                        updateKey();
                        this.wait(timeoutMs);

                        timeoutMs -= _selectSet.getNow()-start;
                        if (_writeBlocked && timeoutMs<=0)
                            return false;
                    }
                    catch (InterruptedException e)
                    {
                        Log.warn(e);
                    }
                }
            }
            finally
            {
                _writeBlocked=false;
                // NOTE(review): only the write path re-arms the idle timer here;
                // blockReadable() has no matching call — confirm this asymmetry is intended
                if (_idleTimestamp!=-1)
                    scheduleIdle();
            }
        }
        return true;
    }

    /* ------------------------------------------------------------ */
    public void setWritable(boolean writable)
    {
        _writable=writable;
    }

    /* ------------------------------------------------------------ */
    // marks the endpoint non-writable and asks the selector to watch OP_WRITE
    public void scheduleWrite()
    {
        _writable=false;
        updateKey();
    }

    /* ------------------------------------------------------------ */
    /**
     * Updates selection key. Adds operations types to the selection key as needed. No operations
     * are removed as this is only done during dispatch. This method records the new key and
     * schedules a call to doUpdateKey to do the keyChange
     */
    private void updateKey()
    {
        synchronized (this)
        {
            int ops=-1;
            if (getChannel().isOpen())
            {
                // want OP_READ unless a thread is dispatched (or one is blocked reading);
                // want OP_WRITE when not writable (or a thread is blocked writing)
                _interestOps =
                    ((!_dispatched || _readBlocked)  ? SelectionKey.OP_READ  : 0)
                |   ((!_writable   || _writeBlocked) ? SelectionKey.OP_WRITE : 0);
                try
                {
                    ops = ((_key!=null && _key.isValid())?_key.interestOps():-1);
                }
                catch(Exception e)
                {
                    _key=null;
                    Log.ignore(e);
                }
            }

            // no change needed: skip waking the selector
            if(_interestOps == ops && getChannel().isOpen())
                return;
        }
        // queue the actual key change onto the selector thread and wake it
        _selectSet.addChange(this);
        _selectSet.wakeup();
    }

    /* ------------------------------------------------------------ */
    /**
     * Synchronize the interestOps with the actual key. Call is scheduled by a call to updateKey
     */
    void doUpdateKey()
    {
        synchronized (this)
        {
            if (getChannel().isOpen())
            {
                if (_interestOps>0)
                {
                    if (_key==null || !_key.isValid())
                    {
                        SelectableChannel sc = (SelectableChannel)getChannel();
                        if (sc.isRegistered())
                        {
                            // key cancelled but channel still registered: retry via updateKey
                            updateKey();
                        }
                        else
                        {
                            try
                            {
                                _key=((SelectableChannel)getChannel()).register(_selectSet.getSelector(),_interestOps,this);
                            }
                            catch (Exception e)
                            {
                                // registration failed: tear the endpoint down
                                Log.ignore(e);
                                if (_key!=null && _key.isValid())
                                {
                                    _key.cancel();
                                }
                                cancelIdle();

                                if (_open)
                                {
                                    _selectSet.destroyEndPoint(this);
                                }
                                _open=false;
                                _key = null;
                            }
                        }
                    }
                    else
                    {
                        _key.interestOps(_interestOps);
                    }
                }
                else
                {
                    if (_key!=null && _key.isValid())
                        _key.interestOps(0);
                    else
                        _key=null;
                }
            }
            else
            {
                // channel closed: cancel the key and destroy the endpoint once
                if (_key!=null && _key.isValid())
                    _key.cancel();

                cancelIdle();
                if (_open)
                {
                    _selectSet.destroyEndPoint(this);
                }
                _open=false;
                _key = null;
            }
        }
    }

    /* ------------------------------------------------------------ */
    /*
     * Entry point for dispatched threads: drives the connection's handle()
     * loop (following connection upgrades), closing on EOF/IO errors, and
     * keeps handling while undispatch() reports a pending redispatch.
     */
    private void handle()
    {
        boolean dispatched=true;
        try
        {
            while(dispatched)
            {
                try
                {
                    while(true)
                    {
                        final Connection next = _connection.handle();
                        if (next!=_connection)
                        {
                            // connection upgraded in-place; continue with the new one
                            _connection=next;
                            continue;
                        }
                        break;
                    }
                }
                catch (ClosedChannelException e)
                {
                    Log.ignore(e);
                }
                catch (EofException e)
                {
                    Log.debug("EOF", e);
                    try{close();}
                    catch(IOException e2){Log.ignore(e2);}
                }
                catch (IOException e)
                {
                    Log.warn(e.toString());
                    Log.debug(e);
                    try{close();}
                    catch(IOException e2){Log.ignore(e2);}
                }
                catch (Throwable e)
                {
                    Log.warn("handle failed", e);
                    try{close();}
                    catch(IOException e2){Log.ignore(e2);}
                }
                dispatched=!undispatch();
            }
        }
        finally
        {
            if (dispatched)
            {
                // drain any remaining redispatch requests before releasing the thread
                dispatched=!undispatch();
                while (dispatched)
                {
                    Log.warn("SCEP.run() finally DISPATCHED");
                    dispatched=!undispatch();
                }
            }
        }
    }

    /* ------------------------------------------------------------ */
    /*
     * @see org.eclipse.io.nio.ChannelEndPoint#close()
     */
    @Override
    public void close() throws IOException
    {
        try
        {
            super.close();
        }
        catch (IOException e)
        {
            Log.ignore(e);
        }
        finally
        {
            // always let the selector observe the closed channel state
            updateKey();
        }
    }

    /* ------------------------------------------------------------ */
    @Override
    public String toString()
    {
        synchronized(this)
        {
            return "SCEP@" + hashCode() + "\t[d=" + _dispatched + ",io=" + _interestOps+
            ",w=" + _writable + ",rb=" + _readBlocked + ",wb=" + _writeBlocked + "]";
        }
    }

    /* ------------------------------------------------------------ */
    public SelectSet getSelectSet()
    {
        return _selectSet;
    }

    /* ------------------------------------------------------------ */
    /**
     * Don't set the SoTimeout
     * @see org.eclipse.jetty.io.nio.ChannelEndPoint#setMaxIdleTime(int)
     */
    @Override
    public void setMaxIdleTime(int timeMs) throws IOException
    {
        _maxIdleTime=timeMs;
    }

}
package org.zaproxy.zapmavenplugin;

/*
 * Copyright 2001-2005 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.UUID;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.zaproxy.clientapi.core.ApiResponse;
import org.zaproxy.clientapi.core.ApiResponseElement;
import org.zaproxy.clientapi.core.ClientApi;
import org.zaproxy.clientapi.core.ClientApiException;

/**
 * Maven goal that drives an already-running OWASP ZAP proxy after the
 * integration tests: optionally spiders and actively scans {@code targetURL},
 * saves the ZAP session, writes the alert report to {@code reportsDirectory},
 * and finally (optionally) shuts the proxy down.
 */
@Mojo( name = "process-zap", defaultPhase = LifecyclePhase.POST_INTEGRATION_TEST, threadSafe = true )
public class ProcessZAP extends AbstractMojo {

    /** ZAP client API handle; created in {@link #execute()}. */
    private ClientApi zapClientAPI;

    /** HTTP proxy pointing at the ZAP instance, used to fetch the alert report. */
    private Proxy proxy;

    /**
     * API KEY.
     */
    @Parameter(defaultValue = "ZAP-MAVEN-PLUGIN")
    public String apiKey;

    /**
     * Location of the host of the ZAP proxy
     */
    @Parameter( defaultValue = "localhost", required = true)
    private String zapProxyHost;

    /**
     * Location of the port of the ZAP proxy
     */
    @Parameter( defaultValue = "8080", required = true)
    private int zapProxyPort;

    /**
     * The URL to spider/scan.
     */
    @Parameter( required=true )
    private String targetURL;

    /**
     * Switch to spider the URL
     */
    @Parameter( defaultValue="true" )
    private boolean spiderURL;

    /**
     * Switch to scan the URL
     */
    @Parameter( defaultValue="true" )
    private boolean scanURL;

    /**
     * Save session of scan
     */
    @Parameter( defaultValue="true" )
    private boolean saveSession;

    /**
     * Switch to shutdown ZAP
     */
    @Parameter( defaultValue="true" )
    private boolean shutdownZAP;

    /**
     * Switch to write the alert report after scanning.
     */
    @Parameter( defaultValue="true" )
    private boolean reportAlerts;

    /**
     * Location to store the ZAP reports
     */
    @Parameter( defaultValue="${project.build.directory}/zap-reports" )
    private String reportsDirectory;

    /**
     * Set the output format type, in addition to the XML report. Must be one of "none" or "json".
     */
    @Parameter ( defaultValue="none" )
    private String format;

    /**
     * Create a timestamp for report/session file names.
     *
     * @return the current local time formatted as {@code yyyyMMddHHmmss}
     */
    private String dateTimeString() {
        // SimpleDateFormat is not thread-safe, but a fresh instance per call is fine.
        final Calendar cal = Calendar.getInstance();
        final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
        return sdf.format(cal.getTime());
    }

    /**
     * Create a unique temporary filename of the form
     * {@code <prefix><timestamp>_<uuid><suffix>}.
     *
     * @param prefix if null, then default "temp"
     * @param suffix if null, then default ".tmp"
     * @return the generated filename (no directory component)
     */
    private String createTempFilename(final String prefix, final String suffix) {
        final StringBuilder sb = new StringBuilder("");
        if (prefix != null) {
            sb.append(prefix);
        } else {
            sb.append("temp");
        }
        // append date time and random UUID
        sb.append(dateTimeString()).append("_").append(UUID.randomUUID().toString());
        if (suffix != null) {
            sb.append(suffix);
        } else {
            sb.append(".tmp");
        }
        return sb.toString();
    }

    /**
     * Change the ZAP API status response to an integer.
     *
     * @param response the ZAP API response code
     * @return the status as an int (0-100, a percentage of completion)
     */
    private int statusToInt(final ApiResponse response) {
        return Integer.parseInt(((ApiResponseElement)response).getValue());
    }

    /**
     * Search for all links and pages on the URL, blocking until the spider
     * reports 100% completion.
     *
     * @param url the URL to investigate
     * @throws ClientApiException if the ZAP API call fails
     */
    private void spiderURL(final String url) throws ClientApiException {
        zapClientAPI.spider.scan(apiKey, url);
        while (statusToInt(zapClientAPI.spider.status(null)) < 100) {
            try {
                Thread.sleep(1000);
            } catch (final InterruptedException e) {
                // Restore the interrupt flag so callers can observe the interruption.
                Thread.currentThread().interrupt();
                getLog().error(e.toString());
            }
        }
    }

    /**
     * Actively scan all pages found at url, blocking until the scanner
     * reports 100% completion.
     *
     * @param url the url to scan
     * @throws ClientApiException if the ZAP API call fails
     */
    private void scanURL(final String url) throws ClientApiException {
        zapClientAPI.ascan.scan(apiKey, url, "true", "false");
        while ( statusToInt(zapClientAPI.ascan.status()) < 100) {
            try {
                Thread.sleep(1000);
            } catch (final InterruptedException e) {
                // Restore the interrupt flag so callers can observe the interruption.
                Thread.currentThread().interrupt();
                getLog().error(e.toString());
            }
        }
    }

    /**
     * Get all alerts from the ZAP proxy by fetching the core alerts view
     * through the proxy itself.
     *
     * @param format the report format: "xml", "html" or "json" (anything else falls back to "xml")
     * @return the raw alert report body
     * @throws Exception if the HTTP request fails
     */
    private String getAllAlerts(final String format) throws Exception {
        final URL url;
        if (format.equalsIgnoreCase("xml")
                || format.equalsIgnoreCase("html")
                || format.equalsIgnoreCase("json")) {
            url = new URL("http://zap/" + format + "/core/view/alerts");
        } else {
            url = new URL("http://zap/xml/core/view/alerts");
        }
        getLog().info("Open URL: " + url.toString());

        final HttpURLConnection uc = (HttpURLConnection) url.openConnection(proxy);
        uc.connect();

        // try-with-resources guarantees the stream is closed even on error;
        // StringBuilder avoids O(n^2) string concatenation; ZAP serves UTF-8.
        final StringBuilder result = new StringBuilder();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(uc.getInputStream(), StandardCharsets.UTF_8))) {
            String inputLine;
            while ((inputLine = in.readLine()) != null) {
                result.append(inputLine);
            }
        }
        return result.toString();
    }

    /**
     * Normalize the requested report format.
     *
     * @param format the configured format value
     * @return "xml", "html" or "json" when recognised, otherwise "xml"
     */
    private String getAllAlertsFormat(final String format) {
        if (format.equalsIgnoreCase("xml")
                || format.equalsIgnoreCase("html")
                || format.equalsIgnoreCase("json")) {
            return format;
        } else {
            return "xml";
        }
    }

    /**
     * Execute the goal: spider, scan, save the session, write the alert
     * report, and shut ZAP down, each step subject to its configuration flag.
     *
     * @throws MojoExecutionException if any ZAP interaction fails
     */
    @Override
    public void execute() throws MojoExecutionException {
        try {
            zapClientAPI = new ClientApi(zapProxyHost, zapProxyPort);
            proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(zapProxyHost, zapProxyPort));

            if (spiderURL) {
                getLog().info("Spider the site [" + targetURL + "]");
                spiderURL(targetURL);
            } else {
                getLog().info("skip spidering the site [" + targetURL + "]");
            }

            if (scanURL) {
                getLog().info("Scan the site [" + targetURL + "]");
                scanURL(targetURL);
            } else {
                getLog().info("skip scanning the site [" + targetURL + "]");
            }

            // filename to share between the session file and the report file
            String fileName = "";
            if (saveSession) {
                fileName = createTempFilename("ZAP", "");
                zapClientAPI.core.saveSession(apiKey, fileName, "true");
            } else {
                getLog().info("skip saveSession");
            }

            if (reportAlerts) {
                // reuse fileName of the session file
                if (fileName == null || fileName.length() == 0) {
                    fileName = createTempFilename("ZAP", "");
                }
                final String fileName_no_extension = FilenameUtils.concat(reportsDirectory, fileName);
                try {
                    final String alerts = getAllAlerts(getAllAlertsFormat(format));
                    final String fullFileName = fileName_no_extension + "." + getAllAlertsFormat(format);
                    FileUtils.writeStringToFile(new File(fullFileName), alerts);
                    getLog().info("File save in format in ["+getAllAlertsFormat(format)+"]");
                } catch (final Exception e) {
                    getLog().error(e.toString());
                    e.printStackTrace();
                }
            }
        } catch (final Exception e) {
            getLog().error(e.toString());
            throw new MojoExecutionException("Processing with ZAP failed", e);
        } finally {
            if (shutdownZAP && zapClientAPI != null) {
                try {
                    getLog().info("Shutdown ZAProxy");
                    zapClientAPI.core.shutdown(apiKey);
                } catch (final Exception e) {
                    getLog().error(e.toString());
                    e.printStackTrace();
                }
            } else {
                getLog().info("No shutdown of ZAP");
            }
        }
    }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.text.tx3g;

import android.graphics.Color;
import android.graphics.Typeface;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.style.ForegroundColorSpan;
import android.text.style.StyleSpan;
import android.text.style.TypefaceSpan;
import android.text.style.UnderlineSpan;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.SimpleSubtitleDecoder;
import com.google.android.exoplayer2.text.Subtitle;
import com.google.android.exoplayer2.text.SubtitleDecoderException;
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.android.exoplayer2.util.Util;
import java.nio.charset.Charset;
import java.util.List;

/**
 * A {@link SimpleSubtitleDecoder} for tx3g.
 * <p>
 * Currently supports parsing of a single text track with embedded styles.
 */
public final class Tx3gDecoder extends SimpleSubtitleDecoder {

  // UTF-16 byte-order marks; if the subtitle text starts with either, it is decoded as UTF-16.
  private static final char BOM_UTF16_BE = '\uFEFF';
  private static final char BOM_UTF16_LE = '\uFFFE';

  // Four-character atom type codes: 'styl' (style records) and 'tbox' (text box placement).
  private static final int TYPE_STYL = 0x7374796c;
  private static final int TYPE_TBOX = 0x74626f78;
  private static final String TX3G_SERIF = "Serif";

  // An atom header is a 32-bit size followed by a 32-bit type (two readInt calls below).
  private static final int SIZE_ATOM_HEADER = 8;
  private static final int SIZE_SHORT = 2;
  private static final int SIZE_BOM_UTF16 = 2;
  // A 'styl' record: start(2) + end(2) + fontId(2) + face(1) + size(1) + colorRGBA(4) = 12 bytes.
  private static final int SIZE_STYLE_RECORD = 12;

  // Font face bit flags within a style record.
  private static final int FONT_FACE_BOLD = 0x0001;
  private static final int FONT_FACE_ITALIC = 0x0002;
  private static final int FONT_FACE_UNDERLINE = 0x0004;

  // Span priorities: defaults are attached low-priority so per-record styles win.
  private static final int SPAN_PRIORITY_LOW = 0xFF << Spanned.SPAN_PRIORITY_SHIFT;
  private static final int SPAN_PRIORITY_HIGH = 0;

  // Fallback defaults used when no (valid) initialization data is provided.
  private static final int DEFAULT_FONT_FACE = 0;
  private static final int DEFAULT_COLOR = Color.WHITE;
  private static final String DEFAULT_FONT_FAMILY = C.SANS_SERIF_NAME;
  private static final float DEFAULT_VERTICAL_PLACEMENT = 0.85f;

  // Reused scratch buffer for parsing each sample.
  private final ParsableByteArray parsableByteArray;

  // Defaults decoded from the 'stsd' initialization data (or the constants above).
  private boolean customVerticalPlacement;
  private int defaultFontFace;
  private int defaultColorRgba;
  private String defaultFontFamily;
  private float defaultVerticalPlacement;
  private int calculatedVideoTrackHeight;

  /**
   * Sets up a new {@link Tx3gDecoder} with default values.
   *
   * @param initializationData Sample description atom ('stsd') data with default subtitle styles.
   */
  public Tx3gDecoder(List<byte[]> initializationData) {
    super("Tx3gDecoder");
    parsableByteArray = new ParsableByteArray();

    // Only initialization data of the two expected lengths is trusted; anything else
    // falls through to the hard-coded defaults below.
    if (initializationData != null && initializationData.size() == 1
        && (initializationData.get(0).length == 48 || initializationData.get(0).length == 53)) {
      byte[] initializationBytes = initializationData.get(0);
      // Byte 24: default font face flags; bytes 26-29: default text color as big-endian RGBA.
      defaultFontFace = initializationBytes[24];
      defaultColorRgba = ((initializationBytes[26] & 0xFF) << 24)
          | ((initializationBytes[27] & 0xFF) << 16)
          | ((initializationBytes[28] & 0xFF) << 8)
          | (initializationBytes[29] & 0xFF);
      // Bytes 43..end: font family name; only "Serif" maps away from the sans-serif default.
      String fontFamily =
          Util.fromUtf8Bytes(initializationBytes, 43, initializationBytes.length - 43);
      defaultFontFamily = TX3G_SERIF.equals(fontFamily) ? C.SERIF_NAME : C.SANS_SERIF_NAME;
      //font size (initializationBytes[25]) is 5% of video height
      calculatedVideoTrackHeight = 20 * initializationBytes[25];
      // Bit 0x20 of byte 0 signals that the sample carries explicit vertical placement.
      customVerticalPlacement = (initializationBytes[0] & 0x20) != 0;
      if (customVerticalPlacement) {
        // Bytes 10-11: requested placement in pixels; normalize to a 0..0.95 fraction of height.
        int requestedVerticalPlacement = ((initializationBytes[10] & 0xFF) << 8)
            | (initializationBytes[11] & 0xFF);
        defaultVerticalPlacement =
            (float) requestedVerticalPlacement / calculatedVideoTrackHeight;
        defaultVerticalPlacement = Util.constrainValue(defaultVerticalPlacement, 0.0f, 0.95f);
      } else {
        defaultVerticalPlacement = DEFAULT_VERTICAL_PLACEMENT;
      }
    } else {
      defaultFontFace = DEFAULT_FONT_FACE;
      defaultColorRgba = DEFAULT_COLOR;
      defaultFontFamily = DEFAULT_FONT_FAMILY;
      customVerticalPlacement = false;
      defaultVerticalPlacement = DEFAULT_VERTICAL_PLACEMENT;
    }
  }

  /**
   * Decodes one tx3g sample into a single-cue subtitle: reads the text, applies the
   * default styles at low span priority, then walks any trailing 'styl'/'tbox' atoms
   * to apply per-range styles and vertical placement.
   *
   * @param bytes The sample data.
   * @param length The number of valid bytes in {@code bytes}.
   * @param reset Whether the decoder state should be reset (unused here; handled by the parent).
   * @return The decoded subtitle, or {@link Tx3gSubtitle#EMPTY} for empty text.
   * @throws SubtitleDecoderException If the sample is malformed.
   */
  @Override
  protected Subtitle decode(byte[] bytes, int length, boolean reset)
      throws SubtitleDecoderException {
    parsableByteArray.reset(bytes, length);
    String cueTextString = readSubtitleText(parsableByteArray);
    if (cueTextString.isEmpty()) {
      return Tx3gSubtitle.EMPTY;
    }
    // Attach default styles.
    SpannableStringBuilder cueText = new SpannableStringBuilder(cueTextString);
    attachFontFace(cueText, defaultFontFace, DEFAULT_FONT_FACE, 0, cueText.length(),
        SPAN_PRIORITY_LOW);
    attachColor(cueText, defaultColorRgba, DEFAULT_COLOR, 0, cueText.length(),
        SPAN_PRIORITY_LOW);
    attachFontFamily(cueText, defaultFontFamily, DEFAULT_FONT_FAMILY, 0, cueText.length(),
        SPAN_PRIORITY_LOW);
    float verticalPlacement = defaultVerticalPlacement;
    // Find and attach additional styles.
    while (parsableByteArray.bytesLeft() >= SIZE_ATOM_HEADER) {
      int position = parsableByteArray.getPosition();
      int atomSize = parsableByteArray.readInt();
      int atomType = parsableByteArray.readInt();
      if (atomType == TYPE_STYL) {
        assertTrue(parsableByteArray.bytesLeft() >= SIZE_SHORT);
        int styleRecordCount = parsableByteArray.readUnsignedShort();
        for (int i = 0; i < styleRecordCount; i++) {
          applyStyleRecord(parsableByteArray, cueText);
        }
      } else if (atomType == TYPE_TBOX && customVerticalPlacement) {
        assertTrue(parsableByteArray.bytesLeft() >= SIZE_SHORT);
        int requestedVerticalPlacement = parsableByteArray.readUnsignedShort();
        verticalPlacement = (float) requestedVerticalPlacement / calculatedVideoTrackHeight;
        verticalPlacement = Util.constrainValue(verticalPlacement, 0.0f, 0.95f);
      }
      // Skip to the next atom regardless of how much of this one was consumed.
      parsableByteArray.setPosition(position + atomSize);
    }
    return new Tx3gSubtitle(
        new Cue(
            cueText,
            /* textAlignment= */ null,
            verticalPlacement,
            Cue.LINE_TYPE_FRACTION,
            Cue.ANCHOR_TYPE_START,
            Cue.DIMEN_UNSET,
            Cue.TYPE_UNSET,
            Cue.DIMEN_UNSET));
  }

  /**
   * Reads the length-prefixed subtitle text, selecting UTF-16 when a BOM is present
   * and UTF-8 otherwise.
   *
   * @param parsableByteArray The sample data, positioned at the text length field.
   * @return The decoded text (possibly empty).
   * @throws SubtitleDecoderException If the data is shorter than the length prefix requires.
   */
  private static String readSubtitleText(ParsableByteArray parsableByteArray)
      throws SubtitleDecoderException {
    assertTrue(parsableByteArray.bytesLeft() >= SIZE_SHORT);
    int textLength = parsableByteArray.readUnsignedShort();
    if (textLength == 0) {
      return "";
    }
    if (parsableByteArray.bytesLeft() >= SIZE_BOM_UTF16) {
      char firstChar = parsableByteArray.peekChar();
      if (firstChar == BOM_UTF16_BE || firstChar == BOM_UTF16_LE) {
        return parsableByteArray.readString(textLength, Charset.forName(C.UTF16_NAME));
      }
    }
    return parsableByteArray.readString(textLength, Charset.forName(C.UTF8_NAME));
  }

  /**
   * Reads one 12-byte 'styl' record and attaches its font-face and color spans to
   * {@code cueText} at high priority (overriding the defaults).
   *
   * @param parsableByteArray The sample data, positioned at the start of the record.
   * @param cueText The cue text to attach spans to.
   * @throws SubtitleDecoderException If fewer than {@link #SIZE_STYLE_RECORD} bytes remain.
   */
  private void applyStyleRecord(ParsableByteArray parsableByteArray,
      SpannableStringBuilder cueText) throws SubtitleDecoderException {
    assertTrue(parsableByteArray.bytesLeft() >= SIZE_STYLE_RECORD);
    int start = parsableByteArray.readUnsignedShort();
    int end = parsableByteArray.readUnsignedShort();
    parsableByteArray.skipBytes(2); // font identifier
    int fontFace = parsableByteArray.readUnsignedByte();
    parsableByteArray.skipBytes(1); // font size
    int colorRgba = parsableByteArray.readInt();
    attachFontFace(cueText, fontFace, defaultFontFace, start, end, SPAN_PRIORITY_HIGH);
    attachColor(cueText, colorRgba, defaultColorRgba, start, end, SPAN_PRIORITY_HIGH);
  }

  // Attaches bold/italic/underline spans for fontFace, but only when it differs from the
  // default (the default was already attached over the whole cue). When the face differs
  // but sets none of the three flags, an explicit NORMAL span is attached to override
  // a non-normal default over [start, end).
  private static void attachFontFace(SpannableStringBuilder cueText, int fontFace,
      int defaultFontFace, int start, int end, int spanPriority) {
    if (fontFace != defaultFontFace) {
      final int flags = Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | spanPriority;
      boolean isBold = (fontFace & FONT_FACE_BOLD) != 0;
      boolean isItalic = (fontFace & FONT_FACE_ITALIC) != 0;
      if (isBold) {
        if (isItalic) {
          cueText.setSpan(new StyleSpan(Typeface.BOLD_ITALIC), start, end, flags);
        } else {
          cueText.setSpan(new StyleSpan(Typeface.BOLD), start, end, flags);
        }
      } else if (isItalic) {
        cueText.setSpan(new StyleSpan(Typeface.ITALIC), start, end, flags);
      }
      boolean isUnderlined = (fontFace & FONT_FACE_UNDERLINE) != 0;
      if (isUnderlined) {
        cueText.setSpan(new UnderlineSpan(), start, end, flags);
      }
      if (!isUnderlined && !isBold && !isItalic) {
        cueText.setSpan(new StyleSpan(Typeface.NORMAL), start, end, flags);
      }
    }
  }

  // Attaches a foreground color span when colorRgba differs from the default.
  // tx3g stores RGBA; Android expects ARGB, hence the byte rotation below.
  private static void attachColor(SpannableStringBuilder cueText, int colorRgba,
      int defaultColorRgba, int start, int end, int spanPriority) {
    if (colorRgba != defaultColorRgba) {
      int colorArgb = ((colorRgba & 0xFF) << 24) | (colorRgba >>> 8);
      cueText.setSpan(new ForegroundColorSpan(colorArgb), start, end,
          Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | spanPriority);
    }
  }

  // Attaches a typeface span when the family differs from the default. Reference equality
  // is intentional: both values are canonical constants (C.SERIF_NAME / C.SANS_SERIF_NAME),
  // hence the suppression.
  @SuppressWarnings("ReferenceEquality")
  private static void attachFontFamily(SpannableStringBuilder cueText, String fontFamily,
      String defaultFontFamily, int start, int end, int spanPriority) {
    if (fontFamily != defaultFontFamily) {
      cueText.setSpan(new TypefaceSpan(fontFamily), start, end,
          Spanned.SPAN_EXCLUSIVE_EXCLUSIVE | spanPriority);
    }
  }

  // Throws a SubtitleDecoderException when a structural expectation about the sample fails.
  private static void assertTrue(boolean checkValue) throws SubtitleDecoderException {
    if (!checkValue) {
      throw new SubtitleDecoderException("Unexpected subtitle format.");
    }
  }

}
package com.kilobolt.GameWorld; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL10; import com.badlogic.gdx.graphics.OrthographicCamera; import com.badlogic.gdx.graphics.g2d.Animation; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.graphics.glutils.ShapeRenderer; import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType; import com.kilobolt.GameObjects.Bird; import com.kilobolt.GameObjects.Grass; import com.kilobolt.GameObjects.Pipe; import com.kilobolt.GameObjects.ScrollHandler; import com.kilobolt.ZBHelpers.AssetLoader; public class GameRenderer { private GameWorld myWorld; private OrthographicCamera cam; private ShapeRenderer shapeRenderer; private SpriteBatch batcher; private int midPointY; private int gameHeight; // Game Objects private Bird bird; private ScrollHandler scroller; private Grass frontGrass, backGrass; private Pipe pipe1, pipe2, pipe3; // Game Assets private TextureRegion bg, grass; private Animation birdAnimation; private TextureRegion birdMid, birdDown, birdUp; private TextureRegion skullUp, skullDown, bar; public GameRenderer(GameWorld world, int gameHeight, int midPointY) { myWorld = world; this.gameHeight = gameHeight; this.midPointY = midPointY; cam = new OrthographicCamera(); cam.setToOrtho(true, 136, gameHeight); // this will be scaled to the resolution in Main.java in ZombieBird-desktop. 
batcher = new SpriteBatch(); // attach batcher to camera batcher.setProjectionMatrix(cam.combined); shapeRenderer = new ShapeRenderer(); shapeRenderer.setProjectionMatrix(cam.combined); // Call helper methods to initialize asset and object instance variables initGameObjects(); initAssets(); } public void render(float runTime) { System.out.println("GameRenderer - render()"); // Fill the screen with black to prevent flickering Gdx.gl.glClearColor(0, 0, 0, 1); Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT); // Begin ShapeRenderer shapeRenderer.begin(ShapeType.Filled); // Draw Background color shapeRenderer.setColor(55 / 255.0f, 80 / 255.0f, 100 / 255.0f, 1); shapeRenderer.rect(0, 0, 136, midPointY + 66); // Draw grass shapeRenderer.setColor(111 / 255.0f, 186 / 255.0f, 45 / 255.0f, 1); shapeRenderer.rect(0, midPointY + 66, 136, 11); // Draw Dirt shapeRenderer.setColor(147 / 255.0f, 80 / 255.0f, 27 / 255.0f, 1); shapeRenderer.rect(0, midPointY + 77, 136, 52); // End ShapeRenderer shapeRenderer.end(); // Begin SpriteBatch batcher.begin(); // Disable transparency // This is good for performance when drawing images that do not require transparency batcher.disableBlending(); batcher.draw(bg, 0, midPointY + 23, 136, 43); // 1. Draw Grass drawGrass(); // 2. Draw Pipes drawPipes(); batcher.enableBlending(); // 3. Draw Skulls (requires transparency) drawSkulls(); // Draw bird at desired coordinates. 
Retrieve Animation object from AssetLoader // Pass in the runTime variable to get current frame if (bird.shouldntFlap()) { batcher.draw(birdMid, bird.getX(), bird.getY(), bird.getWidth() / 2.0f, bird.getHeight() / 2.0f, bird.getWidth(), bird.getHeight(), 1, 1, bird.getRotation()); } else { batcher.draw(birdAnimation.getKeyFrame(runTime), bird.getX(), bird.getY(), bird.getWidth() / 2.0f, bird.getHeight() / 2.0f, bird.getWidth(), bird.getHeight(), 1, 1, bird.getRotation()); } // End SpriteBatch batcher.end(); // testing the boundingCircle to ensure it is lined up shapeRenderer.begin(ShapeType.Filled); shapeRenderer.setColor(Color.RED); shapeRenderer.circle(bird.getBoundingCircle().x, bird.getBoundingCircle().y, bird.getBoundingCircle().radius); // testing the rectangles for the pipes and skulls // Bar up for pipes 1 2 and 3 shapeRenderer.rect(pipe1.getBarUp().x, pipe1.getBarUp().y, pipe1.getBarUp().width, pipe1.getBarUp().height); shapeRenderer.rect(pipe2.getBarUp().x, pipe2.getBarUp().y, pipe2.getBarUp().width, pipe2.getBarUp().height); shapeRenderer.rect(pipe3.getBarUp().x, pipe3.getBarUp().y, pipe3.getBarUp().width, pipe3.getBarUp().height); // Bar down for pipes 1 2 and 3 shapeRenderer.rect(pipe1.getBarDown().x, pipe1.getBarDown().y, pipe1.getBarDown().width, pipe1.getBarDown().height); shapeRenderer.rect(pipe2.getBarDown().x, pipe2.getBarDown().y, pipe2.getBarDown().width, pipe2.getBarDown().height); shapeRenderer.rect(pipe3.getBarDown().x, pipe3.getBarDown().y, pipe3.getBarDown().width, pipe3.getBarDown().height); // Skull up for Pipes 1 2 and 3 shapeRenderer.rect(pipe1.getSkullUp().x, pipe1.getSkullUp().y, pipe1.getSkullUp().width, pipe1.getSkullUp().height); shapeRenderer.rect(pipe2.getSkullUp().x, pipe2.getSkullUp().y, pipe2.getSkullUp().width, pipe2.getSkullUp().height); shapeRenderer.rect(pipe3.getSkullUp().x, pipe3.getSkullUp().y, pipe3.getSkullUp().width, pipe3.getSkullUp().height); // Skull down for Pipes 1 2 and 3 
shapeRenderer.rect(pipe1.getSkullDown().x, pipe1.getSkullDown().y, pipe1.getSkullDown().width, pipe1.getSkullDown().height); shapeRenderer.rect(pipe2.getSkullDown().x, pipe2.getSkullDown().y, pipe2.getSkullDown().width, pipe2.getSkullDown().height); shapeRenderer.rect(pipe3.getSkullDown().x, pipe3.getSkullDown().y, pipe3.getSkullDown().width, pipe3.getSkullDown().height); shapeRenderer.end(); } private void initGameObjects() { bird = myWorld.getBird(); scroller = myWorld.getScroller(); frontGrass = scroller.getFrontGrass(); backGrass = scroller.getBackGrass(); pipe1 = scroller.getPipe1(); pipe2 = scroller.getPipe2(); pipe3 = scroller.getPipe3(); } private void initAssets() { bg = AssetLoader.bg; grass = AssetLoader.grass; birdAnimation = AssetLoader.birdAnimation; birdMid = AssetLoader.bird; birdDown = AssetLoader.birdDown; birdUp = AssetLoader.birdUp; skullUp = AssetLoader.skullUp; skullDown = AssetLoader.skullDown; bar = AssetLoader.bar; } private void drawGrass() { // Draw the grass batcher.draw(grass, frontGrass.getX(), frontGrass.getY(), frontGrass.getWidth(), frontGrass.getHeight()); batcher.draw(grass, backGrass.getX(), backGrass.getY(), backGrass.getWidth(), backGrass.getHeight()); } private void drawSkulls() { // Temporary code! Sorry about the mess :) // We will fix this when we finish the Pipe class. batcher.draw(skullUp, pipe1.getX() - 1, pipe1.getY() + pipe1.getHeight() - 14, 24, 14); batcher.draw(skullDown, pipe1.getX() - 1, pipe1.getY() + pipe1.getHeight() + 45, 24, 14); batcher.draw(skullUp, pipe2.getX() - 1, pipe2.getY() + pipe2.getHeight() - 14, 24, 14); batcher.draw(skullDown, pipe2.getX() - 1, pipe2.getY() + pipe2.getHeight() + 45, 24, 14); batcher.draw(skullUp, pipe3.getX() - 1, pipe3.getY() + pipe3.getHeight() - 14, 24, 14); batcher.draw(skullDown, pipe3.getX() - 1, pipe3.getY() + pipe3.getHeight() + 45, 24, 14); } private void drawPipes() { // Temporary code! Sorry about the mess :) // We will fix this when we finish the Pipe class. 
batcher.draw(bar, pipe1.getX(), pipe1.getY(), pipe1.getWidth(), pipe1.getHeight()); batcher.draw(bar, pipe1.getX(), pipe1.getY() + pipe1.getHeight() + 45, pipe1.getWidth(), midPointY + 66 - (pipe1.getHeight() + 45)); batcher.draw(bar, pipe2.getX(), pipe2.getY(), pipe2.getWidth(), pipe2.getHeight()); batcher.draw(bar, pipe2.getX(), pipe2.getY() + pipe2.getHeight() + 45, pipe2.getWidth(), midPointY + 66 - (pipe2.getHeight() + 45)); batcher.draw(bar, pipe3.getX(), pipe3.getY(), pipe3.getWidth(), pipe3.getHeight()); batcher.draw(bar, pipe3.getX(), pipe3.getY() + pipe3.getHeight() + 45, pipe3.getWidth(), midPointY + 66 - (pipe3.getHeight() + 45)); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.server.quorum; import java.io.ByteArrayOutputStream; import java.io.BufferedInputStream; import java.io.IOException; import java.net.BindException; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; import javax.security.sasl.SaslException; import org.apache.jute.BinaryOutputArchive; import org.apache.zookeeper.common.Time; import org.apache.zookeeper.server.FinalRequestProcessor; import org.apache.zookeeper.server.Request; import org.apache.zookeeper.server.RequestProcessor; import org.apache.zookeeper.server.ZooKeeperThread; import org.apache.zookeeper.server.quorum.QuorumPeer.LearnerType; import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier; import org.apache.zookeeper.server.util.ZxidUtils; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; /** * This class has the control logic for the Leader. */ public class Leader { private static final Logger LOG = LoggerFactory.getLogger(Leader.class); static final private boolean nodelay = System.getProperty("leader.nodelay", "true").equals("true"); static { LOG.info("TCP NoDelay set to: " + nodelay); } static public class Proposal { public QuorumPacket packet; public HashSet<Long> ackSet = new HashSet<Long>(); public Request request; @Override public String toString() { return packet.getType() + ", " + packet.getZxid() + ", " + request; } } final LeaderZooKeeperServer zk; final QuorumPeer self; private boolean quorumFormed = false; // the follower acceptor thread LearnerCnxAcceptor cnxAcceptor; // list of all the followers private final HashSet<LearnerHandler> learners = new HashSet<LearnerHandler>(); /** * Returns a copy of the current learner snapshot */ public List<LearnerHandler> getLearners() { synchronized (learners) { return new ArrayList<LearnerHandler>(learners); } } // list of followers that are ready to follow (i.e synced with the leader) private final HashSet<LearnerHandler> forwardingFollowers = new HashSet<LearnerHandler>(); /** * Returns a copy of the current forwarding follower snapshot */ public List<LearnerHandler> getForwardingFollowers() { synchronized (forwardingFollowers) { return new ArrayList<LearnerHandler>(forwardingFollowers); } } private void addForwardingFollower(LearnerHandler lh) { synchronized (forwardingFollowers) { forwardingFollowers.add(lh); } } private final HashSet<LearnerHandler> observingLearners = new HashSet<LearnerHandler>(); /** * Returns a copy of the current observer snapshot */ public List<LearnerHandler> getObservingLearners() { synchronized (observingLearners) { return new ArrayList<LearnerHandler>(observingLearners); } } private void addObserverLearnerHandler(LearnerHandler lh) { synchronized (observingLearners) { observingLearners.add(lh); } } // Pending sync requests. 
Must access under 'this' lock. private final HashMap<Long,List<LearnerSyncRequest>> pendingSyncs = new HashMap<Long,List<LearnerSyncRequest>>(); synchronized public int getNumPendingSyncs() { return pendingSyncs.size(); } //Follower counter final AtomicLong followerCounter = new AtomicLong(-1); /** * Adds peer to the leader. * * @param learner * instance of learner handle */ void addLearnerHandler(LearnerHandler learner) { synchronized (learners) { learners.add(learner); } } /** * Remove the learner from the learner list * * @param peer */ void removeLearnerHandler(LearnerHandler peer) { synchronized (forwardingFollowers) { forwardingFollowers.remove(peer); } synchronized (learners) { learners.remove(peer); } synchronized (observingLearners) { observingLearners.remove(peer); } } boolean isLearnerSynced(LearnerHandler peer){ synchronized (forwardingFollowers) { return forwardingFollowers.contains(peer); } } ServerSocket ss; Leader(QuorumPeer self,LeaderZooKeeperServer zk) throws IOException { this.self = self; try { if (self.getQuorumListenOnAllIPs()) { ss = new ServerSocket(self.getQuorumAddress().getPort()); } else { ss = new ServerSocket(); } ss.setReuseAddress(true); if (!self.getQuorumListenOnAllIPs()) { ss.bind(self.getQuorumAddress()); } } catch (BindException e) { if (self.getQuorumListenOnAllIPs()) { LOG.error("Couldn't bind to port " + self.getQuorumAddress().getPort(), e); } else { LOG.error("Couldn't bind to " + self.getQuorumAddress(), e); } throw e; } this.zk=zk; } /** * This message is for follower to expect diff */ final static int DIFF = 13; /** * This is for follower to truncate its logs */ final static int TRUNC = 14; /** * This is for follower to download the snapshots */ final static int SNAP = 15; /** * This tells the leader that the connecting peer is actually an observer */ final static int OBSERVERINFO = 16; /** * This message type is sent by the leader to indicate it's zxid and if * needed, its database. 
*/ final static int NEWLEADER = 10; /** * This message type is sent by a follower to pass the last zxid. This is here * for backward compatibility purposes. */ final static int FOLLOWERINFO = 11; /** * This message type is sent by the leader to indicate that the follower is * now uptodate andt can start responding to clients. */ final static int UPTODATE = 12; /** * This message is the first that a follower receives from the leader. * It has the protocol version and the epoch of the leader. */ public static final int LEADERINFO = 17; /** * This message is used by the follow to ack a proposed epoch. */ public static final int ACKEPOCH = 18; /** * This message type is sent to a leader to request and mutation operation. * The payload will consist of a request header followed by a request. */ final static int REQUEST = 1; /** * This message type is sent by a leader to propose a mutation. */ public final static int PROPOSAL = 2; /** * This message type is sent by a follower after it has synced a proposal. */ final static int ACK = 3; /** * This message type is sent by a leader to commit a proposal and cause * followers to start serving the corresponding data. */ final static int COMMIT = 4; /** * This message type is enchanged between follower and leader (initiated by * follower) to determine liveliness. */ final static int PING = 5; /** * This message type is to validate a session that should be active. */ final static int REVALIDATE = 6; /** * This message is a reply to a synchronize command flushing the pipe * between the leader and the follower. */ final static int SYNC = 7; /** * This message type informs observers of a committed proposal. 
 */
final static int INFORM = 8;

// Proposals sent out but not yet committed, keyed by zxid.
ConcurrentMap<Long, Proposal> outstandingProposals = new ConcurrentHashMap<Long, Proposal>();

// Committed proposals awaiting application by ToBeAppliedRequestProcessor.
ConcurrentLinkedQueue<Proposal> toBeApplied = new ConcurrentLinkedQueue<Proposal>();

// The NEWLEADER proposal used to establish this leader's quorum.
Proposal newLeaderProposal = new Proposal();

/**
 * Accepts connections from learners (followers and observers) and hands
 * each accepted socket to a fresh LearnerHandler thread.
 */
class LearnerCnxAcceptor extends ZooKeeperThread {
    private volatile boolean stop = false;

    public LearnerCnxAcceptor() {
        super("LearnerCnxAcceptor-" + ss.getLocalSocketAddress());
    }

    @Override
    public void run() {
        try {
            while (!stop) {
                try {
                    Socket s = ss.accept();
                    // start with the initLimit, once the ack is processed
                    // in LearnerHandler switch to the syncLimit
                    s.setSoTimeout(self.tickTime * self.initLimit);
                    s.setTcpNoDelay(nodelay);
                    BufferedInputStream is = new BufferedInputStream(
                            s.getInputStream());
                    LearnerHandler fh = new LearnerHandler(s, is, Leader.this);
                    fh.start();
                } catch (SocketException e) {
                    if (stop) {
                        LOG.info("exception while shutting down acceptor: "
                                + e);
                        // When Leader.shutdown() calls ss.close(),
                        // the call to accept throws an exception.
                        // We catch and set stop to true.
                        stop = true;
                    } else {
                        throw e;
                    }
                } catch (SaslException e) {
                    LOG.error("Exception while connecting to quorum learner", e);
                }
            }
        } catch (Exception e) {
            LOG.warn("Exception while accepting follower", e);
        }
    }

    public void halt() {
        stop = true;
    }
}

// Epoch/zxid summary of this leader's state at election time.
StateSummary leaderStateSummary;

// Epoch being negotiated with connecting followers; -1 until first report.
long epoch = -1;

// True until a quorum has reported its accepted epoch.
boolean waitingForNewEpoch = true;

// Set once the acceptor is running and epoch negotiation may begin.
volatile boolean readyToStart = false;

/**
 * This method is the main function that is called to lead.
 *
 * @throws IOException
 * @throws InterruptedException
 */
void lead() throws IOException, InterruptedException {
    self.end_fle = Time.currentElapsedTime();
    long electionTimeTaken = self.end_fle - self.start_fle;
    self.setElectionTimeTaken(electionTimeTaken);
    LOG.info("LEADING - LEADER ELECTION TOOK - {}", electionTimeTaken);
    self.start_fle = 0;
    self.end_fle = 0;

    zk.registerJMX(new LeaderBean(this, zk), self.jmxLocalPeerBean);

    try {
        self.tick.set(0);
        zk.loadData();

        leaderStateSummary = new StateSummary(self.getCurrentEpoch(),
                zk.getLastProcessedZxid());

        // Start thread that waits for connection requests from
        // new followers.
        cnxAcceptor = new LearnerCnxAcceptor();
        cnxAcceptor.start();

        readyToStart = true;
        // Blocks until a quorum has reported accepted epochs.
        long epoch = getEpochToPropose(self.getId(), self.getAcceptedEpoch());

        zk.setZxid(ZxidUtils.makeZxid(epoch, 0));

        synchronized (this) {
            lastProposed = zk.getZxid();
        }

        newLeaderProposal.packet = new QuorumPacket(NEWLEADER, zk.getZxid(),
                null, null);

        if ((newLeaderProposal.packet.getZxid() & 0xffffffffL) != 0) {
            LOG.info("NEWLEADER proposal has Zxid of "
                    + Long.toHexString(newLeaderProposal.packet.getZxid()));
        }

        waitForEpochAck(self.getId(), leaderStateSummary);
        self.setCurrentEpoch(epoch);

        // We have to get at least a majority of servers in sync with
        // us. We do this by waiting for the NEWLEADER packet to get
        // acknowledged
        try {
            waitForNewLeaderAck(self.getId(), zk.getZxid(), LearnerType.PARTICIPANT);
        } catch (InterruptedException e) {
            shutdown("Waiting for a quorum of followers, only synced with sids: [ "
                    + getSidSetString(newLeaderProposal.ackSet) + " ]");
            HashSet<Long> followerSet = new HashSet<Long>();
            for (LearnerHandler f : learners)
                followerSet.add(f.getSid());

            if (self.getQuorumVerifier().containsQuorum(followerSet)) {
                // Followers connected but did not ack in time.
                LOG.warn("Enough followers present. "
                        + "Perhaps the initTicks need to be increased.");
            }
            Thread.sleep(self.tickTime);
            self.tick.incrementAndGet();
            return;
        }

        startZkServer();

        /**
         * WARNING: do not use this for anything other than QA testing
         * on a real cluster. Specifically to enable verification that quorum
         * can handle the lower 32bit roll-over issue identified in
         * ZOOKEEPER-1277. Without this option it would take a very long
         * time (on order of a month say) to see the 4 billion writes
         * necessary to cause the roll-over to occur.
         *
         * This field allows you to override the zxid of the server. Typically
         * you'll want to set it to something like 0xfffffff0 and then
         * start the quorum, run some operations and see the re-election.
         */
        String initialZxid = System.getProperty("zookeeper.testingonly.initialZxid");
        if (initialZxid != null) {
            long zxid = Long.parseLong(initialZxid);
            zk.setZxid((zk.getZxid() & 0xffffffff00000000L) | zxid);
        }

        if (!System.getProperty("zookeeper.leaderServes", "yes").equals("no")) {
            self.cnxnFactory.setZooKeeperServer(zk);
        }

        // Everything is a go, simply start counting the ticks
        // WARNING: I couldn't find any wait statement on a synchronized
        // block that would be notified by this notifyAll() call, so
        // I commented it out
        //synchronized (this) {
        //    notifyAll();
        //}
        // We ping twice a tick, so we only update the tick every other
        // iteration
        boolean tickSkip = true;

        while (true) {
            Thread.sleep(self.tickTime / 2);
            if (!tickSkip) {
                self.tick.incrementAndGet();
            }
            HashSet<Long> syncedSet = new HashSet<Long>();

            // lock on the followers when we use it.
            syncedSet.add(self.getId());

            for (LearnerHandler f : getLearners()) {
                // Synced set is used to check we have a supporting quorum, so only
                // PARTICIPANT, not OBSERVER, learners should be used
                if (f.synced() && f.getLearnerType() == LearnerType.PARTICIPANT) {
                    syncedSet.add(f.getSid());
                }
                f.ping();
            }

            // check leader running status
            if (!this.isRunning()) {
                shutdown("Unexpected internal error");
                return;
            }

            if (!tickSkip && !self.getQuorumVerifier().containsQuorum(syncedSet)) {
                //if (!tickSkip && syncedCount < self.quorumPeers.size() / 2) {
                // Lost quorum, shutdown
                shutdown("Not sufficient followers synced, only synced with sids: [ "
                        + getSidSetString(syncedSet) + " ]");
                // make sure the order is the same!
                // the leader goes to looking
                return;
            }
            tickSkip = !tickSkip;
        }
    } finally {
        zk.unregisterJMX(this);
    }
}

// True once shutdown(String) has completed; guards against double shutdown.
boolean isShutdown;

/**
 * Close down all the LearnerHandlers
 */
void shutdown(String reason) {
    LOG.info("Shutting down");

    if (isShutdown) {
        return;
    }

    // The Exception is logged only to capture a stack trace of the caller.
    LOG.info("Shutdown called",
            new Exception("shutdown Leader! reason: " + reason));

    if (cnxAcceptor != null) {
        cnxAcceptor.halt();
    }

    // NIO should not accept connections
    self.cnxnFactory.setZooKeeperServer(null);
    try {
        ss.close();
    } catch (IOException e) {
        LOG.warn("Ignoring unexpected exception during close", e);
    }
    // clear all the connections
    self.cnxnFactory.closeAll();
    // shutdown the previous zk
    if (zk != null) {
        zk.shutdown();
    }
    synchronized (learners) {
        for (Iterator<LearnerHandler> it = learners.iterator(); it
                .hasNext();) {
            LearnerHandler f = it.next();
            it.remove();
            f.shutdown();
        }
    }
    isShutdown = true;
}

/**
 * Keep a count of acks that are received by the leader for a particular
 * proposal
 *
 * @param zxid
 *            the zxid of the proposal sent out
 * @param followerAddr
 */
synchronized public void processAck(long sid, long zxid, SocketAddress followerAddr) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("Ack zxid: 0x{}", Long.toHexString(zxid));
        for (Proposal p : outstandingProposals.values()) {
            long packetZxid = p.packet.getZxid();
            LOG.trace("outstanding proposal: 0x{}",
                    Long.toHexString(packetZxid));
        }
        LOG.trace("outstanding proposals all");
    }

    if ((zxid & 0xffffffffL) == 0) {
        /*
         * We no longer process NEWLEADER ack by this method. However,
         * the learner sends ack back to the leader after it gets UPTODATE
         * so we just ignore the message.
         */
        return;
    }

    if (outstandingProposals.size() == 0) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("outstanding is 0");
        }
        return;
    }
    if (lastCommitted >= zxid) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("proposal has already been committed, pzxid: 0x{} zxid: 0x{}",
                    Long.toHexString(lastCommitted), Long.toHexString(zxid));
        }
        // The proposal has already been committed
        return;
    }
    Proposal p = outstandingProposals.get(zxid);
    if (p == null) {
        LOG.warn("Trying to commit future proposal: zxid 0x{} from {}",
                Long.toHexString(zxid), followerAddr);
        return;
    }

    p.ackSet.add(sid);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Count for zxid: 0x{} is {}",
                Long.toHexString(zxid), p.ackSet.size());
    }
    if (self.getQuorumVerifier().containsQuorum(p.ackSet)) {
        if (zxid != lastCommitted + 1) {
            // Commits are expected to be issued in zxid order.
            LOG.warn("Commiting zxid 0x{} from {} not first!",
                    Long.toHexString(zxid), followerAddr);
            LOG.warn("First is 0x{}", Long.toHexString(lastCommitted + 1));
        }
        outstandingProposals.remove(zxid);
        if (p.request != null) {
            toBeApplied.add(p);
        }

        if (p.request == null) {
            LOG.warn("Going to commmit null request for proposal: {}", p);
        }
        commit(zxid);
        inform(p);
        zk.commitProcessor.commit(p.request);
        if (pendingSyncs.containsKey(zxid)) {
            // Release sync requests that were waiting on this commit.
            for (LearnerSyncRequest r : pendingSyncs.remove(zxid)) {
                sendSync(r);
            }
        }
    }
}

/**
 * Maintains the toBeApplied queue: once the downstream processor has
 * applied a request, the matching proposal is removed from the queue head.
 */
static class ToBeAppliedRequestProcessor implements RequestProcessor {
    private RequestProcessor next;

    private ConcurrentLinkedQueue<Proposal> toBeApplied;

    /**
     * This request processor simply maintains the toBeApplied list. For
     * this to work next must be a FinalRequestProcessor and
     * FinalRequestProcessor.processRequest MUST process the request
     * synchronously!
     *
     * @param next
     *            a reference to the FinalRequestProcessor
     */
    ToBeAppliedRequestProcessor(RequestProcessor next,
            ConcurrentLinkedQueue<Proposal> toBeApplied) {
        if (!(next instanceof FinalRequestProcessor)) {
            // The queue-trimming below relies on synchronous, in-order
            // processing, which only FinalRequestProcessor provides.
            throw new RuntimeException(ToBeAppliedRequestProcessor.class
                    .getName() + " must be connected to "
                    + FinalRequestProcessor.class.getName()
                    + " not " + next.getClass().getName());
        }
        this.toBeApplied = toBeApplied;
        this.next = next;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.apache.zookeeper.server.RequestProcessor#processRequest(org.apache.zookeeper.server.Request)
     */
    public void processRequest(Request request) throws RequestProcessorException {
        // request.addRQRec(">tobe");
        next.processRequest(request);
        // Pop the head proposal once its request has been fully applied.
        Proposal p = toBeApplied.peek();
        if (p != null && p.request != null
                && p.request.zxid == request.zxid) {
            toBeApplied.remove();
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see org.apache.zookeeper.server.RequestProcessor#shutdown()
     */
    public void shutdown() {
        LOG.info("Shutting down");
        next.shutdown();
    }
}

/**
 * send a packet to all the followers ready to follow
 *
 * @param qp
 *            the packet to be sent
 */
void sendPacket(QuorumPacket qp) {
    synchronized (forwardingFollowers) {
        for (LearnerHandler f : forwardingFollowers) {
            f.queuePacket(qp);
        }
    }
}

/**
 * send a packet to all observers
 */
void sendObserverPacket(QuorumPacket qp) {
    for (LearnerHandler f : getObservingLearners()) {
        f.queuePacket(qp);
    }
}

// zxid of the most recently committed proposal; -1 before any commit.
long lastCommitted = -1;

/**
 * Create a commit packet and send it to all the members of the quorum
 *
 * @param zxid
 */
public void commit(long zxid) {
    synchronized (this) {
        lastCommitted = zxid;
    }
    QuorumPacket qp = new QuorumPacket(Leader.COMMIT, zxid, null, null);
    sendPacket(qp);
}

/**
 * Create an inform packet and send it to all observers.
 *
 * @param proposal the committed proposal to forward to observers
 */
public void inform(Proposal proposal) {
    QuorumPacket qp = new QuorumPacket(Leader.INFORM, proposal.request.zxid,
            proposal.packet.getData(), null);
    sendObserverPacket(qp);
}

// zxid of the most recently proposed operation.
long lastProposed;

/**
 * Returns the current epoch of the leader.
 *
 * @return the epoch encoded in the high 32 bits of lastProposed
 */
public long getEpoch() {
    return ZxidUtils.getEpochFromZxid(lastProposed);
}

@SuppressWarnings("serial")
public static class XidRolloverException extends Exception {
    public XidRolloverException(String message) {
        super(message);
    }
}

/**
 * create a proposal and send it out to all the members
 *
 * @param request
 * @return the proposal that is queued to send to all the members
 */
public Proposal propose(Request request) throws XidRolloverException {
    /**
     * Address the rollover issue. All lower 32bits set indicate a new leader
     * election. Force a re-election instead. See ZOOKEEPER-1277
     */
    if ((request.zxid & 0xffffffffL) == 0xffffffffL) {
        String msg =
                "zxid lower 32 bits have rolled over, forcing re-election, and therefore new epoch start";
        shutdown(msg);
        throw new XidRolloverException(msg);
    }
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryOutputArchive boa = BinaryOutputArchive.getArchive(baos);
    try {
        request.hdr.serialize(boa, "hdr");
        if (request.txn != null) {
            request.txn.serialize(boa, "txn");
        }
        baos.close();
    } catch (IOException e) {
        // Serializing to an in-memory stream should never throw.
        LOG.warn("This really should be impossible", e);
    }
    QuorumPacket pp = new QuorumPacket(Leader.PROPOSAL, request.zxid,
            baos.toByteArray(), null);

    Proposal p = new Proposal();
    p.packet = pp;
    p.request = request;
    synchronized (this) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Proposing:: " + request);
        }

        lastProposed = p.packet.getZxid();
        outstandingProposals.put(lastProposed, p);
        sendPacket(pp);
    }
    return p;
}

/**
 * Process sync requests
 *
 * @param r the request
 */
synchronized public void processSync(LearnerSyncRequest r) {
    if (outstandingProposals.isEmpty()) {
        // Nothing in flight: the follower is already up to date.
        sendSync(r);
    } else {
        // Park the request until the latest outstanding proposal commits.
        List<LearnerSyncRequest> l = pendingSyncs.get(lastProposed);
        if (l == null) {
            l = new ArrayList<LearnerSyncRequest>();
        }
        l.add(r);
        pendingSyncs.put(lastProposed, l);
    }
}

/**
 * Sends a sync message to the appropriate server
 *
 * @param r
 */
public void sendSync(LearnerSyncRequest r) {
    QuorumPacket qp = new QuorumPacket(Leader.SYNC, 0, null, null);
    r.fh.queuePacket(qp);
}

/**
 * lets the leader know that a follower is capable of following and is done
 * syncing
 *
 * @param handler handler of the follower
 * @return last proposed zxid
 */
synchronized public long startForwarding(LearnerHandler handler, long lastSeenZxid) {
    // Queue up any outstanding requests enabling the receipt of
    // new requests
    if (lastProposed > lastSeenZxid) {
        for (Proposal p : toBeApplied) {
            if (p.packet.getZxid() <= lastSeenZxid) {
                continue;
            }
            handler.queuePacket(p.packet);
            // Since the proposal has been committed we need to send the
            // commit message also
            QuorumPacket qp = new QuorumPacket(Leader.COMMIT, p.packet
                    .getZxid(), null, null);
            handler.queuePacket(qp);
        }
        // Only participant need to get outstanding proposals
        if (handler.getLearnerType() == LearnerType.PARTICIPANT) {
            List<Long> zxids = new ArrayList<Long>(outstandingProposals.keySet());
            Collections.sort(zxids);
            for (Long zxid : zxids) {
                if (zxid <= lastSeenZxid) {
                    continue;
                }
                handler.queuePacket(outstandingProposals.get(zxid).packet);
            }
        }
    }
    if (handler.getLearnerType() == LearnerType.PARTICIPANT) {
        addForwardingFollower(handler);
    } else {
        addObserverLearnerHandler(handler);
    }

    return lastProposed;
}

// Server ids that have reported their accepted epoch during negotiation.
private HashSet<Long> connectingFollowers = new HashSet<Long>();

/**
 * Blocks until a quorum (including the leader itself) has reported its
 * accepted epoch, then returns the new epoch to propose.
 *
 * @param sid server id of the reporting peer
 * @param lastAcceptedEpoch the epoch that peer last accepted
 * @throws InterruptedException if the quorum does not form within initLimit
 */
public long getEpochToPropose(long sid, long lastAcceptedEpoch) throws InterruptedException, IOException {
    synchronized (connectingFollowers) {
        if (!waitingForNewEpoch) {
            return epoch;
        }
        // Proposed epoch is one past the largest accepted epoch seen so far.
        if (lastAcceptedEpoch >= epoch) {
            epoch = lastAcceptedEpoch + 1;
        }
        connectingFollowers.add(sid);
        QuorumVerifier verifier = self.getQuorumVerifier();
        if (connectingFollowers.contains(self.getId())
                && verifier.containsQuorum(connectingFollowers)) {
            waitingForNewEpoch = false;
            self.setAcceptedEpoch(epoch);
            connectingFollowers.notifyAll();
        } else {
            long start = Time.currentElapsedTime();
            long cur = start;
            long end = start + self.getInitLimit() * self.getTickTime();
            while (waitingForNewEpoch && cur < end) {
                connectingFollowers.wait(end - cur);
                cur = Time.currentElapsedTime();
            }
            if (waitingForNewEpoch) {
                throw new InterruptedException("Timeout while waiting for epoch from quorum");
            }
        }
        return epoch;
    }
}

// Server ids whose epoch ack has been received.
private HashSet<Long> electingFollowers = new HashSet<Long>();
private boolean electionFinished = false;

/**
 * Blocks until a quorum (including the leader) has acked the proposed
 * epoch.
 *
 * @throws IOException if a follower reports state newer than the leader's
 * @throws InterruptedException if the quorum does not ack within initLimit
 */
public void waitForEpochAck(long id, StateSummary ss) throws IOException, InterruptedException {
    synchronized (electingFollowers) {
        if (electionFinished) {
            return;
        }
        if (ss.getCurrentEpoch() != -1) {
            if (ss.isMoreRecentThan(leaderStateSummary)) {
                throw new IOException("Follower is ahead of the leader, leader summary: "
                        + leaderStateSummary.getCurrentEpoch()
                        + " (current epoch), "
                        + leaderStateSummary.getLastZxid()
                        + " (last zxid)");
            }
            electingFollowers.add(id);
        }
        QuorumVerifier verifier = self.getQuorumVerifier();
        if (electingFollowers.contains(self.getId()) && verifier.containsQuorum(electingFollowers)) {
            electionFinished = true;
            electingFollowers.notifyAll();
        } else {
            long start = Time.currentElapsedTime();
            long cur = start;
            long end = start + self.getInitLimit() * self.getTickTime();
            while (!electionFinished && cur < end) {
                electingFollowers.wait(end - cur);
                cur = Time.currentElapsedTime();
            }
            if (!electionFinished) {
                throw new InterruptedException("Timeout while waiting for epoch to be acked by quorum");
            }
        }
    }
}

/**
 * Return a list of sid in set as string
 */
private String getSidSetString(Set<Long> sidSet) {
    StringBuilder sids = new StringBuilder();
    Iterator<Long> iter = sidSet.iterator();
    while (iter.hasNext()) {
        sids.append(iter.next());
        if (!iter.hasNext()) {
            break;
        }
        sids.append(",");
    }
    return sids.toString();
}

/**
 * Start up Leader ZooKeeper server and initialize zxid to the new epoch
 */
private synchronized
void startZkServer() {
    // Update lastCommitted and Db's zxid to a value representing the new epoch
    lastCommitted = zk.getZxid();
    LOG.info("Have quorum of supporters, sids: [ "
            + getSidSetString(newLeaderProposal.ackSet)
            + " ]; starting up and setting last processed zxid: 0x{}",
            Long.toHexString(zk.getZxid()));
    zk.startup();
    /*
     * Update the election vote here to ensure that all members of the
     * ensemble report the same vote to new servers that start up and
     * send leader election notifications to the ensemble.
     *
     * @see https://issues.apache.org/jira/browse/ZOOKEEPER-1732
     */
    self.updateElectionVote(getEpoch());

    zk.getZKDatabase().setlastProcessedZxid(zk.getZxid());
}

/**
 * Process NEWLEADER ack of a given sid and wait until the leader receives
 * sufficient acks.
 *
 * @param sid
 * @param learnerType
 * @throws InterruptedException
 */
public void waitForNewLeaderAck(long sid, long zxid, LearnerType learnerType)
        throws InterruptedException {
    synchronized (newLeaderProposal.ackSet) {
        if (quorumFormed) {
            return;
        }
        long currentZxid = newLeaderProposal.packet.getZxid();
        if (zxid != currentZxid) {
            // Stale ack for a different NEWLEADER proposal; ignore it.
            LOG.error("NEWLEADER ACK from sid: " + sid
                    + " is from a different epoch - current 0x"
                    + Long.toHexString(currentZxid)
                    + " receieved 0x" + Long.toHexString(zxid));
            return;
        }
        // Only participants count toward the quorum.
        if (learnerType == LearnerType.PARTICIPANT) {
            newLeaderProposal.ackSet.add(sid);
        }
        if (self.getQuorumVerifier().containsQuorum(
                newLeaderProposal.ackSet)) {
            quorumFormed = true;
            newLeaderProposal.ackSet.notifyAll();
        } else {
            long start = Time.currentElapsedTime();
            long cur = start;
            long end = start + self.getInitLimit() * self.getTickTime();
            while (!quorumFormed && cur < end) {
                newLeaderProposal.ackSet.wait(end - cur);
                cur = Time.currentElapsedTime();
            }
            if (!quorumFormed) {
                throw new InterruptedException(
                        "Timeout while waiting for NEWLEADER to be acked by quorum");
            }
        }
    }
}

/**
 * Get string representation of a given packet type
 *
 * @param packetType
 * @return string representing the packet type
 */
public static
String getPacketType(int packetType) {
    // Translate a protocol constant into its symbolic name for logging.
    switch (packetType) {
    case DIFF:
        return "DIFF";
    case TRUNC:
        return "TRUNC";
    case SNAP:
        return "SNAP";
    case OBSERVERINFO:
        return "OBSERVERINFO";
    case NEWLEADER:
        return "NEWLEADER";
    case FOLLOWERINFO:
        return "FOLLOWERINFO";
    case UPTODATE:
        return "UPTODATE";
    case LEADERINFO:
        return "LEADERINFO";
    case ACKEPOCH:
        return "ACKEPOCH";
    case REQUEST:
        return "REQUEST";
    case PROPOSAL:
        return "PROPOSAL";
    case ACK:
        return "ACK";
    case COMMIT:
        return "COMMIT";
    case PING:
        return "PING";
    case REVALIDATE:
        return "REVALIDATE";
    case SYNC:
        return "SYNC";
    case INFORM:
        return "INFORM";
    default:
        return "UNKNOWN";
    }
}

// The leader keeps running only while both the peer and the server run.
private boolean isRunning() {
    return self.isRunning() && zk.isRunning();
}
}
/* * Copyright (c) 2003-2009 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.jme.renderer.pass; import java.io.Serializable; import java.util.ArrayList; import com.jme.renderer.RenderContext; import com.jme.renderer.Renderer; import com.jme.scene.Spatial; import com.jme.scene.state.RenderState; import com.jme.system.DisplaySystem; /** * <code>Pass</code> encapsulates logic necessary for rendering one or more * steps in a multipass technique. 
* * Rendering: * * When renderPass is called, a check is first made to see if the * pass isEnabled(). Then any states set on this pass are enforced via * Spatial.enforceState(RenderState). This is useful for doing things such * as causing this pass to be blended to a previous pass via enforcing an * BlendState, etc. Next, doRender(Renderer) is called to do the actual * rendering work. Finally, any enforced states set before this pass was * run are restored. * * @author Joshua Slack * @version $Id: Pass.java 4596 2009-08-16 03:17:16Z mulova $ */ public abstract class Pass implements Serializable { private static final long serialVersionUID = -2608939114161492853L; /** list of spatials registered with this pass. */ protected ArrayList<Spatial> spatials = new ArrayList<Spatial>(); /** if false, pass will not be updated or rendered. */ protected boolean enabled = true; /** offset params to use to differentiate multiple passes of the same scene in the zbuffer. */ protected float zFactor; protected float zOffset; /** * RenderStates registered with this pass - if a given state is not null it * overrides the corresponding state set during rendering. */ protected RenderState[] passStates = new RenderState[RenderState.StateType.values().length]; /** a place to internally save previous states setup before rendering this pass */ protected RenderState[] savedStates = new RenderState[RenderState.StateType.values().length]; protected RenderContext<?> context = null; /** if enabled, set the states for this pass and then render. */ public final void renderPass(Renderer r) { if (!enabled) return; context = DisplaySystem.getDisplaySystem().getCurrentContext(); applyPassStates(); if (zFactor != 0 || zOffset != 0) { r.setPolygonOffset(zFactor, zOffset); doRender(r); r.clearPolygonOffset(); } else { doRender(r); } resetOldStates(); context = null; } /** * Enforce a particular state. In other words, the given state will override * any state of the same type set on a scene object. 
Remember to clear the * state when done enforcing. Very useful for multipass techniques where * multiple sets of states need to be applied to a scenegraph drawn multiple * times. * * @param state * state to enforce */ public void setPassState(RenderState state) { passStates[state.getStateType().ordinal()] = state; } /** * Returns the requested RenderState that this Pass currently has set or * null if none is set. * * @param type * the renderstate type to retrieve * @return a renderstate at the given position or null * @deprecated As of 2.0, use {@link #getRenderState(com.jme.scene.state.RenderState.StateType)} instead. */ public RenderState getRenderState(int type) { return passStates != null ? passStates[type] : null; } /** * Returns the requested RenderState that this Pass currently has set or * null if none is set. * * @param type * the renderstate type to retrieve * @return a renderstate at the given position or null */ public RenderState getRenderState(RenderState.StateType type) { return passStates != null ? passStates[type.ordinal()] : null; } /** * Clears an enforced render state index by setting it to null. This allows * object specific states to be used. * * @param renderStateType * The type of RenderState to clear enforcement on. * @deprecated As of 2.0, use {@link #clearPassState(com.jme.scene.state.RenderState.StateType)} instead. */ public void clearPassState(int renderStateType) { passStates[renderStateType] = null; } /** * Clears an enforced render state by setting it to null. This allows * object specific states to be used. * * @param type * The type of RenderState to clear enforcement on. */ public void clearPassState(RenderState.StateType type) { passStates[type.ordinal()] = null; } /** * sets all enforced states to null. 
* * @see RenderContext#clearEnforcedState(int) */ public void clearPassStates() { for (int i = 0; i < passStates.length; i++) passStates[i] = null; } protected void applyPassStates() { for (int x = RenderState.StateType.values().length; --x >= 0;) { if (passStates[x] != null) { savedStates[x] = context.enforcedStateList[x]; context.enforcedStateList[x] = passStates[x]; } } } protected abstract void doRender(Renderer r); protected void resetOldStates() { for (int x = RenderState.StateType.values().length; --x >= 0;) { if (passStates[x] != null) { context.enforcedStateList[x] = savedStates[x]; } } } /** if enabled, call doUpdate to update information for this pass. */ public final void updatePass(float tpf) { if (!enabled) return; doUpdate(tpf); } protected void doUpdate(float tpf) { } public void add(Spatial toAdd) { spatials.add(toAdd); } public Spatial get(int index) { return spatials.get(index); } public boolean contains(Spatial s) { return spatials.contains(s); } public boolean remove(Spatial toRemove) { return spatials.remove(toRemove); } public void removeAll() { spatials.clear(); } public int size() { return spatials.size(); } /** * @return Returns the enabled. */ public boolean isEnabled() { return enabled; } /** * @param enabled The enabled to set. */ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * @return Returns the zFactor. */ public float getZFactor() { return zFactor; } /** * Sets the polygon offset param - factor - for this Pass. * * @param factor * The zFactor to set. */ public void setZFactor(float factor) { zFactor = factor; } /** * @return Returns the zOffset. */ public float getZOffset() { return zOffset; } /** * Sets the polygon offset param - offset - for this Pass. * * @param offset * The zOffset to set. */ public void setZOffset(float offset) { zOffset = offset; } public void cleanUp() { } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.runtime.instructions.cp;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Map.Entry;

import org.apache.sysml.api.DMLScript;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.parser.DMLProgram;
import org.apache.sysml.parser.DataIdentifier;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.DMLScriptException;
import org.apache.sysml.runtime.controlprogram.FunctionProgramBlock;
import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
import org.apache.sysml.runtime.controlprogram.caching.CacheableData;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContextFactory;
import org.apache.sysml.runtime.instructions.Instruction;
import org.apache.sysml.runtime.instructions.InstructionUtils;

/**
 * CP instruction that invokes a DML function: binds call-site variables to
 * the function's formal parameters, executes the function body in a fresh
 * execution context, and copies the return values back to the caller.
 */
public class FunctionCallCPInstruction extends CPInstruction {
    // Name of the function to invoke and the namespace declaring it.
    private final String _functionName;
    private final String _namespace;
    // Operands bound to the function's formal input parameters.
    private final CPOperand[] _boundInputs;
    // Call-site variable names bound to the inputs and outputs.
    private final ArrayList<String> _boundInputNames;
    private final ArrayList<String> _boundOutputNames;

    private
FunctionCallCPInstruction(String namespace, String functName,
        CPOperand[] boundInputs, ArrayList<String> boundInputNames,
        ArrayList<String> boundOutputNames, String istr) {
    super(CPType.External, null, functName, istr);
    _functionName = functName;
    _namespace = namespace;
    _boundInputs = boundInputs;
    _boundInputNames = boundInputNames;
    _boundOutputNames = boundOutputNames;
}

public String getFunctionName() {
    return _functionName;
}

public String getNamespace() {
    return _namespace;
}

/**
 * Parses a serialized function-call instruction.
 * Schema: extfunct, fname, num inputs, num outputs, inputs, outputs.
 */
public static FunctionCallCPInstruction parseInstruction(String str)
    throws DMLRuntimeException
{
    //schema: extfunct, fname, num inputs, num outputs, inputs, outputs
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    String namespace = parts[1];
    String functionName = parts[2];
    int numInputs = Integer.valueOf(parts[3]);
    int numOutputs = Integer.valueOf(parts[4]);
    CPOperand[] boundInputs = new CPOperand[numInputs];
    ArrayList<String> boundInputNames = new ArrayList<>();
    ArrayList<String> boundOutputNames = new ArrayList<>();
    for (int i = 0; i < numInputs; i++) {
        boundInputs[i] = new CPOperand(parts[5 + i]);
        boundInputNames.add(boundInputs[i].getName());
    }
    for (int i = 0; i < numOutputs; i++)
        boundOutputNames.add(parts[5 + numInputs + i]);
    return new FunctionCallCPInstruction(namespace, functionName,
        boundInputs, boundInputNames, boundOutputNames, str);
}

@Override
public Instruction preprocessInstruction(ExecutionContext ec)
    throws DMLRuntimeException
{
    //default pre-process behavior
    Instruction tmp = super.preprocessInstruction(ec);
    //maintain debug state (function call stack)
    if( DMLScript.ENABLE_DEBUG_MODE ) {
        ec.handleDebugFunctionEntry((FunctionCallCPInstruction) tmp);
    }
    return tmp;
}

@Override
public void processInstruction(ExecutionContext ec)
    throws DMLRuntimeException
{
    if( LOG.isTraceEnabled() ){
        LOG.trace("Executing instruction : " + this.toString());
    }
    // get the function program block (stored in the Program object)
    FunctionProgramBlock fpb = ec.getProgram().getFunctionProgramBlock(_namespace, _functionName);

    // sanity check number of function parameters
    // (only fewer bound inputs than formal parameters is rejected here;
    // surplus inputs are not checked)
    if( _boundInputs.length < fpb.getInputParams().size() ) {
        throw new DMLRuntimeException("Number of bound input parameters does not match the function signature "
            + "("+_boundInputs.length+", but "+fpb.getInputParams().size()+" expected)");
    }

    // create bindings to formal parameters for given function call
    // These are the bindings passed to the FunctionProgramBlock for function execution
    LocalVariableMap functionVariables = new LocalVariableMap();
    for( int i=0; i<fpb.getInputParams().size(); i++) {
        //error handling non-existing variables
        CPOperand input = _boundInputs[i];
        if( !input.isLiteral() && !ec.containsVariable(input.getName()) ) {
            throw new DMLRuntimeException("Input variable '"+input.getName()+"' not existing on call of "
                + DMLProgram.constructFunctionKey(_namespace, _functionName) + " (line "+getLineNum()+").");
        }
        //get input matrix/frame/scalar
        DataIdentifier currFormalParam = fpb.getInputParams().get(i);
        Data value = ec.getVariable(input);

        //graceful value type conversion for scalar inputs with wrong type
        if( value.getDataType() == DataType.SCALAR
            && value.getValueType() != currFormalParam.getValueType() )
        {
            value = ScalarObjectFactory.createScalarObject(
                currFormalParam.getValueType(), (ScalarObject)value);
        }

        //set input parameter
        functionVariables.put(currFormalParam.getName(), value);
    }

    // Pin the input variables so that they do not get deleted
    // from pb's symbol table at the end of execution of function
    boolean[] pinStatus = ec.pinVariables(_boundInputNames);

    // Create a symbol table under a new execution context for the function invocation,
    // and copy the function arguments into the created table.
    ExecutionContext fn_ec = ExecutionContextFactory.createContext(false, ec.getProgram());
    if (DMLScript.USE_ACCELERATOR) {
        // Share the caller's GPU contexts with the function's context.
        fn_ec.setGPUContexts(ec.getGPUContexts());
        fn_ec.getGPUContext(0).initializeThread();
    }
    fn_ec.setVariables(functionVariables);

    // execute the function block
    try {
        fpb._functionName = this._functionName;
        fpb._namespace = this._namespace;
        fpb.execute(fn_ec);
    }
    catch (DMLScriptException e) {
        // NOTE(review): rethrown before unpinVariables below runs, so the
        // pinned inputs stay pinned on this path -- confirm intentional.
        throw e;
    }
    catch (Exception e){
        String fname = DMLProgram.constructFunctionKey(_namespace, _functionName);
        throw new DMLRuntimeException("error executing function " + fname, e);
    }

    // cleanup all returned variables w/o binding
    HashSet<String> expectRetVars = new HashSet<>();
    for(DataIdentifier di : fpb.getOutputParams())
        expectRetVars.add(di.getName());

    LocalVariableMap retVars = fn_ec.getVariables();
    for( Entry<String,Data> var : retVars.entrySet() ) {
        if( expectRetVars.contains(var.getKey()) )
            continue;
        //cleanup unexpected return values to avoid leaks
        if( var.getValue() instanceof CacheableData )
            fn_ec.cleanupCacheableData((CacheableData<?>)var.getValue());
    }

    // Unpin the pinned variables
    ec.unpinVariables(_boundInputNames, pinStatus);

    // add the updated binding for each return variable to the variables in original symbol table
    for (int i=0; i< fpb.getOutputParams().size(); i++){
        String boundVarName = _boundOutputNames.get(i);
        Data boundValue = retVars.get(fpb.getOutputParams().get(i).getName());
        if (boundValue == null)
            throw new DMLRuntimeException(boundVarName + " was not assigned a return value");

        //cleanup existing data bound to output variable name
        Data exdata = ec.removeVariable(boundVarName);
        if ( exdata != null && exdata instanceof CacheableData && exdata != boundValue ) {
            ec.cleanupCacheableData( (CacheableData<?>)exdata );
        }

        //add/replace data in symbol table
        ec.setVariable(boundVarName, boundValue);
    }
}

@Override
public void postprocessInstruction(ExecutionContext ec)
    throws DMLRuntimeException
{
    //maintain debug state (function call stack)
    if (DMLScript.ENABLE_DEBUG_MODE ) {
        ec.handleDebugFunctionExit( this );
    }
    //default post-process behavior
    super.postprocessInstruction(ec);
}

@Override
public void printMe() {
    LOG.debug("ExternalBuiltInFunction: " + this.toString());
}

/** @return names of variables bound to the function inputs at the call site. */
public ArrayList<String> getBoundInputParamNames() {
    return _boundInputNames;
}

/** @return names of variables bound to the function outputs at the call site. */
public ArrayList<String> getBoundOutputParamNames() {
    return _boundOutputNames;
}

/**
 * Returns a copy of the instruction string with the function name replaced.
 * Note: despite the name, this does not mutate instString; it only builds
 * and returns the modified string.
 */
public String updateInstStringFunctionName(String pattern, String replace) {
    //split current instruction
    String[] parts = instString.split(Lop.OPERAND_DELIMITOR);
    // part 3 presumably holds the function name in the serialized operand
    // layout -- verify if the instruction schema ever changes.
    if( parts[3].equals(pattern) )
        parts[3] = replace;

    //construct modified instruction string
    StringBuilder sb = new StringBuilder();
    for( String part : parts ) {
        sb.append(part);
        sb.append(Lop.OPERAND_DELIMITOR);
    }
    return sb.substring( 0, sb.length()-Lop.OPERAND_DELIMITOR.length() );
}
}
/* * Copyright 2012-2016 bambooCORE, greenstep of copyright Chen Xin Nien * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ----------------------------------------------------------------------- * * author: Chen Xin Nien * contact: [email protected] * */ package com.netsteadfast.greenstep.qcharts.action; import java.util.List; import java.util.Map; import javax.annotation.Resource; import org.apache.log4j.Logger; import org.apache.struts2.json.annotations.JSON; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Controller; import com.netsteadfast.greenstep.base.action.BaseJsonAction; import com.netsteadfast.greenstep.base.exception.AuthorityException; import com.netsteadfast.greenstep.base.exception.ControllerException; import com.netsteadfast.greenstep.base.exception.ServiceException; import com.netsteadfast.greenstep.base.model.ControllerAuthority; import com.netsteadfast.greenstep.base.model.ControllerMethodAuthority; import com.netsteadfast.greenstep.base.model.DefaultResult; import com.netsteadfast.greenstep.qcharts.action.utils.IdFieldCheckUtils; import com.netsteadfast.greenstep.qcharts.action.utils.NotBlankFieldCheckUtils; import com.netsteadfast.greenstep.qcharts.action.utils.SelectItemFieldCheckUtils; import com.netsteadfast.greenstep.qcharts.service.logic.IDataSourceLogicService; import com.netsteadfast.greenstep.vo.DataSourceConfVO; @ControllerAuthority(check=true) 
@Controller("qcharts.web.controller.DataSourceConfSaveOrUpdateAction") @Scope public class DataSourceConfSaveOrUpdateAction extends BaseJsonAction { private static final long serialVersionUID = 8827352463287481766L; protected Logger logger=Logger.getLogger(DataSourceConfSaveOrUpdateAction.class); private IDataSourceLogicService dataSourceLogicService; private String message = ""; private String success = IS_NO; public DataSourceConfSaveOrUpdateAction() { super(); } @JSON(serialize=false) public IDataSourceLogicService getDataSourceLogicService() { return dataSourceLogicService; } @Autowired @Resource(name="qcharts.service.logic.DataSourceLogicService") public void setDataSourceLogicService( IDataSourceLogicService dataSourceLogicService) { this.dataSourceLogicService = dataSourceLogicService; } private void checkFields() throws ControllerException { this.getCheckFieldHandler() .add("driverOid", SelectItemFieldCheckUtils.class, this.getText("MESSAGE.QCHARTS_PROG001D0001A_driverOid") ) .add("id", IdFieldCheckUtils.class, this.getText("MESSAGE.QCHARTS_PROG001D0001A_id") ) .add("name", NotBlankFieldCheckUtils.class, this.getText("MESSAGE.QCHARTS_PROG001D0001A_name") ) .add("jdbcUrl", NotBlankFieldCheckUtils.class, this.getText("MESSAGE.QCHARTS_PROG001D0001A_jdbcUrl") ) .add("dbAccount", NotBlankFieldCheckUtils.class, this.getText("MESSAGE.QCHARTS_PROG001D0001A_dbAccount") ) .add("dbPassword", NotBlankFieldCheckUtils.class, this.getText("MESSAGE.QCHARTS_PROG001D0001A_dbPassword") ) .process().throwMessage(); } private void save() throws ControllerException, AuthorityException, ServiceException, Exception { this.checkFields(); DataSourceConfVO conf = new DataSourceConfVO(); this.transformFields2ValueObject(conf, new String[]{"id", "name", "jdbcUrl", "dbAccount", "dbPassword", "description"}); DefaultResult<DataSourceConfVO> result = this.dataSourceLogicService.createConf(this.getFields().get("driverOid"), conf); this.message = result.getSystemMessage().getValue(); if 
(result.getValue()!=null) { this.success = IS_YES; } } private void update() throws ControllerException, AuthorityException, ServiceException, Exception { this.checkFields(); DataSourceConfVO conf = new DataSourceConfVO(); this.transformFields2ValueObject(conf, new String[]{"oid", "id", "name", "jdbcUrl", "dbAccount", "dbPassword", "description"}); DefaultResult<DataSourceConfVO> result = this.dataSourceLogicService.updateConf(this.getFields().get("driverOid"), conf); this.message = result.getSystemMessage().getValue(); if (result.getValue()!=null) { this.success = IS_YES; } } private void delete() throws ControllerException, AuthorityException, ServiceException, Exception { DataSourceConfVO conf = new DataSourceConfVO(); this.transformFields2ValueObject(conf, new String[]{"oid"}); DefaultResult<Boolean> result = this.dataSourceLogicService.deleteConf(conf); this.message = result.getSystemMessage().getValue(); if (result.getValue()!=null && result.getValue()) { this.success = IS_YES; } } /** * qcharts.dataSourceConfSaveAction.action * * @return * @throws Exception */ @ControllerMethodAuthority(programId="QCHARTS_PROG001D0001A") public String doSave() throws Exception { try { if (!this.allowJob()) { this.message = this.getNoAllowMessage(); return SUCCESS; } this.save(); } catch (AuthorityException | ControllerException | ServiceException e) { this.message = e.getMessage().toString(); } catch (Exception e) { this.message = this.logException(e); this.success = IS_EXCEPTION; } return SUCCESS; } /** * qcharts.dataSourceConfUpdateAction.action * * @return * @throws Exception */ @ControllerMethodAuthority(programId="QCHARTS_PROG001D0001E") public String doUpdate() throws Exception { try { if (!this.allowJob()) { this.message = this.getNoAllowMessage(); return SUCCESS; } this.update(); } catch (AuthorityException | ControllerException | ServiceException e) { this.message = e.getMessage().toString(); } catch (Exception e) { this.message = this.logException(e); this.success 
= IS_EXCEPTION; } return SUCCESS; } /** * qcharts.dataSourceConfDeleteAction.action * * @return * @throws Exception */ @ControllerMethodAuthority(programId="QCHARTS_PROG001D0001Q") public String doDelete() throws Exception { try { if (!this.allowJob()) { this.message = this.getNoAllowMessage(); return SUCCESS; } this.delete(); } catch (AuthorityException | ControllerException | ServiceException e) { this.message = e.getMessage().toString(); } catch (Exception e) { this.message = this.logException(e); this.success = IS_EXCEPTION; } return SUCCESS; } @JSON @Override public String getLogin() { return super.isAccountLogin(); } @JSON @Override public String getIsAuthorize() { return super.isActionAuthorize(); } @JSON @Override public String getMessage() { return this.message; } @JSON @Override public String getSuccess() { return this.success; } @JSON @Override public List<String> getFieldsId() { return this.fieldsId; } @JSON @Override public Map<String, String> getFieldsMessage() { return this.fieldsMessage; } }
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.pi.runtime; import com.google.common.annotations.Beta; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; import org.onosproject.net.pi.model.PiActionProfileId; import static com.google.common.base.Preconditions.checkNotNull; /** * Instance of a member of an action group in a protocol-independent pipeline. */ @Beta public final class PiActionGroupMember implements PiEntity { private final PiActionProfileId actionProfileId; private final PiActionGroupMemberId id; private final PiAction action; // FIXME: in P4Runtime weight is an attribute of the member reference in a // group. Either remove it from this class or define the containing group // ID. private final int weight; private PiActionGroupMember( PiActionProfileId actionProfileId, PiActionGroupMemberId id, PiAction action, int weight) { this.actionProfileId = actionProfileId; this.id = id; this.action = action; this.weight = weight; } /** * Returns the identifier of this member. * * @return member identifier */ public PiActionGroupMemberId id() { return id; } /** * Returns the identifier of the action profile. * * @return action profile identifier */ public PiActionProfileId actionProfile() { return actionProfileId; } /** * Returns the action associated to this member. 
* * @return action */ public PiAction action() { return action; } /** * Returns the weight associated to this member. * * @return weight */ public int weight() { return weight; } @Override public PiEntityType piEntityType() { return PiEntityType.GROUP_MEMBER; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof PiActionGroupMember)) { return false; } PiActionGroupMember that = (PiActionGroupMember) o; return weight == that.weight && Objects.equal(actionProfileId, that.actionProfileId) && Objects.equal(id, that.id) && Objects.equal(action, that.action); } @Override public int hashCode() { return Objects.hashCode(actionProfileId, id, action, weight); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("actionProfile", actionProfileId) .add("id", id) .add("action", action) .add("weight", weight) .toString(); } /** * Returns a new builder of action group members. * * @return member builder */ public static Builder builder() { return new Builder(); } /** * Builder of action group members. */ public static final class Builder { private PiActionProfileId actionProfileId; private PiActionGroupMemberId id; private PiAction action; private int weight; private Builder() { // Hides constructor. } /** * Sets the action profile identifier of this member. * * @param actionProfileId action profile identifier * @return this */ public Builder forActionProfile(PiActionProfileId actionProfileId) { this.actionProfileId = actionProfileId; return this; } /** * Sets the identifier of this member. * * @param id member identifier * @return this */ public Builder withId(PiActionGroupMemberId id) { this.id = id; return this; } /** * Sets the action of this member. * * @param action action * @return this */ public Builder withAction(PiAction action) { this.action = action; return this; } /** * Sets the weight of this member. * <p> * Default value is 0. 
* * @param weight weight * @return this */ public Builder withWeight(int weight) { this.weight = weight; return this; } /** * Creates a new action group member. * * @return action group member */ public PiActionGroupMember build() { checkNotNull(actionProfileId); checkNotNull(id); checkNotNull(action); return new PiActionGroupMember(actionProfileId, id, action, weight); } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query.functionscore; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.search.ComplexExplanation; import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.FieldMapper; 
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Locale; /** * This class provides the basic functionality needed for adding a decay * function. * * This parser parses this kind of input * * <pre> * {@code} * { * "fieldname1" : { * "origin" = "someValue", * "scale" = "someValue" * } * * } * </pre> * * "origin" here refers to the reference point and "scale" to the level of * uncertainty you have in your origin. * <p> * * For example, you might want to retrieve an event that took place around the * 20 May 2010 somewhere near Berlin. You are mainly interested in events that * are close to the 20 May 2010 but you are unsure about your guess, maybe it * was a week before or after that. Your "origin" for the date field would be * "20 May 2010" and your "scale" would be "7d". * * This class parses the input and creates a scoring function from the * parameters origin and scale. * <p> * To write a new scoring function, create a new class that inherits from this * one and implement the getDistanceFuntion(). Furthermore, to create a builder, * override the getName() in {@link DecayFunctionBuilder}. * <p> * See {@link GaussDecayFunctionBuilder} and {@link GaussDecayFunctionParser} * for an example. The parser furthermore needs to be registered in the * {@link org.elasticsearch.index.query.functionscore.FunctionScoreModule * FunctionScoreModule}. 
* * **/ public abstract class DecayFunctionParser implements ScoreFunctionParser { public static final ParseField MULTI_VALUE_MODE = new ParseField("multi_value_mode"); /** * Override this function if you want to produce your own scorer. * */ public abstract DecayFunction getDecayFunction(); /** * Parses bodies of the kind * * <pre> * {@code} * { * "fieldname1" : { * "origin" = "someValue", * "scale" = "someValue" * } * * } * </pre> * * */ @Override public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException { String currentFieldName; XContentParser.Token token; AbstractDistanceScoreFunction scoreFunction = null; String multiValueMode = "MIN"; XContentBuilder variableContent = XContentFactory.jsonBuilder(); String fieldName = null; while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { variableContent.copyCurrentStructure(parser); fieldName = currentFieldName; } else if (MULTI_VALUE_MODE.match(currentFieldName)) { multiValueMode = parser.text(); } else { throw new ElasticsearchParseException("Malformed score function score parameters."); } } if (fieldName == null) { throw new ElasticsearchParseException("Malformed score function score parameters."); } XContentParser variableParser = XContentFactory.xContent(variableContent.string()).createParser(variableContent.string()); scoreFunction = parseVariable(fieldName, variableParser, parseContext, MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT))); return scoreFunction; } // parses origin and scale parameter for field "fieldName" private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, MultiValueMode mode) throws IOException { // now, the field must exist, else we cannot read the value for // the doc later 
MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName); if (smartMappers == null || !smartMappers.hasMapper()) { throw new QueryParsingException(parseContext.index(), "Unknown field [" + fieldName + "]"); } FieldMapper<?> mapper = smartMappers.fieldMappers().mapper(); // dates and time need special handling parser.nextToken(); if (mapper instanceof DateFieldMapper) { return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper) mapper, mode); } else if (mapper instanceof GeoPointFieldMapper) { return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper) mapper, mode); } else if (mapper instanceof NumberFieldMapper<?>) { return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper<?>) mapper, mode); } else { throw new QueryParsingException(parseContext.index(), "Field " + fieldName + " is of type " + mapper.fieldType() + ", but only numeric types are supported."); } } private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, NumberFieldMapper<?> mapper, MultiValueMode mode) throws IOException { XContentParser.Token token; String parameterName = null; double scale = 0; double origin = 0; double decay = 0.5; double offset = 0.0d; boolean scaleFound = false; boolean refFound = false; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { parameterName = parser.currentName(); } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) { scale = parser.doubleValue(); scaleFound = true; } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) { decay = parser.doubleValue(); } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) { origin = parser.doubleValue(); refFound = true; } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) { offset = parser.doubleValue(); } else { throw new ElasticsearchParseException("Parameter " + 
parameterName + " not supported!"); } } if (!scaleFound || !refFound) { throw new ElasticsearchParseException("Both " + DecayFunctionBuilder.SCALE + " and " + DecayFunctionBuilder.ORIGIN + " must be set for numeric fields."); } IndexNumericFieldData numericFieldData = parseContext.getForField(mapper); return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); } private AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, GeoPointFieldMapper mapper, MultiValueMode mode) throws IOException { XContentParser.Token token; String parameterName = null; GeoPoint origin = new GeoPoint(); String scaleString = null; String offsetString = "0km"; double decay = 0.5; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { parameterName = parser.currentName(); } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) { scaleString = parser.text(); } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) { origin = GeoUtils.parseGeoPoint(parser); } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) { decay = parser.doubleValue(); } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) { offsetString = parser.text(); } else { throw new ElasticsearchParseException("Parameter " + parameterName + " not supported!"); } } if (origin == null || scaleString == null) { throw new ElasticsearchParseException(DecayFunctionBuilder.ORIGIN + " and " + DecayFunctionBuilder.SCALE + " must be set for geo fields."); } double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT); double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT); IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper); return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode); } private 
AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, DateFieldMapper dateFieldMapper, MultiValueMode mode) throws IOException { XContentParser.Token token; String parameterName = null; String scaleString = null; String originString = null; String offsetString = "0d"; double decay = 0.5; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { parameterName = parser.currentName(); } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) { scaleString = parser.text(); } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) { originString = parser.text(); } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) { decay = parser.doubleValue(); } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) { offsetString = parser.text(); } else { throw new ElasticsearchParseException("Parameter " + parameterName + " not supported!"); } } long origin = SearchContext.current().nowInMillis(); if (originString != null) { origin = dateFieldMapper.parseToMilliseconds(originString); } if (scaleString == null) { throw new ElasticsearchParseException(DecayFunctionBuilder.SCALE + " must be set for date fields."); } TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24)); double scale = val.getMillis(); val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24)); double offset = val.getMillis(); IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldMapper); return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); } static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { private final GeoPoint origin; private final IndexGeoPointFieldData fieldData; private MultiGeoPointValues geoPointValues = null; private static final GeoDistance distFunction = GeoDistance.DEFAULT; public 
GeoFieldDataScoreFunction(GeoPoint origin, double scale, double decay, double offset, DecayFunction func, IndexGeoPointFieldData fieldData, MultiValueMode mode) { super(scale, decay, offset, func, mode); this.origin = origin; this.fieldData = fieldData; } @Override public void setNextReader(AtomicReaderContext context) { geoPointValues = fieldData.load(context).getGeoPointValues(); } @Override protected double distance(int docId) { geoPointValues.setDocument(docId); final int num = geoPointValues.count(); if (num > 0) { double value = mode.startDouble(); for (int i = 0; i < num; i++) { GeoPoint other = geoPointValues.valueAt(i); value = mode.apply(Math.max(0.0d, distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS) - offset), value); } return mode.reduce(value, num); } else { return 0.0; } } @Override protected String getDistanceString(int docId) { StringBuilder values = new StringBuilder(mode.name()); values.append(" of: ["); geoPointValues.setDocument(docId); final int num = geoPointValues.count(); if (num > 0) { for (int i = 0; i < num; i++) { GeoPoint value = geoPointValues.valueAt(i); values.append("Math.max(arcDistance("); values.append(value).append("(=doc value),").append(origin).append("(=origin)) - ").append(offset).append("(=offset), 0)"); if (i != num - 1) { values.append(", "); } } } else { values.append("0.0"); } values.append("]"); return values.toString(); } @Override protected String getFieldName() { return fieldData.getFieldNames().fullName(); } } static class NumericFieldDataScoreFunction extends AbstractDistanceScoreFunction { private final IndexNumericFieldData fieldData; private final double origin; private SortedNumericDoubleValues doubleValues; public NumericFieldDataScoreFunction(double origin, double scale, double decay, double offset, DecayFunction func, IndexNumericFieldData fieldData, MultiValueMode mode) { super(scale, decay, offset, func, mode); this.fieldData = fieldData; this.origin = 
origin; } public void setNextReader(AtomicReaderContext context) { this.doubleValues = this.fieldData.load(context).getDoubleValues(); } @Override protected double distance(int docId) { doubleValues.setDocument(docId); final int num = doubleValues.count(); if (num > 0) { double value = mode.startDouble(); for (int i = 0; i < num; i++) { final double other = doubleValues.valueAt(i); value = mode.apply(Math.max(0.0d, Math.abs(other - origin) - offset), value); } return mode.reduce(value, num); } else { return 0.0; } } @Override protected String getDistanceString(int docId) { StringBuilder values = new StringBuilder(mode.name()); values.append("["); doubleValues.setDocument(docId); final int num = doubleValues.count(); if (num > 0) { for (int i = 0; i < num; i++) { double value = doubleValues.valueAt(i); values.append("Math.max(Math.abs("); values.append(value).append("(=doc value) - ").append(origin).append("(=origin))) - ").append(offset).append("(=offset), 0)"); if (i != num - 1) { values.append(", "); } } } else { values.append("0.0"); } values.append("]"); return values.toString(); } @Override protected String getFieldName() { return fieldData.getFieldNames().fullName(); } } /** * This is the base class for scoring a single field. 
* * */ public static abstract class AbstractDistanceScoreFunction extends ScoreFunction { private final double scale; protected final double offset; private final DecayFunction func; protected final MultiValueMode mode; public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func, MultiValueMode mode) { super(CombineFunction.MULT); this.mode = mode; if (userSuppiedScale <= 0.0) { throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : scale must be > 0.0."); } if (decay <= 0.0 || decay >= 1.0) { throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : decay must be in the range [0..1]."); } this.scale = func.processScale(userSuppiedScale, decay); this.func = func; if (offset < 0.0d) { throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : offset must be > 0.0"); } this.offset = offset; } @Override public double score(int docId, float subQueryScore) { double value = distance(docId); return func.evaluate(value, scale); } /** * This function computes the distance from a defined origin. Since * the value of the document is read from the index, it cannot be * guaranteed that the value actually exists. If it does not, we assume * the user handles this case in the query and return 0. * */ protected abstract double distance(int docId); protected abstract String getDistanceString(int docId); protected abstract String getFieldName(); @Override public Explanation explainScore(int docId, Explanation subQueryScore) { ComplexExplanation ce = new ComplexExplanation(); ce.setValue(CombineFunction.toFloat(score(docId, subQueryScore.getValue()))); ce.setMatch(true); ce.setDescription("Function for field " + getFieldName() + ":"); ce.addDetail(func.explainFunction(getDistanceString(docId), distance(docId), scale)); return ce; } } }
package org.zstack.image; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; import org.zstack.core.Platform; import org.zstack.core.asyncbatch.AsyncBatchRunner; import org.zstack.core.asyncbatch.LoopAsyncBatch; import org.zstack.core.cloudbus.*; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.config.GlobalConfig; import org.zstack.core.config.GlobalConfigUpdateExtensionPoint; import org.zstack.core.db.DatabaseFacade; import org.zstack.core.db.SQL; import org.zstack.core.db.SQLBatchWithReturn; import org.zstack.core.db.SimpleQuery; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.core.defer.Defer; import org.zstack.core.defer.Deferred; import org.zstack.core.errorcode.ErrorFacade; import org.zstack.core.notification.N; import org.zstack.core.thread.CancelablePeriodicTask; import org.zstack.core.thread.ThreadFacade; import org.zstack.core.workflow.FlowChainBuilder; import org.zstack.core.workflow.ShareFlow; import org.zstack.header.AbstractService; import org.zstack.header.core.AsyncLatch; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.ErrorCodeList; import org.zstack.header.errorcode.SysErrors; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.identity.*; import org.zstack.header.image.*; import org.zstack.header.image.APICreateRootVolumeTemplateFromVolumeSnapshotEvent.Failure; import org.zstack.header.image.ImageConstant.ImageMediaType; import org.zstack.header.image.ImageDeletionPolicyManager.ImageDeletionPolicy; import org.zstack.header.managementnode.ManagementNodeReadyExtensionPoint; import org.zstack.header.message.APIMessage; import org.zstack.header.message.Message; import org.zstack.header.message.MessageReply; import org.zstack.header.message.NeedQuotaCheckMessage; import 
org.zstack.header.rest.RESTFacade; import org.zstack.header.search.SearchOp; import org.zstack.header.storage.backup.*; import org.zstack.header.storage.primary.PrimaryStorageVO; import org.zstack.header.storage.primary.PrimaryStorageVO_; import org.zstack.header.storage.snapshot.*; import org.zstack.header.vm.CreateTemplateFromVmRootVolumeMsg; import org.zstack.header.vm.CreateTemplateFromVmRootVolumeReply; import org.zstack.header.vm.VmInstanceConstant; import org.zstack.header.volume.*; import org.zstack.identity.AccountManager; import org.zstack.identity.QuotaUtil; import org.zstack.search.SearchQuery; import org.zstack.tag.TagManager; import org.zstack.utils.CollectionUtils; import org.zstack.utils.ObjectUtils; import org.zstack.utils.RunOnce; import org.zstack.utils.Utils; import org.zstack.utils.data.SizeUnit; import org.zstack.utils.function.ForEachFunction; import org.zstack.utils.function.Function; import org.zstack.utils.gson.JSONObjectUtil; import org.zstack.utils.logging.CLogger; import javax.persistence.Tuple; import javax.persistence.TypedQuery; import java.sql.Timestamp; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static org.zstack.core.Platform.operr; import static org.zstack.utils.CollectionDSL.list; public class ImageManagerImpl extends AbstractService implements ImageManager, ManagementNodeReadyExtensionPoint, ReportQuotaExtensionPoint, ResourceOwnerPreChangeExtensionPoint { private static final CLogger logger = Utils.getLogger(ImageManagerImpl.class); @Autowired private CloudBus bus; @Autowired private PluginRegistry pluginRgty; @Autowired private DatabaseFacade dbf; @Autowired private AccountManager acntMgr; @Autowired private ErrorFacade errf; @Autowired private TagManager tagMgr; @Autowired private ThreadFacade thdf; @Autowired private ResourceDestinationMaker 
destMaker;
@Autowired
private ImageDeletionPolicyManager deletionPolicyMgr;
@Autowired
protected RESTFacade restf;

// Per-type image factories, registered from the plugin registry in populateExtensions().
private Map<String, ImageFactory> imageFactories = Collections.synchronizedMap(new HashMap<>());
// Message types still allowed to reach an image after it has been deleted;
// passThrough() then reconstructs the VO from its EO record.
private static final Set<Class> allowedMessageAfterDeletion = new HashSet<>();
// Handle of the periodic expunge task so it can be cancelled/restarted when
// the related global configs change (see startExpungeTask()).
private Future<Void> expungeTask;

static {
    allowedMessageAfterDeletion.add(ImageDeletionMsg.class);
}

// Service entry point: image-bound messages are routed to the owning Image
// instance, API messages are dispatched by concrete type, anything else is
// rejected as unknown.
@Override
@MessageSafe
public void handleMessage(Message msg) {
    if (msg instanceof ImageMessage) {
        passThrough((ImageMessage) msg);
    } else if (msg instanceof APIMessage) {
        handleApiMessage(msg);
    } else {
        handleLocalMessage(msg);
    }
}

// No non-API local messages are handled directly by this manager.
private void handleLocalMessage(Message msg) {
    bus.dealWithUnknownMessage(msg);
}

// Dispatches API messages to their dedicated handlers.
private void handleApiMessage(Message msg) {
    if (msg instanceof APIAddImageMsg) {
        handle((APIAddImageMsg) msg);
    } else if (msg instanceof APIListImageMsg) {
        handle((APIListImageMsg) msg);
    } else if (msg instanceof APISearchImageMsg) {
        handle((APISearchImageMsg) msg);
    } else if (msg instanceof APIGetImageMsg) {
        handle((APIGetImageMsg) msg);
    } else if (msg instanceof APICreateRootVolumeTemplateFromRootVolumeMsg) {
        handle((APICreateRootVolumeTemplateFromRootVolumeMsg) msg);
    } else if (msg instanceof APICreateRootVolumeTemplateFromVolumeSnapshotMsg) {
        handle((APICreateRootVolumeTemplateFromVolumeSnapshotMsg) msg);
    } else if (msg instanceof APICreateDataVolumeTemplateFromVolumeMsg) {
        handle((APICreateDataVolumeTemplateFromVolumeMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}

// Creates a data-volume template from an existing volume via a share-flow chain:
// 1) sync the volume's actual size, 2) create the ImageVO in database,
// 3) allocate backup storage, 4) create the template on each allocated storage.
private void handle(final APICreateDataVolumeTemplateFromVolumeMsg msg) {
    final APICreateDataVolumeTemplateFromVolumeEvent evt = new APICreateDataVolumeTemplateFromVolumeEvent(msg.getId());

    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("create-data-volume-template-from-volume-%s", msg.getVolumeUuid()));
    chain.then(new ShareFlow() {
        // Backup storage successfully allocated for the template; also consulted on rollback.
        List<BackupStorageInventory> backupStorage = new ArrayList<>();
        ImageVO image;
        long actualSize;

        @Override
        public void setup() {
flow(new NoRollbackFlow() { String __name__ = "get-actual-size-of-data-volume"; @Override public void run(final FlowTrigger trigger, Map data) { SyncVolumeSizeMsg smsg = new SyncVolumeSizeMsg(); smsg.setVolumeUuid(msg.getVolumeUuid()); bus.makeTargetServiceIdByResourceUuid(smsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid()); bus.send(smsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } SyncVolumeSizeReply sr = reply.castReply(); actualSize = sr.getActualSize(); trigger.next(); } }); } }); flow(new Flow() { String __name__ = "create-image-in-database"; @Override public void run(FlowTrigger trigger, Map data) { SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class); q.select(VolumeVO_.format, VolumeVO_.size); q.add(VolumeVO_.uuid, Op.EQ, msg.getVolumeUuid()); Tuple t = q.findTuple(); String format = t.get(0, String.class); long size = t.get(1, Long.class); final ImageVO vo = new ImageVO(); vo.setUuid(msg.getResourceUuid() == null ? 
Platform.getUuid() : msg.getResourceUuid()); vo.setName(msg.getName()); vo.setDescription(msg.getDescription()); vo.setType(ImageConstant.ZSTACK_IMAGE_TYPE); vo.setMediaType(ImageMediaType.DataVolumeTemplate); vo.setSize(size); vo.setActualSize(actualSize); vo.setState(ImageState.Enabled); vo.setStatus(ImageStatus.Creating); vo.setFormat(format); vo.setUrl(String.format("volume://%s", msg.getVolumeUuid())); image = dbf.persistAndRefresh(vo); acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), ImageVO.class); tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), ImageVO.class.getSimpleName()); trigger.next(); } @Override public void rollback(FlowRollback trigger, Map data) { if (image != null) { dbf.remove(image); } trigger.rollback(); } }); flow(new Flow() { String __name__ = "select-backup-storage"; @Override public void run(final FlowTrigger trigger, Map data) { final String zoneUuid = new Callable<String>() { @Override @Transactional(readOnly = true) public String call() { String sql = "select ps.zoneUuid" + " from PrimaryStorageVO ps, VolumeVO vol" + " where vol.primaryStorageUuid = ps.uuid" + " and vol.uuid = :volUuid"; TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class); q.setParameter("volUuid", msg.getVolumeUuid()); return q.getSingleResult(); } }.call(); if (msg.getBackupStorageUuids() == null) { AllocateBackupStorageMsg amsg = new AllocateBackupStorageMsg(); amsg.setRequiredZoneUuid(zoneUuid); amsg.setSize(actualSize); bus.makeLocalServiceId(amsg, BackupStorageConstant.SERVICE_ID); bus.send(amsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { backupStorage.add(((AllocateBackupStorageReply) reply).getInventory()); trigger.next(); } else { trigger.fail(errf.stringToOperationError("cannot find proper backup storage", reply.getError())); } } }); } else { List<AllocateBackupStorageMsg> amsgs = 
CollectionUtils.transformToList(msg.getBackupStorageUuids(), new Function<AllocateBackupStorageMsg, String>() { @Override public AllocateBackupStorageMsg call(String arg) { AllocateBackupStorageMsg amsg = new AllocateBackupStorageMsg(); amsg.setRequiredZoneUuid(zoneUuid); amsg.setSize(actualSize); amsg.setBackupStorageUuid(arg); bus.makeLocalServiceId(amsg, BackupStorageConstant.SERVICE_ID); return amsg; } }); bus.send(amsgs, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { List<ErrorCode> errs = new ArrayList<>(); for (MessageReply r : replies) { if (r.isSuccess()) { backupStorage.add(((AllocateBackupStorageReply) r).getInventory()); } else { errs.add(r.getError()); } } if (backupStorage.isEmpty()) { trigger.fail(operr("failed to allocate all backup storage[uuid:%s], a list of error: %s", msg.getBackupStorageUuids(), JSONObjectUtil.toJsonString(errs))); } else { trigger.next(); } } }); } } @Override public void rollback(FlowRollback trigger, Map data) { if (!backupStorage.isEmpty()) { List<ReturnBackupStorageMsg> rmsgs = CollectionUtils.transformToList(backupStorage, new Function<ReturnBackupStorageMsg, BackupStorageInventory>() { @Override public ReturnBackupStorageMsg call(BackupStorageInventory arg) { ReturnBackupStorageMsg rmsg = new ReturnBackupStorageMsg(); rmsg.setBackupStorageUuid(arg.getUuid()); rmsg.setSize(actualSize); bus.makeLocalServiceId(rmsg, BackupStorageConstant.SERVICE_ID); return rmsg; } }); bus.send(rmsgs, new CloudBusListCallBack(null) { @Override public void run(List<MessageReply> replies) { for (MessageReply r : replies) { BackupStorageInventory bs = backupStorage.get(replies.indexOf(r)); logger.warn(String.format("failed to return %s bytes to backup storage[uuid:%s]", acntMgr, bs.getUuid())); } } }); } trigger.rollback(); } }); flow(new NoRollbackFlow() { String __name__ = "create-data-volume-template-from-volume"; @Override public void run(final FlowTrigger trigger, Map data) { 
List<CreateDataVolumeTemplateFromDataVolumeMsg> cmsgs = CollectionUtils.transformToList(backupStorage, new Function<CreateDataVolumeTemplateFromDataVolumeMsg, BackupStorageInventory>() { @Override public CreateDataVolumeTemplateFromDataVolumeMsg call(BackupStorageInventory bs) { CreateDataVolumeTemplateFromDataVolumeMsg cmsg = new CreateDataVolumeTemplateFromDataVolumeMsg(); cmsg.setVolumeUuid(msg.getVolumeUuid()); cmsg.setBackupStorageUuid(bs.getUuid()); cmsg.setImageUuid(image.getUuid()); bus.makeTargetServiceIdByResourceUuid(cmsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid()); return cmsg; } }); bus.send(cmsgs, new CloudBusListCallBack(msg) { @Override public void run(List<MessageReply> replies) { int fail = 0; String mdsum = null; ErrorCode err = null; String format = null; for (MessageReply r : replies) { BackupStorageInventory bs = backupStorage.get(replies.indexOf(r)); if (!r.isSuccess()) { logger.warn(String.format("failed to create data volume template from volume[uuid:%s] on backup storage[uuid:%s], %s", msg.getVolumeUuid(), bs.getUuid(), r.getError())); fail++; err = r.getError(); continue; } CreateDataVolumeTemplateFromDataVolumeReply reply = r.castReply(); ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO(); ref.setBackupStorageUuid(bs.getUuid()); ref.setStatus(ImageStatus.Ready); ref.setImageUuid(image.getUuid()); ref.setInstallPath(reply.getInstallPath()); dbf.persist(ref); if (mdsum == null) { mdsum = reply.getMd5sum(); } if (reply.getFormat() != null) { format = reply.getFormat(); } } int backupStorageNum = msg.getBackupStorageUuids() == null ? 1 : msg.getBackupStorageUuids().size(); if (fail == backupStorageNum) { ErrorCode errCode = operr("failed to create data volume template from volume[uuid:%s] on all backup storage%s. 
See cause for one of errors", msg.getVolumeUuid(), msg.getBackupStorageUuids()).causedBy(err); trigger.fail(errCode); } else { image = dbf.reload(image); if (format != null) { image.setFormat(format); } image.setMd5Sum(mdsum); image.setStatus(ImageStatus.Ready); image = dbf.updateAndRefresh(image); trigger.next(); } } }); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { evt.setInventory(ImageInventory.valueOf(image)); bus.publish(evt); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(errCode); bus.publish(evt); } }); } }).start(); } private void handle(final APICreateRootVolumeTemplateFromVolumeSnapshotMsg msg) { final APICreateRootVolumeTemplateFromVolumeSnapshotEvent evt = new APICreateRootVolumeTemplateFromVolumeSnapshotEvent(msg.getId()); SimpleQuery<VolumeSnapshotVO> q = dbf.createQuery(VolumeSnapshotVO.class); q.select(VolumeSnapshotVO_.format); q.add(VolumeSnapshotVO_.uuid, Op.EQ, msg.getSnapshotUuid()); String format = q.findValue(); final ImageVO vo = new ImageVO(); if (msg.getResourceUuid() != null) { vo.setUuid(msg.getResourceUuid()); } else { vo.setUuid(Platform.getUuid()); } vo.setName(msg.getName()); vo.setSystem(msg.isSystem()); vo.setDescription(msg.getDescription()); vo.setPlatform(ImagePlatform.valueOf(msg.getPlatform())); vo.setGuestOsType(vo.getGuestOsType()); vo.setStatus(ImageStatus.Creating); vo.setState(ImageState.Enabled); vo.setFormat(format); vo.setMediaType(ImageMediaType.RootVolumeTemplate); vo.setType(ImageConstant.ZSTACK_IMAGE_TYPE); vo.setUrl(String.format("volumeSnapshot://%s", msg.getSnapshotUuid())); dbf.persist(vo); acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), ImageVO.class); tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), ImageVO.class.getSimpleName()); SimpleQuery<VolumeSnapshotVO> sq = dbf.createQuery(VolumeSnapshotVO.class); sq.select(VolumeSnapshotVO_.volumeUuid, 
VolumeSnapshotVO_.treeUuid); sq.add(VolumeSnapshotVO_.uuid, Op.EQ, msg.getSnapshotUuid()); Tuple t = sq.findTuple(); String volumeUuid = t.get(0, String.class); String treeUuid = t.get(1, String.class); List<CreateTemplateFromVolumeSnapshotMsg> cmsgs = msg.getBackupStorageUuids().stream().map(bsUuid -> { CreateTemplateFromVolumeSnapshotMsg cmsg = new CreateTemplateFromVolumeSnapshotMsg(); cmsg.setSnapshotUuid(msg.getSnapshotUuid()); cmsg.setImageUuid(vo.getUuid()); cmsg.setVolumeUuid(volumeUuid); cmsg.setTreeUuid(treeUuid); cmsg.setBackupStorageUuid(bsUuid); String resourceUuid = volumeUuid != null ? volumeUuid : treeUuid; bus.makeTargetServiceIdByResourceUuid(cmsg, VolumeSnapshotConstant.SERVICE_ID, resourceUuid); return cmsg; }).collect(Collectors.toList()); List<Failure> failures = new ArrayList<>(); AsyncLatch latch = new AsyncLatch(cmsgs.size(), new NoErrorCompletion(msg) { @Override public void done() { if (failures.size() == cmsgs.size()) { // failed on all ErrorCodeList error = errf.stringToOperationError(String.format("failed to create template from" + " the volume snapshot[uuid:%s] on backup storage[uuids:%s]", msg.getSnapshotUuid(), msg.getBackupStorageUuids()), failures.stream().map(f -> f.error).collect(Collectors.toList())); evt.setError(error); dbf.remove(vo); } else { ImageVO imvo = dbf.reload(vo); evt.setInventory(ImageInventory.valueOf(imvo)); logger.debug(String.format("successfully created image[uuid:%s, name:%s] from volume snapshot[uuid:%s]", imvo.getUuid(), imvo.getName(), msg.getSnapshotUuid())); } if (!failures.isEmpty()) { evt.setFailuresOnBackupStorage(failures); } bus.publish(evt); } }); RunOnce once = new RunOnce(); for (CreateTemplateFromVolumeSnapshotMsg cmsg : cmsgs) { bus.send(cmsg, new CloudBusCallBack(latch) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { synchronized (failures) { Failure failure = new Failure(); failure.error = reply.getError(); failure.backupStorageUuid = cmsg.getBackupStorageUuid(); 
failures.add(failure); } } else { CreateTemplateFromVolumeSnapshotReply cr = reply.castReply(); ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO(); ref.setBackupStorageUuid(cr.getBackupStorageUuid()); ref.setInstallPath(cr.getBackupStorageInstallPath()); ref.setStatus(ImageStatus.Ready); ref.setImageUuid(vo.getUuid()); dbf.persist(ref); once.run(() -> { vo.setSize(cr.getSize()); vo.setActualSize(cr.getActualSize()); vo.setStatus(ImageStatus.Ready); dbf.update(vo); }); } latch.ack(); } }); } } private void passThrough(ImageMessage msg) { ImageVO vo = dbf.findByUuid(msg.getImageUuid(), ImageVO.class); if (vo == null && allowedMessageAfterDeletion.contains(msg.getClass())) { ImageEO eo = dbf.findByUuid(msg.getImageUuid(), ImageEO.class); vo = ObjectUtils.newAndCopy(eo, ImageVO.class); } if (vo == null) { String err = String.format("Cannot find image[uuid:%s], it may have been deleted", msg.getImageUuid()); logger.warn(err); bus.replyErrorByMessageType((Message) msg, errf.instantiateErrorCode(SysErrors.RESOURCE_NOT_FOUND, err)); return; } ImageFactory factory = getImageFacotry(ImageType.valueOf(vo.getType())); Image img = factory.getImage(vo); img.handleMessage((Message) msg); } private void handle(final APICreateRootVolumeTemplateFromRootVolumeMsg msg) { FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("create-template-from-root-volume-%s", msg.getRootVolumeUuid())); chain.then(new ShareFlow() { ImageVO imageVO; VolumeInventory rootVolume; Long imageActualSize; List<BackupStorageInventory> targetBackupStorages = new ArrayList<>(); String zoneUuid; { VolumeVO rootvo = dbf.findByUuid(msg.getRootVolumeUuid(), VolumeVO.class); rootVolume = VolumeInventory.valueOf(rootvo); SimpleQuery<PrimaryStorageVO> q = dbf.createQuery(PrimaryStorageVO.class); q.select(PrimaryStorageVO_.zoneUuid); q.add(PrimaryStorageVO_.uuid, Op.EQ, rootVolume.getPrimaryStorageUuid()); zoneUuid = q.findValue(); } @Override public void setup() { flow(new 
NoRollbackFlow() { String __name__ = "get-volume-actual-size"; @Override public void run(final FlowTrigger trigger, Map data) { SyncVolumeSizeMsg msg = new SyncVolumeSizeMsg(); msg.setVolumeUuid(rootVolume.getUuid()); bus.makeTargetServiceIdByResourceUuid(msg, VolumeConstant.SERVICE_ID, rootVolume.getPrimaryStorageUuid()); bus.send(msg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } SyncVolumeSizeReply sr = reply.castReply(); imageActualSize = sr.getActualSize(); trigger.next(); } }); } }); flow(new Flow() { String __name__ = "create-image-in-database"; public void run(FlowTrigger trigger, Map data) { SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class); q.add(VolumeVO_.uuid, Op.EQ, msg.getRootVolumeUuid()); final VolumeVO volvo = q.find(); String accountUuid = acntMgr.getOwnerAccountUuidOfResource(volvo.getUuid()); final ImageVO imvo = new ImageVO(); if (msg.getResourceUuid() != null) { imvo.setUuid(msg.getResourceUuid()); } else { imvo.setUuid(Platform.getUuid()); } imvo.setDescription(msg.getDescription()); imvo.setMediaType(ImageMediaType.RootVolumeTemplate); imvo.setState(ImageState.Enabled); imvo.setGuestOsType(msg.getGuestOsType()); imvo.setFormat(volvo.getFormat()); imvo.setName(msg.getName()); imvo.setSystem(msg.isSystem()); imvo.setPlatform(ImagePlatform.valueOf(msg.getPlatform())); imvo.setStatus(ImageStatus.Downloading); imvo.setType(ImageConstant.ZSTACK_IMAGE_TYPE); imvo.setUrl(String.format("volume://%s", msg.getRootVolumeUuid())); imvo.setSize(volvo.getSize()); imvo.setActualSize(imageActualSize); dbf.persist(imvo); acntMgr.createAccountResourceRef(accountUuid, imvo.getUuid(), ImageVO.class); tagMgr.createTagsFromAPICreateMessage(msg, imvo.getUuid(), ImageVO.class.getSimpleName()); imageVO = imvo; trigger.next(); } @Override public void rollback(FlowRollback trigger, Map data) { if (imageVO != null) { dbf.remove(imageVO); } trigger.rollback(); } 
}); flow(new Flow() { String __name__ = String.format("select-backup-storage"); @Override public void run(final FlowTrigger trigger, Map data) { if (msg.getBackupStorageUuids() == null) { AllocateBackupStorageMsg abmsg = new AllocateBackupStorageMsg(); abmsg.setRequiredZoneUuid(zoneUuid); abmsg.setSize(imageActualSize); bus.makeLocalServiceId(abmsg, BackupStorageConstant.SERVICE_ID); bus.send(abmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { targetBackupStorages.add(((AllocateBackupStorageReply) reply).getInventory()); trigger.next(); } else { trigger.fail(reply.getError()); } } }); } else { List<AllocateBackupStorageMsg> amsgs = CollectionUtils.transformToList(msg.getBackupStorageUuids(), new Function<AllocateBackupStorageMsg, String>() { @Override public AllocateBackupStorageMsg call(String arg) { AllocateBackupStorageMsg abmsg = new AllocateBackupStorageMsg(); abmsg.setSize(imageActualSize); abmsg.setBackupStorageUuid(arg); bus.makeLocalServiceId(abmsg, BackupStorageConstant.SERVICE_ID); return abmsg; } }); bus.send(amsgs, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { List<ErrorCode> errs = new ArrayList<>(); for (MessageReply r : replies) { if (r.isSuccess()) { targetBackupStorages.add(((AllocateBackupStorageReply) r).getInventory()); } else { errs.add(r.getError()); } } if (targetBackupStorages.isEmpty()) { trigger.fail(operr("unable to allocate backup storage specified by uuids%s, list errors are: %s", msg.getBackupStorageUuids(), JSONObjectUtil.toJsonString(errs))); } else { trigger.next(); } } }); } } @Override public void rollback(final FlowRollback trigger, Map data) { if (targetBackupStorages.isEmpty()) { trigger.rollback(); return; } List<ReturnBackupStorageMsg> rmsgs = CollectionUtils.transformToList(targetBackupStorages, new Function<ReturnBackupStorageMsg, BackupStorageInventory>() { @Override public ReturnBackupStorageMsg 
call(BackupStorageInventory arg) { ReturnBackupStorageMsg rmsg = new ReturnBackupStorageMsg(); rmsg.setBackupStorageUuid(arg.getUuid()); rmsg.setSize(imageActualSize); bus.makeLocalServiceId(rmsg, BackupStorageConstant.SERVICE_ID); return rmsg; } }); bus.send(rmsgs, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { for (MessageReply r : replies) { if (!r.isSuccess()) { BackupStorageInventory bs = targetBackupStorages.get(replies.indexOf(r)); logger.warn(String.format("failed to return capacity[%s] to backup storage[uuid:%s], because %s", imageActualSize, bs.getUuid(), r.getError())); } } trigger.rollback(); } }); } }); flow(new NoRollbackFlow() { String __name__ = String.format("start-creating-template"); @Override public void run(final FlowTrigger trigger, Map data) { List<CreateTemplateFromVmRootVolumeMsg> cmsgs = CollectionUtils.transformToList(targetBackupStorages, new Function<CreateTemplateFromVmRootVolumeMsg, BackupStorageInventory>() { @Override public CreateTemplateFromVmRootVolumeMsg call(BackupStorageInventory arg) { CreateTemplateFromVmRootVolumeMsg cmsg = new CreateTemplateFromVmRootVolumeMsg(); cmsg.setRootVolumeInventory(rootVolume); cmsg.setBackupStorageUuid(arg.getUuid()); cmsg.setImageInventory(ImageInventory.valueOf(imageVO)); bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, rootVolume.getVmInstanceUuid()); return cmsg; } }); bus.send(cmsgs, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { boolean success = false; ErrorCode err = null; for (MessageReply r : replies) { BackupStorageInventory bs = targetBackupStorages.get(replies.indexOf(r)); if (!r.isSuccess()) { logger.warn(String.format("failed to create image from root volume[uuid:%s] on backup storage[uuid:%s], because %s", msg.getRootVolumeUuid(), bs.getUuid(), r.getError())); err = r.getError(); continue; } CreateTemplateFromVmRootVolumeReply reply = 
(CreateTemplateFromVmRootVolumeReply) r; ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO(); ref.setBackupStorageUuid(bs.getUuid()); ref.setStatus(ImageStatus.Ready); ref.setImageUuid(imageVO.getUuid()); ref.setInstallPath(reply.getInstallPath()); dbf.persist(ref); imageVO.setStatus(ImageStatus.Ready); if (reply.getFormat() != null) { imageVO.setFormat(reply.getFormat()); } dbf.update(imageVO); imageVO = dbf.reload(imageVO); success = true; logger.debug(String.format("successfully created image[uuid:%s] from root volume[uuid:%s] on backup storage[uuid:%s]", imageVO.getUuid(), msg.getRootVolumeUuid(), bs.getUuid())); } if (success) { trigger.next(); } else { trigger.fail(operr("failed to create image from root volume[uuid:%s] on all backup storage, see cause for one of errors", msg.getRootVolumeUuid()).causedBy(err)); } } }); } }); flow(new Flow() { String __name__ = "copy-system-tag-to-image"; public void run(FlowTrigger trigger, Map data) { // find the rootimage and create some systemtag if it has SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class); q.add(VolumeVO_.uuid, SimpleQuery.Op.EQ, msg.getRootVolumeUuid()); q.select(VolumeVO_.vmInstanceUuid); String vmInstanceUuid = q.findValue(); if (tagMgr.hasSystemTag(vmInstanceUuid, ImageSystemTags.IMAGE_INJECT_QEMUGA.getTagFormat())) { tagMgr.createNonInherentSystemTag(imageVO.getUuid(), ImageSystemTags.IMAGE_INJECT_QEMUGA.getTagFormat(), ImageVO.class.getSimpleName()); } trigger.next(); } @Override public void rollback(FlowRollback trigger, Map data) { trigger.rollback(); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { APICreateRootVolumeTemplateFromRootVolumeEvent evt = new APICreateRootVolumeTemplateFromRootVolumeEvent(msg.getId()); imageVO = dbf.reload(imageVO); ImageInventory iinv = ImageInventory.valueOf(imageVO); evt.setInventory(iinv); logger.warn(String.format("successfully create template[uuid:%s] from root volume[uuid:%s]", iinv.getUuid(), 
msg.getRootVolumeUuid())); bus.publish(evt); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { APICreateRootVolumeTemplateFromRootVolumeEvent evt = new APICreateRootVolumeTemplateFromRootVolumeEvent(msg.getId()); evt.setError(errCode); logger.warn(String.format("failed to create template from root volume[uuid:%s], because %s", msg.getRootVolumeUuid(), errCode)); bus.publish(evt); } }); } }).start(); } private void handle(APIGetImageMsg msg) { SearchQuery<ImageInventory> sq = new SearchQuery(ImageInventory.class); sq.addAccountAsAnd(msg); sq.add("uuid", SearchOp.AND_EQ, msg.getUuid()); List<ImageInventory> invs = sq.list(); APIGetImageReply reply = new APIGetImageReply(); if (!invs.isEmpty()) { reply.setInventory(JSONObjectUtil.toJsonString(invs.get(0))); } bus.reply(msg, reply); } private void handle(APISearchImageMsg msg) { SearchQuery<ImageInventory> sq = SearchQuery.create(msg, ImageInventory.class); sq.addAccountAsAnd(msg); String content = sq.listAsString(); APISearchImageReply reply = new APISearchImageReply(); reply.setContent(content); bus.reply(msg, reply); } private void handle(APIListImageMsg msg) { List<ImageVO> vos = dbf.listAll(ImageVO.class); List<ImageInventory> invs = ImageInventory.valueOf(vos); APIListImageReply reply = new APIListImageReply(); reply.setInventories(invs); bus.reply(msg, reply); } @Deferred private void handle(final APIAddImageMsg msg) { String imageType = msg.getType(); imageType = imageType == null ? 
DefaultImageFactory.type.toString() : imageType; final APIAddImageEvent evt = new APIAddImageEvent(msg.getId()); ImageVO vo = new ImageVO(); if (msg.getResourceUuid() != null) { vo.setUuid(msg.getResourceUuid()); } else { vo.setUuid(Platform.getUuid()); } vo.setName(msg.getName()); vo.setDescription(msg.getDescription()); if (msg.getFormat().equals(ImageConstant.ISO_FORMAT_STRING)) { vo.setMediaType(ImageMediaType.ISO); } else { vo.setMediaType(ImageMediaType.valueOf(msg.getMediaType())); } vo.setType(imageType); vo.setSystem(msg.isSystem()); vo.setGuestOsType(msg.getGuestOsType()); vo.setFormat(msg.getFormat()); vo.setStatus(ImageStatus.Downloading); vo.setState(ImageState.Enabled); vo.setUrl(msg.getUrl()); vo.setDescription(msg.getDescription()); vo.setPlatform(ImagePlatform.valueOf(msg.getPlatform())); ImageFactory factory = getImageFacotry(ImageType.valueOf(imageType)); final ImageVO ivo = new SQLBatchWithReturn<ImageVO>() { @Override protected ImageVO scripts() { final ImageVO ivo = factory.createImage(vo, msg); acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), ImageVO.class); tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), ImageVO.class.getSimpleName()); return ivo; } }.execute(); Defer.guard(() -> dbf.remove(ivo)); final ImageInventory inv = ImageInventory.valueOf(ivo); for (AddImageExtensionPoint ext : pluginRgty.getExtensionList(AddImageExtensionPoint.class)) { ext.preAddImage(inv); } final List<DownloadImageMsg> dmsgs = CollectionUtils.transformToList(msg.getBackupStorageUuids(), new Function<DownloadImageMsg, String>() { @Override public DownloadImageMsg call(String arg) { DownloadImageMsg dmsg = new DownloadImageMsg(inv); dmsg.setBackupStorageUuid(arg); dmsg.setFormat(msg.getFormat()); dmsg.setSystemTags(msg.getSystemTags()); bus.makeTargetServiceIdByResourceUuid(dmsg, BackupStorageConstant.SERVICE_ID, arg); return dmsg; } }); CollectionUtils.safeForEach(pluginRgty.getExtensionList(AddImageExtensionPoint.class), 
                // NOTE(review): this span starts inside an add-image handler whose opening
                // lines are outside this view; code below is unchanged, only comments added.
                new ForEachFunction<AddImageExtensionPoint>() {
                    @Override
                    public void run(AddImageExtensionPoint ext) {
                        // Let extensions prepare/validate before the image is added.
                        ext.beforeAddImage(inv);
                    }
                });

        // Download the image to every requested backup storage in parallel.
        // 'success' flips to true exactly once, on the first successful download.
        new LoopAsyncBatch<DownloadImageMsg>(msg) {
            AtomicBoolean success = new AtomicBoolean(false);

            @Override
            protected Collection<DownloadImageMsg> collect() {
                return dmsgs;
            }

            @Override
            protected AsyncBatchRunner forEach(DownloadImageMsg dmsg) {
                return new AsyncBatchRunner() {
                    @Override
                    public void run(NoErrorCompletion completion) {
                        // Pre-create the image/backup-storage ref row in Downloading state
                        // so the relation is visible while the download is in flight.
                        ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO();
                        ref.setImageUuid(ivo.getUuid());
                        ref.setInstallPath("");
                        ref.setBackupStorageUuid(dmsg.getBackupStorageUuid());
                        ref.setStatus(ImageStatus.Downloading);
                        dbf.persist(ref);

                        bus.send(dmsg, new CloudBusCallBack(completion) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    // Record the failure and roll back the pre-created ref row.
                                    errors.add(reply.getError());
                                    dbf.remove(ref);
                                } else {
                                    DownloadImageReply re = reply.castReply();
                                    ref.setStatus(ImageStatus.Ready);
                                    ref.setInstallPath(re.getInstallPath());
                                    if (dbf.reload(ref) == null) {
                                        // The image was deleted while this download was running.
                                        logger.debug(String.format("image[uuid: %s] has been deleted", ref.getImageUuid()));
                                        completion.done();
                                        return;
                                    }
                                    dbf.update(ref);

                                    // Only the first successful download updates the ImageVO itself.
                                    if (success.compareAndSet(false, true)) {
                                        // In case 'Platform' etc. is changed.
                                        ImageVO vo = dbf.reload(ivo);
                                        vo.setMd5Sum(re.getMd5sum());
                                        vo.setSize(re.getSize());
                                        vo.setActualSize(re.getActualSize());
                                        vo.setStatus(ImageStatus.Ready);
                                        dbf.update(vo);
                                    }

                                    logger.debug(String.format("successfully downloaded image[uuid:%s, name:%s] to backup storage[uuid:%s]",
                                            inv.getUuid(), inv.getName(), dmsg.getBackupStorageUuid()));
                                }

                                completion.done();
                            }
                        });
                    }
                };
            }

            @Override
            protected void done() {
                // check if the database still has the record of the image
                // if there is no record, that means user delete the image during the downloading,
                // then we need to cleanup
                ImageVO vo = dbf.reload(ivo);
                if (vo == null) {
                    evt.setError(operr("image [uuid:%s] has been deleted", ivo.getUuid()));
                    SQL.New("delete from ImageBackupStorageRefVO where imageUuid = :uuid")
                            .param("uuid", ivo.getUuid())
                            .execute();
                    bus.publish(evt);
                    return;
                }

                if (success.get()) {
                    // At least one backup storage holds the image: report success to extensions.
                    final ImageInventory einv = ImageInventory.valueOf(vo);
                    CollectionUtils.safeForEach(pluginRgty.getExtensionList(AddImageExtensionPoint.class),
                            new ForEachFunction<AddImageExtensionPoint>() {
                                @Override
                                public void run(AddImageExtensionPoint ext) {
                                    ext.afterAddImage(einv);
                                }
                            });
                    evt.setInventory(einv);
                } else {
                    // Every download failed: notify extensions and remove the image record.
                    final ErrorCode err = errf.instantiateErrorCode(SysErrors.CREATE_RESOURCE_ERROR,
                            String.format("Failed to download image[name:%s] on all backup storage%s.", inv.getName(), msg.getBackupStorageUuids()), errors);
                    CollectionUtils.safeForEach(pluginRgty.getExtensionList(AddImageExtensionPoint.class),
                            new ForEachFunction<AddImageExtensionPoint>() {
                                @Override
                                public void run(AddImageExtensionPoint ext) {
                                    ext.failedToAddImage(inv, err);
                                }
                            });
                    dbf.remove(ivo);
                    evt.setError(err);
                }

                bus.publish(evt);
            }
        }.start();
    }

    @Override
    public String getId() {
        // Local service id used by the cloud bus to route messages to this manager.
        return bus.makeLocalServiceId(ImageConstant.SERVICE_ID);
    }

    /**
     * Registers every {@link ImageFactory} plugin, keyed by image type.
     * Fails fast when two factories claim the same type.
     */
    private void populateExtensions() {
        for (ImageFactory f : pluginRgty.getExtensionList(ImageFactory.class)) {
            ImageFactory old = imageFactories.get(f.getType().toString());
            if (old != null) {
                throw new CloudRuntimeException(String.format("duplicate ImageFactory[%s, %s] for type[%s]",
                        f.getClass().getName(), old.getClass().getName(), f.getType()));
            }
            imageFactories.put(f.getType().toString(), f);
        }
    }

    @Override
    public boolean start() {
        populateExtensions();
        installGlobalConfigUpdater();
        return true;
    }

    // Restart the expunge task whenever any deletion-related global config changes,
    // so new policy/interval/period values take effect immediately.
    private void installGlobalConfigUpdater() {
        ImageGlobalConfig.DELETION_POLICY.installUpdateExtension(new GlobalConfigUpdateExtensionPoint() {
            @Override
            public void updateGlobalConfig(GlobalConfig oldConfig, GlobalConfig newConfig) {
                startExpungeTask();
            }
        });
        ImageGlobalConfig.EXPUNGE_INTERVAL.installUpdateExtension(new GlobalConfigUpdateExtensionPoint() {
            @Override
            public void updateGlobalConfig(GlobalConfig oldConfig, GlobalConfig newConfig) {
                startExpungeTask();
            }
        });
        ImageGlobalConfig.EXPUNGE_PERIOD.installUpdateExtension(new GlobalConfigUpdateExtensionPoint() {
            @Override
            public void updateGlobalConfig(GlobalConfig oldConfig, GlobalConfig newConfig) {
                startExpungeTask();
            }
        });
    }

    // (Re)schedules the periodic task that expunges images marked Deleted.
    // Cancels any previously scheduled instance first.
    private void startExpungeTask() {
        if (expungeTask != null) {
            expungeTask.cancel(true);
        }
        expungeTask = thdf.submitCancelablePeriodicTask(new CancelablePeriodicTask() {
            // Pages through ImageBackupStorageRefVO rows in Deleted status (1000 per page),
            // keeping only images this management node is responsible for.
            private List<Tuple> getDeletedImageManagedByUs() {
                int qun = 1000;
                SimpleQuery q = dbf.createQuery(ImageBackupStorageRefVO.class);
                q.add(ImageBackupStorageRefVO_.status, Op.EQ, ImageStatus.Deleted);
                long amount = q.count();
                int times = (int) (amount / qun) + (amount % qun != 0 ? 1 : 0);
                int start = 0;
                List<Tuple> ret = new ArrayList<Tuple>();
                for (int i = 0; i < times; i++) {
                    q = dbf.createQuery(ImageBackupStorageRefVO.class);
                    q.select(ImageBackupStorageRefVO_.imageUuid, ImageBackupStorageRefVO_.lastOpDate, ImageBackupStorageRefVO_.backupStorageUuid);
                    q.add(ImageBackupStorageRefVO_.status, Op.EQ, ImageStatus.Deleted);
                    q.setLimit(qun);
                    q.setStart(start);
                    List<Tuple> ts = q.listTuple();
                    start += qun;
                    for (Tuple t : ts) {
                        String imageUuid = t.get(0, String.class);
                        if (!destMaker.isManagedByUs(imageUuid)) {
                            continue;
                        }
                        ret.add(t);
                    }
                }
                return ret;
            }

            @Override
            public boolean run() {
                final List<Tuple> images = getDeletedImageManagedByUs();
                if (images.isEmpty()) {
                    logger.debug("[Image Expunge Task]: no images to expunge");
                    return false;
                }
                for (Tuple t : images) {
                    String imageUuid = t.get(0, String.class);
                    Timestamp date = t.get(1, Timestamp.class);
                    String bsUuid = t.get(2, String.class);
                    final Timestamp current = dbf.getCurrentSqlTime();
                    // Only expunge once the configured grace period has elapsed
                    // since the row's last operation time.
                    if (current.getTime() >= date.getTime() + TimeUnit.SECONDS.toMillis(ImageGlobalConfig.EXPUNGE_PERIOD.value(Long.class))) {
                        ImageDeletionPolicy deletionPolicy = deletionPolicyMgr.getDeletionPolicy(imageUuid);
                        if (ImageDeletionPolicy.Never == deletionPolicy) {
                            // NOTE(review): 'images' (the whole list) is passed where the format
                            // expects a single image uuid — likely should be 'imageUuid'; confirm.
                            logger.debug(String.format("the deletion policy[Never] is set for the image[uuid:%s] on the backup storage[uuid:%s]," +
                                    "don't expunge it", images, bsUuid));
                            continue;
                        }
                        ExpungeImageMsg msg = new ExpungeImageMsg();
                        msg.setImageUuid(imageUuid);
                        msg.setBackupStorageUuid(bsUuid);
                        bus.makeTargetServiceIdByResourceUuid(msg, ImageConstant.SERVICE_ID, imageUuid);
                        bus.send(msg, new CloudBusCallBack(null) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    // Best-effort: failure is logged and retried on the next period.
                                    N.New(ImageVO.class, imageUuid).warn_("failed to expunge the image[uuid:%s] on the backup storage[uuid:%s], will try it later. %s",
                                            imageUuid, bsUuid, reply.getError());
                                }
                            }
                        });
                    }
                }
                return false;
            }

            @Override
            public TimeUnit getTimeUnit() {
                return TimeUnit.SECONDS;
            }

            @Override
            public long getInterval() {
                return ImageGlobalConfig.EXPUNGE_INTERVAL.value(Long.class);
            }

            @Override
            public String getName() {
                return "expunge-image";
            }
        });
    }

    @Override
    public boolean stop() {
        return true;
    }

    // NOTE(review): method name contains a typo ("Facotry"); left unchanged because
    // callers may exist outside this view.
    private ImageFactory getImageFacotry(ImageType type) {
        ImageFactory factory = imageFactories.get(type.toString());
        if (factory == null) {
            throw new CloudRuntimeException(String.format("Unable to find ImageFactory with type[%s]", type));
        }
        return factory;
    }

    @Override
    public void managementNodeReady() {
        // The expunge task is only started once this node is a full member of the cluster.
        startExpungeTask();
    }

    /**
     * Declares the image quotas (image count and total image size) together with
     * the operator that validates quota-relevant API messages.
     */
    @Override
    public List<Quota> reportQuota() {
        Quota.QuotaOperator checker = new Quota.QuotaOperator() {
            @Override
            public void checkQuota(APIMessage msg, Map<String, Quota.QuotaPair> pairs) {
                // Non-admin accounts are checked for all three message types;
                // admin accounts only for ownership changes (the new owner may be non-admin).
                if (!new QuotaUtil().isAdminAccount(msg.getSession().getAccountUuid())) {
                    if (msg instanceof APIAddImageMsg) {
                        check((APIAddImageMsg) msg, pairs);
                    } else if (msg instanceof APIRecoverImageMsg) {
                        check((APIRecoverImageMsg) msg, pairs);
                    } else if (msg instanceof APIChangeResourceOwnerMsg) {
                        check((APIChangeResourceOwnerMsg) msg, pairs);
                    }
                } else {
                    if (msg instanceof APIChangeResourceOwnerMsg) {
                        check((APIChangeResourceOwnerMsg) msg, pairs);
                    }
                }
            }

            @Override
            public void checkQuota(NeedQuotaCheckMessage msg, Map<String, Quota.QuotaPair> pairs) {
                // No internal-message quota checks for images.
            }

            @Override
            public List<Quota.QuotaUsage> getQuotaUsageByAccount(String accountUuid) {
                List<Quota.QuotaUsage> usages = new ArrayList<>();

                ImageQuotaUtil.ImageQuota imageQuota = new ImageQuotaUtil().getUsed(accountUuid);

                Quota.QuotaUsage usage = new Quota.QuotaUsage();
                usage.setName(ImageConstant.QUOTA_IMAGE_NUM);
                usage.setUsed(imageQuota.imageNum);
                usages.add(usage);

                usage = new Quota.QuotaUsage();
                usage.setName(ImageConstant.QUOTA_IMAGE_SIZE);
                usage.setUsed(imageQuota.imageSize);
                usages.add(usage);

                return usages;
            }

            // Validates that handing an image to another account does not push the
            // new owner over its image-count or image-size quota.
            @Transactional(readOnly = true)
            private void check(APIChangeResourceOwnerMsg msg, Map<String, Quota.QuotaPair> pairs) {
                String currentAccountUuid = msg.getSession().getAccountUuid();
                String resourceTargetOwnerAccountUuid = msg.getAccountUuid();
                if (new QuotaUtil().isAdminAccount(resourceTargetOwnerAccountUuid)) {
                    // Admin accounts are not quota-limited.
                    return;
                }
                SimpleQuery<AccountResourceRefVO> q = dbf.createQuery(AccountResourceRefVO.class);
                q.add(AccountResourceRefVO_.resourceUuid, Op.EQ, msg.getResourceUuid());
                AccountResourceRefVO accResRefVO = q.find();

                if (accResRefVO.getResourceType().equals(ImageVO.class.getSimpleName())) {
                    long imageNumQuota = pairs.get(ImageConstant.QUOTA_IMAGE_NUM).getValue();
                    long imageSizeQuota = pairs.get(ImageConstant.QUOTA_IMAGE_SIZE).getValue();
                    long imageNumUsed = new ImageQuotaUtil().getUsedImageNum(resourceTargetOwnerAccountUuid);
                    long imageSizeUsed = new ImageQuotaUtil().getUsedImageSize(resourceTargetOwnerAccountUuid);

                    ImageVO image = dbf.getEntityManager().find(ImageVO.class, msg.getResourceUuid());
                    long imageNumAsked = 1;
                    long imageSizeAsked = image.getSize();

                    QuotaUtil.QuotaCompareInfo quotaCompareInfo;
                    {
                        quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                        quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                        quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                        quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_NUM;
                        quotaCompareInfo.quotaValue = imageNumQuota;
                        quotaCompareInfo.currentUsed = imageNumUsed;
                        quotaCompareInfo.request = imageNumAsked;
                        new QuotaUtil().CheckQuota(quotaCompareInfo);
                    }
                    {
                        quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                        quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                        quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                        quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_SIZE;
                        quotaCompareInfo.quotaValue = imageSizeQuota;
                        quotaCompareInfo.currentUsed = imageSizeUsed;
                        quotaCompareInfo.request = imageSizeAsked;
                        new QuotaUtil().CheckQuota(quotaCompareInfo);
                    }
                }
            }

            // Validates that recovering a deleted image fits within the owner's quotas.
            @Transactional(readOnly = true)
            private void check(APIRecoverImageMsg msg, Map<String, Quota.QuotaPair> pairs) {
                String currentAccountUuid = msg.getSession().getAccountUuid();
                String resourceTargetOwnerAccountUuid = new QuotaUtil().getResourceOwnerAccountUuid(msg.getImageUuid());

                long imageNumQuota = pairs.get(ImageConstant.QUOTA_IMAGE_NUM).getValue();
                long imageSizeQuota = pairs.get(ImageConstant.QUOTA_IMAGE_SIZE).getValue();
                long imageNumUsed = new ImageQuotaUtil().getUsedImageNum(resourceTargetOwnerAccountUuid);
                long imageSizeUsed = new ImageQuotaUtil().getUsedImageSize(resourceTargetOwnerAccountUuid);

                ImageVO image = dbf.getEntityManager().find(ImageVO.class, msg.getImageUuid());
                long imageNumAsked = 1;
                long imageSizeAsked = image.getSize();

                QuotaUtil.QuotaCompareInfo quotaCompareInfo;
                {
                    quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                    quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                    quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                    quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_NUM;
                    quotaCompareInfo.quotaValue = imageNumQuota;
                    quotaCompareInfo.currentUsed = imageNumUsed;
                    quotaCompareInfo.request = imageNumAsked;
                    new QuotaUtil().CheckQuota(quotaCompareInfo);
                }
                {
                    quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                    quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                    quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                    quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_SIZE;
                    quotaCompareInfo.quotaValue = imageSizeQuota;
                    quotaCompareInfo.currentUsed = imageSizeUsed;
                    quotaCompareInfo.request = imageSizeAsked;
                    new QuotaUtil().CheckQuota(quotaCompareInfo);
                }
            }

            // Validates that adding a new image fits the image-count quota; size is
            // checked separately via an HTTP HEAD of the image URL.
            @Transactional(readOnly = true)
            private void check(APIAddImageMsg msg, Map<String, Quota.QuotaPair> pairs) {
                String currentAccountUuid = msg.getSession().getAccountUuid();
                String resourceTargetOwnerAccountUuid = msg.getSession().getAccountUuid();

                long imageNumQuota = pairs.get(ImageConstant.QUOTA_IMAGE_NUM).getValue();
                long imageNumUsed = new ImageQuotaUtil().getUsedImageNum(resourceTargetOwnerAccountUuid);
                long imageNumAsked = 1;

                QuotaUtil.QuotaCompareInfo quotaCompareInfo;
                {
                    quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                    quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                    quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                    quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_NUM;
                    quotaCompareInfo.quotaValue = imageNumQuota;
                    quotaCompareInfo.currentUsed = imageNumUsed;
                    quotaCompareInfo.request = imageNumAsked;
                    new QuotaUtil().CheckQuota(quotaCompareInfo);
                }

                new ImageQuotaUtil().checkImageSizeQuotaUseHttpHead(msg, pairs);
            }
        };

        Quota quota = new Quota();
        quota.setOperator(checker);
        quota.addMessageNeedValidation(APIAddImageMsg.class);
        quota.addMessageNeedValidation(APIRecoverImageMsg.class);
        quota.addMessageNeedValidation(APIChangeResourceOwnerMsg.class);

        // Default quota values: 20 images, 10 TB total image size.
        Quota.QuotaPair p = new Quota.QuotaPair();
        p.setName(ImageConstant.QUOTA_IMAGE_NUM);
        p.setValue(20);
        quota.addPair(p);

        p = new Quota.QuotaPair();
        p.setName(ImageConstant.QUOTA_IMAGE_SIZE);
        p.setValue(SizeUnit.TERABYTE.toByte(10));
        quota.addPair(p);

        return list(quota);
    }

    @Override
    @Transactional(readOnly = true)
    public void resourceOwnerPreChange(AccountResourceRefInventory ref, String newOwnerUuid) {
        // Intentionally empty: image ownership changes need no pre-change work here.
    }
}
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.services.client.api.command; import static org.kie.services.client.api.command.InternalJmsCommandHelper.internalExecuteJmsCommand; import static org.kie.services.shared.ServicesVersion.VERSION; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; import java.util.Set; import javax.jms.Queue; import javax.ws.rs.core.MediaType; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.kie.api.command.Command; import org.kie.api.task.model.Task; import org.kie.internal.command.ProcessInstanceIdCommand; import org.kie.internal.jaxb.CorrelationKeyXmlAdapter; import org.kie.internal.jaxb.StringKeyObjectValueMap; import org.kie.internal.process.CorrelationKey; import org.kie.remote.client.api.exception.MissingRequiredInfoException; import org.kie.remote.client.api.exception.RemoteApiException; import org.kie.remote.client.api.exception.RemoteCommunicationException; import org.kie.remote.client.api.exception.RemoteTaskException; import org.kie.remote.client.jaxb.AcceptedClientCommands; import org.kie.remote.client.jaxb.JaxbCommandsRequest; import org.kie.remote.client.jaxb.JaxbCommandsResponse; import org.kie.remote.common.rest.KieRemoteHttpRequest; import 
org.kie.remote.common.rest.KieRemoteHttpResponse; import org.kie.remote.jaxb.gen.AddContentCommand; import org.kie.remote.jaxb.gen.AddTaskCommand; import org.kie.remote.jaxb.gen.AuditCommand; import org.kie.remote.jaxb.gen.CompleteTaskCommand; import org.kie.remote.jaxb.gen.CompleteWorkItemCommand; import org.kie.remote.jaxb.gen.Content; import org.kie.remote.jaxb.gen.ExecuteTaskRulesCommand; import org.kie.remote.jaxb.gen.FailTaskCommand; import org.kie.remote.jaxb.gen.InsertObjectCommand; import org.kie.remote.jaxb.gen.JaxbStringObjectPairArray; import org.kie.remote.jaxb.gen.ProcessSubTaskCommand; import org.kie.remote.jaxb.gen.SetGlobalCommand; import org.kie.remote.jaxb.gen.SetProcessInstanceVariablesCommand; import org.kie.remote.jaxb.gen.SetTaskPropertyCommand; import org.kie.remote.jaxb.gen.SignalEventCommand; import org.kie.remote.jaxb.gen.StartCorrelatedProcessCommand; import org.kie.remote.jaxb.gen.StartProcessCommand; import org.kie.remote.jaxb.gen.TaskCommand; import org.kie.remote.jaxb.gen.UpdateCommand; import org.kie.remote.jaxb.gen.AddContentFromUserCommand; import org.kie.remote.jaxb.gen.util.JaxbStringObjectPair; import org.kie.services.client.serialization.JaxbSerializationProvider; import org.kie.services.client.serialization.SerializationException; import org.kie.services.client.serialization.SerializationProvider; import org.kie.services.client.serialization.jaxb.impl.JaxbCommandResponse; import org.kie.services.client.serialization.jaxb.impl.JaxbRestRequestException; import org.kie.services.client.serialization.jaxb.rest.JaxbExceptionResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class contains the logic to interact with the REST or JMS api's. It is the basis for all of the remote interface instances. 
*/ public abstract class AbstractRemoteCommandObject { protected static final Logger logger = LoggerFactory.getLogger(AbstractRemoteCommandObject.class); protected final RemoteConfiguration config; protected boolean isTaskService = false; protected AbstractRemoteCommandObject(RemoteConfiguration config) { this.config = config; if( config.isJms() && config.getResponseQueue() == null ) { throw new MissingRequiredInfoException("A Response queue is necessary in order to create a Remote JMS Client instance."); } this.config.initializeJaxbSerializationProvider(); } RemoteConfiguration getConfig() { return config; } // Client object helper methods ----------------------------------------------------------------------------------------------- protected String convertCorrelationKeyToString(CorrelationKey correlationKey) { try { return CorrelationKeyXmlAdapter.marshalCorrelationKey(correlationKey); } catch( Exception e ) { throw new RemoteApiException("Unable to marshal correlation key to a string value", e); } } // Compatibility methods ----------------------------------------------------------------------------------------------------- public void readExternal( ObjectInput arg0 ) throws IOException, ClassNotFoundException { String methodName = (new Throwable()).getStackTrace()[0].getMethodName(); throw new UnsupportedOperationException(methodName + " is not supported on the JAXB " + Task.class.getSimpleName() + " implementation."); } public void writeExternal( ObjectOutput arg0 ) throws IOException { String methodName = (new Throwable()).getStackTrace()[0].getMethodName(); throw new UnsupportedOperationException(methodName + " is not supported on the JAXB " + Task.class.getSimpleName() + " implementation."); } // Execute methods ----------------------------------------------------------------------------------------------------- protected <T> T executeCommand( Command cmd ) { if( AcceptedClientCommands.isSendObjectParameterCommandClass(cmd.getClass()) ) { List<Object> 
extraClassInstanceList = new ArrayList<Object>(); preprocessParameterCommand(cmd, extraClassInstanceList); if( !extraClassInstanceList.isEmpty() ) { Set<Class<?>> extraJaxbClasses = new HashSet<Class<?>>(); for( Object jaxbObject : extraClassInstanceList ) { Class<?> jaxbClass = jaxbObject.getClass(); if( jaxbClass.isLocalClass() || jaxbClass.isAnonymousClass() ) { throw new SerializationException( "Only proper classes are allowed as parameters for the remote API: neither local nor anonymous classes are accepted: " + jaxbClass.getName()); } extraJaxbClasses.add(jaxbClass); } if( config.addJaxbClasses(extraJaxbClasses) ) { for( Class<?> extraClass : extraJaxbClasses ) { logger.debug( "Adding {} to the JAXBContext instance in this client instance.", extraClass.getName() ); } config.initializeJaxbSerializationProvider(); } } } preprocessCommand(cmd); if( config.isRest() ) { return executeRestCommand(cmd); } else { return executeJmsCommand(cmd); } } void preprocessCommand( Command cmd ) { String cmdName = cmd.getClass().getSimpleName(); if( cmd instanceof TaskCommand && cmdName.startsWith("GetTask") ) { TaskCommand taskCmd = (TaskCommand) cmd; String cmdUserId = taskCmd.getUserId(); String authUserId = config.getUserName(); if( cmdUserId == null ) { taskCmd.setUserId(authUserId); logger.debug("Using user id '" + authUserId + "' for '" + cmdName + "'."); } else if( ! 
cmdUserId.equals(authUserId) ) { throw new RemoteApiException("The user id used when retrieving task information (" + cmdUserId + ")" + " must match the authenticating user (" + authUserId + ")!"); } } } void preprocessParameterCommand( Object cmdObj, List<Object> extraClassInstanceList ) { if( cmdObj instanceof CompleteWorkItemCommand ) { addPossiblyNullObject(((CompleteWorkItemCommand) cmdObj).getResult(), extraClassInstanceList); } else if( cmdObj instanceof SignalEventCommand ) { addPossiblyNullObject(((SignalEventCommand) cmdObj).getEvent(), extraClassInstanceList); } else if( cmdObj instanceof StartCorrelatedProcessCommand ) { StartCorrelatedProcessCommand cmd = (StartCorrelatedProcessCommand) cmdObj; if( cmd.getData() != null ) { addPossiblyNullObject(cmd.getData().getDatas(), extraClassInstanceList); } addPossiblyNullObject(cmd.getParameter(), extraClassInstanceList); } else if( cmdObj instanceof StartProcessCommand ) { StartProcessCommand startProcCmd = (StartProcessCommand) cmdObj; if( startProcCmd.getData() != null ) { addPossiblyNullObject(startProcCmd.getData().getDatas(), extraClassInstanceList); } addPossiblyNullObject(((StartProcessCommand) cmdObj).getParameter(), extraClassInstanceList); } else if( cmdObj instanceof SetGlobalCommand ) { addPossiblyNullObject(((SetGlobalCommand) cmdObj).getObject(), extraClassInstanceList); } else if( cmdObj instanceof InsertObjectCommand ) { addPossiblyNullObject(((InsertObjectCommand) cmdObj).getObject(), extraClassInstanceList); } else if( cmdObj instanceof UpdateCommand ) { addPossiblyNullObject(((UpdateCommand) cmdObj).getObject(), extraClassInstanceList); } else if( cmdObj instanceof AddTaskCommand ) { addPossiblyNullObject(((AddTaskCommand) cmdObj).getParameter(), extraClassInstanceList); } else if( cmdObj instanceof CompleteTaskCommand ) { addPossiblyNullObject(((CompleteTaskCommand) cmdObj).getData(), extraClassInstanceList); } else if( cmdObj instanceof FailTaskCommand ) { 
addPossiblyNullObject(((FailTaskCommand) cmdObj).getData(), extraClassInstanceList); } else if( cmdObj instanceof AddContentFromUserCommand ) { addPossiblyNullObject(((AddContentFromUserCommand) cmdObj).getOutputContentMap(), extraClassInstanceList); } else if( cmdObj instanceof AddContentCommand ) { AddContentCommand cmd = (AddContentCommand) cmdObj; addPossiblyNullObject(cmd.getParameter(), extraClassInstanceList); Content jaxbContent = cmd.getJaxbContent(); if( jaxbContent != null ) { addPossiblyNullObject(jaxbContent.getContentMap(), extraClassInstanceList); } } else if( cmdObj instanceof SetTaskPropertyCommand ) { addPossiblyNullObject(((SetTaskPropertyCommand) cmdObj).getOutput(), extraClassInstanceList); } else if( cmdObj instanceof ExecuteTaskRulesCommand ) { addPossiblyNullObject(((ExecuteTaskRulesCommand) cmdObj).getData(), extraClassInstanceList); } else if( cmdObj instanceof ProcessSubTaskCommand ) { addPossiblyNullObject(((ProcessSubTaskCommand) cmdObj).getData(), extraClassInstanceList); } else if( cmdObj instanceof SetProcessInstanceVariablesCommand ) { addPossiblyNullObject(((SetProcessInstanceVariablesCommand) cmdObj).getVariables(), extraClassInstanceList); } } void addPossiblyNullObject( Object inputObject, List<Object> objectList ) { if( inputObject != null ) { if( inputObject instanceof List ) { objectList.addAll((List) inputObject); } else if( inputObject instanceof JaxbStringObjectPairArray ) { for( JaxbStringObjectPair stringObjectPair : ((JaxbStringObjectPairArray) inputObject).getItems() ) { objectList.add(stringObjectPair.getValue()); } } else if( inputObject instanceof StringKeyObjectValueMap ) { for( Object obj : ((StringKeyObjectValueMap) inputObject).values() ) { objectList.add(obj); } } else { objectList.add(inputObject); } } } static JaxbCommandsRequest prepareCommandRequest( Command command, String userName, String deploymentId, Long processInstanceId, Collection<String> correlationKeyProps) { if( deploymentId == null && !(command 
instanceof TaskCommand || command instanceof AuditCommand) ) { throw new MissingRequiredInfoException("A deployment id is required when sending commands involving the KieSession."); } JaxbCommandsRequest req; if( command instanceof AuditCommand ) { req = new JaxbCommandsRequest(command); } else { req = new JaxbCommandsRequest(deploymentId, command); } if( command instanceof TaskCommand ) { TaskCommand taskCmd = (TaskCommand) command; if( taskCmd.getUserId() == null ) { taskCmd.setUserId(userName); } } if( processInstanceId != null ) { if (command instanceof ProcessInstanceIdCommand) { processInstanceId = ((ProcessInstanceIdCommand) command).getProcessInstanceId(); } } if( correlationKeyProps != null && ! correlationKeyProps.isEmpty() ) { StringBuffer correlationKeyString = new StringBuffer(); Iterator<String> iter = correlationKeyProps.iterator(); correlationKeyString.append(iter.next()); while( iter.hasNext() ) { correlationKeyString.append(":").append(iter.next()); } req.setCorrelationKeyString(correlationKeyString.toString()); } req.setProcessInstanceId(processInstanceId); req.setUser(userName); req.setVersion(VERSION); return req; } /** * Method to communicate with the backend via JMS. * * @param command The {@link Command} object to be executed. * @return The result of the {@link Command} object execution. */ private <T> T executeJmsCommand( Command command ) { Queue sendQueue; boolean isTaskCommand = (command instanceof TaskCommand); if( isTaskCommand ) { sendQueue = config.getTaskQueue(); if( ! config.getUseUssl() && ! 
config.getDisableTaskSecurity() ) { throw new RemoteCommunicationException("Task operation requests can only be sent via JMS if SSL is used."); } } else { sendQueue = config.getKsessionQueue(); } return internalExecuteJmsCommand(command, config.getConnectionUserName(), config.getConnectionPassword(), config.getUserName(), config.getPassword(), config.getDeploymentId(), config.getProcessInstanceId(), config.getCorrelationProperties(), config.getConnectionFactory(), sendQueue, config.getResponseQueue(), (SerializationProvider) config.getJaxbSerializationProvider(), config.getExtraJaxbClasses(), config.getSerializationType(), config.getTimeout()); } /** * Method to communicate with the backend via REST. * * @param command The {@link Command} object to be executed. * @return The result of the {@link Command} object execution. */ private <T> T executeRestCommand( Command command ) { String cmdName = command.getClass().getSimpleName(); JaxbCommandsRequest jaxbRequest = prepareCommandRequest( command, config.getUserName(), config.getDeploymentId(), config.getProcessInstanceId(), config.getCorrelationProperties()); KieRemoteHttpRequest httpRequest = config.createHttpRequest().relativeRequest("/execute"); // necessary for deserialization String deploymentId = config.getDeploymentId(); if( ! emptyDeploymentId(deploymentId) ) { httpRequest.header(JaxbSerializationProvider.EXECUTE_DEPLOYMENT_ID_HEADER, deploymentId); } String jaxbRequestString = config.getJaxbSerializationProvider().serialize(jaxbRequest); if( logger.isTraceEnabled() ) { try { logger.trace("Sending {} via POST to {}", command.getClass().getSimpleName(), httpRequest.getUri()); } catch( Exception e ) { // do nothing because this should never happen.. 
} logger.trace("Serialized JaxbCommandsRequest:\n {}", jaxbRequestString); } KieRemoteHttpResponse httpResponse = null; try { logger.debug("Sending POST request with " + command.getClass().getSimpleName() + " to " + httpRequest.getUri()); httpRequest.contentType(MediaType.APPLICATION_XML); httpRequest.accept(MediaType.APPLICATION_XML); httpRequest.body(jaxbRequestString); httpRequest.post(); httpResponse = httpRequest.response(); } catch( Exception e ) { httpRequest.disconnect(); throw new RemoteCommunicationException("Unable to send HTTP POST request", e); } // Get response boolean htmlException = false; JaxbExceptionResponse exceptionResponse = null; JaxbCommandsResponse cmdResponse = null; int responseStatus; try { responseStatus = httpResponse.code(); String content = httpResponse.body(); if( responseStatus < 300 ) { cmdResponse = deserializeResponseContent(content, JaxbCommandsResponse.class); // check version String version = cmdResponse.getVersion(); if( version == null ) { version = "pre-6.0.3"; } if( !version.equals(VERSION) ) { logger.info("Response received from server version [{}] while client is version [{}]! 
This may cause problems.", version, VERSION); } } else { String contentType = httpResponse.contentType(); if( contentType.equals(MediaType.APPLICATION_XML) ) { Object response = deserializeResponseContent(content, JaxbExceptionResponse.class); if( response instanceof JaxbRestRequestException ) { JaxbRestRequestException exception = (JaxbRestRequestException) response; exceptionResponse = new JaxbExceptionResponse( exception.getUrl(), exception.getCause(), exception.getStatus()); exceptionResponse.setCommandName(cmdName); exceptionResponse.setIndex(0); exceptionResponse.setMessage(exception.getMessage()); } else if( response instanceof JaxbExceptionResponse ) { exceptionResponse = (JaxbExceptionResponse) response; } } else if( contentType.startsWith(MediaType.TEXT_HTML) ) { htmlException = true; exceptionResponse = new JaxbExceptionResponse(); Document doc = Jsoup.parse(content); String body = doc.body().text(); exceptionResponse.setMessage(body); exceptionResponse.setUrl(httpRequest.getUri().toString()); exceptionResponse.setStackTrace(""); } else { throw new RemoteCommunicationException("Unable to deserialize response with content type '" + contentType + "'"); } } } catch( Exception e ) { logger.error("Unable to retrieve response content from request with status {}: {}", e.getMessage(), e); throw new RemoteCommunicationException("Unable to retrieve content from response", e); } finally { httpRequest.disconnect(); } if( cmdResponse != null ) { List<JaxbCommandResponse<?>> responses = cmdResponse.getResponses(); if( responses.size() == 0 ) { return null; } else if( responses.size() == 1 ) { // The type information *should* come from the Command class -- but it's a jaxb-gen class, // which means that it has lost it's type information.. // TODO: fix this? 
JaxbCommandResponse<T> responseObject = (JaxbCommandResponse<T>) responses.get(0); if( responseObject instanceof JaxbExceptionResponse ) { exceptionResponse = (JaxbExceptionResponse) responseObject; } else { return responseObject.getResult(); } } else { throw new RemoteCommunicationException("Unexpected number of results from " + command.getClass().getSimpleName() + ":" + responses.size() + " results instead of only 1"); } } logger.error("Response with status {} returned.", responseStatus); // Process exception response switch ( responseStatus ) { case 409: throw new RemoteTaskException(exceptionResponse.getMessage(), exceptionResponse.getStackTrace()); default: if( exceptionResponse != null ) { if( ! htmlException ) { throw new RemoteApiException(exceptionResponse.getMessage(), exceptionResponse.getStackTrace()); } else { throw new RemoteCommunicationException(exceptionResponse.getMessage(), exceptionResponse.getStackTrace()); } } else { throw new RemoteCommunicationException("Unable to communicate with remote API via URL " + "'" + httpRequest.getUri().toString() + "'"); } } } private <T> T deserializeResponseContent(String responseBody, Class<T> entityClass) { JaxbSerializationProvider jaxbSerializationProvider = config.getJaxbSerializationProvider(); T responseEntity = null; try { responseEntity = (T) jaxbSerializationProvider.deserialize(responseBody); } catch( ClassCastException cce ) { throw new RemoteApiException("Unexpected entity in response body, expected " + entityClass.getName() + " instance.", cce); } return responseEntity; } // Command Object helper methods -- protected static <T> T getField( String fieldName, Class objClass, Object obj, Class<T> fieldClass ) throws Exception { Field field = objClass.getDeclaredField(fieldName); field.setAccessible(true); return (T) field.get(obj); } public static <T> T unsupported( Class<?> realClass, Class<T> returnClass ) { String methodName = (new Throwable()).getStackTrace()[1].getMethodName(); throw new 
UnsupportedOperationException("The " + realClass.getSimpleName() + "." + methodName + "(..) method is not supported on the Remote Client instance."); } public static boolean emptyDeploymentId(String deploymentId) { return deploymentId == null || deploymentId.trim().isEmpty(); } }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2019.01.11 at 02:39:34 PM EST
//

package schemas.docbook;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;


/**
 * JAXB binding for the DocBook {@code classname} element: mixed content
 * (text interleaved with inline child elements) plus the DocBook common,
 * common-linking, and xlink attribute groups.
 *
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;choice maxOccurs="unbounded" minOccurs="0">
 *         &lt;element ref="{http://docbook.org/ns/docbook}inlinemediaobject"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}remark"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}superscript"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}subscript"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}xref"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}link"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}olink"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}anchor"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}biblioref"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}alt"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}annotation"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}indexterm"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}phrase"/>
 *         &lt;element ref="{http://docbook.org/ns/docbook}replaceable"/>
 *       &lt;/choice>
 *       &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.attributes"/>
 *       &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.linking.attributes"/>
 *       &lt;attribute name="role" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "classname")
public class Classname {

    // Mixed content: String nodes for raw text, plus instances of the bound
    // element classes listed in the @XmlElementRefs below. Lazily created by
    // getContent(); may be null until first accessed.
    @XmlElementRefs({
        @XmlElementRef(name = "link", namespace = "http://docbook.org/ns/docbook", type = Link.class, required = false),
        @XmlElementRef(name = "phrase", namespace = "http://docbook.org/ns/docbook", type = Phrase.class, required = false),
        @XmlElementRef(name = "anchor", namespace = "http://docbook.org/ns/docbook", type = Anchor.class, required = false),
        @XmlElementRef(name = "alt", namespace = "http://docbook.org/ns/docbook", type = Alt.class, required = false),
        @XmlElementRef(name = "indexterm", namespace = "http://docbook.org/ns/docbook", type = Indexterm.class, required = false),
        @XmlElementRef(name = "remark", namespace = "http://docbook.org/ns/docbook", type = Remark.class, required = false),
        @XmlElementRef(name = "xref", namespace = "http://docbook.org/ns/docbook", type = Xref.class, required = false),
        @XmlElementRef(name = "inlinemediaobject", namespace = "http://docbook.org/ns/docbook", type = Inlinemediaobject.class, required = false),
        @XmlElementRef(name = "olink", namespace = "http://docbook.org/ns/docbook", type = Olink.class, required = false),
        @XmlElementRef(name = "biblioref", namespace = "http://docbook.org/ns/docbook", type = Biblioref.class, required = false),
        @XmlElementRef(name = "annotation", namespace = "http://docbook.org/ns/docbook", type = Annotation.class, required = false),
        @XmlElementRef(name = "replaceable", namespace = "http://docbook.org/ns/docbook", type = Replaceable.class, required = false),
        @XmlElementRef(name = "superscript", namespace = "http://docbook.org/ns/docbook", type = Superscript.class, required = false),
        @XmlElementRef(name = "subscript", namespace = "http://docbook.org/ns/docbook", type = Subscript.class, required = false)
    })
    @XmlMixed
    protected List<Object> content;
    // "role" attribute in the default (no) namespace — distinct from xlinkRole below.
    @XmlAttribute(name = "role")
    @XmlSchemaType(name = "anySimpleType")
    protected String role;
    // xml:id — document-unique ID; collapsed whitespace per xsd:ID rules.
    @XmlAttribute(name = "id", namespace = "http://www.w3.org/XML/1998/namespace")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    // DocBook common attribute "version" (field renamed to avoid clashes).
    @XmlAttribute(name = "version")
    @XmlSchemaType(name = "anySimpleType")
    protected String commonVersion;
    @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace")
    @XmlSchemaType(name = "anySimpleType")
    protected String xmlLang;
    @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace")
    @XmlSchemaType(name = "anySimpleType")
    protected String base;
    @XmlAttribute(name = "remap")
    @XmlSchemaType(name = "anySimpleType")
    protected String remap;
    @XmlAttribute(name = "xreflabel")
    @XmlSchemaType(name = "anySimpleType")
    protected String xreflabel;
    @XmlAttribute(name = "revisionflag")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String revisionflag;
    @XmlAttribute(name = "dir")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String dir;
    // DocBook effectivity (profiling) attributes.
    @XmlAttribute(name = "arch")
    @XmlSchemaType(name = "anySimpleType")
    protected String arch;
    @XmlAttribute(name = "audience")
    @XmlSchemaType(name = "anySimpleType")
    protected String audience;
    @XmlAttribute(name = "condition")
    @XmlSchemaType(name = "anySimpleType")
    protected String condition;
    @XmlAttribute(name = "conformance")
    @XmlSchemaType(name = "anySimpleType")
    protected String conformance;
    @XmlAttribute(name = "os")
    @XmlSchemaType(name = "anySimpleType")
    protected String os;
    // DocBook common attribute "revision" (field renamed to avoid clashes).
    @XmlAttribute(name = "revision")
    @XmlSchemaType(name = "anySimpleType")
    protected String commonRevision;
    @XmlAttribute(name = "security")
    @XmlSchemaType(name = "anySimpleType")
    protected String security;
    @XmlAttribute(name = "userlevel")
    @XmlSchemaType(name = "anySimpleType")
    protected String userlevel;
    @XmlAttribute(name = "vendor")
    @XmlSchemaType(name = "anySimpleType")
    protected String vendor;
    @XmlAttribute(name = "wordsize")
    @XmlSchemaType(name = "anySimpleType")
    protected String wordsize;
    @XmlAttribute(name = "annotations")
    @XmlSchemaType(name = "anySimpleType")
    protected String annotations;
    // IDREF resolved by JAXB to the object carrying the matching @XmlID —
    // hence the Object (not String) type.
    @XmlAttribute(name = "linkend")
    @XmlIDREF
    @XmlSchemaType(name = "IDREF")
    protected Object linkend;
    // XLink attribute group (http://www.w3.org/1999/xlink).
    @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String href;
    @XmlAttribute(name = "type", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String xlinkType;
    @XmlAttribute(name = "role", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String xlinkRole;
    @XmlAttribute(name = "arcrole", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String arcrole;
    @XmlAttribute(name = "title", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String xlinkTitle;
    @XmlAttribute(name = "show", namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String show;
    @XmlAttribute(name = "actuate", namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String actuate;

    /**
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a
     * snapshot: any modification to the returned list is reflected in this
     * JAXB object, which is why there is no {@code set} method for it.
     * The list is lazily created on first call.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list:
     * {@link Link }, {@link Phrase }, {@link Anchor }, {@link Alt },
     * {@link Indexterm }, {@link Remark }, {@link Xref },
     * {@link Inlinemediaobject }, {@link Olink }, {@link Biblioref },
     * {@link Annotation }, {@link Replaceable }, {@link String }
     * (mixed text), {@link Superscript }, {@link Subscript }
     *
     * @return the live, never-{@code null} content list
     */
    public List<Object> getContent() {
        if (content == null) {
            content = new ArrayList<Object>();
        }
        return this.content;
    }

    /** @return the {@code role} attribute, possibly {@code null}. */
    public String getRole() {
        return role;
    }

    /** @param value the {@code role} attribute; may be {@code null}. */
    public void setRole(String value) {
        this.role = value;
    }

    /** @return the {@code xml:id} attribute, possibly {@code null}. */
    public String getId() {
        return id;
    }

    /** @param value the {@code xml:id} attribute; may be {@code null}. */
    public void setId(String value) {
        this.id = value;
    }

    /** @return the DocBook {@code version} attribute, possibly {@code null}. */
    public String getCommonVersion() {
        return commonVersion;
    }

    /** @param value the DocBook {@code version} attribute; may be {@code null}. */
    public void setCommonVersion(String value) {
        this.commonVersion = value;
    }

    /** @return the {@code xml:lang} attribute, possibly {@code null}. */
    public String getXmlLang() {
        return xmlLang;
    }

    /** @param value the {@code xml:lang} attribute; may be {@code null}. */
    public void setXmlLang(String value) {
        this.xmlLang = value;
    }

    /** @return the {@code xml:base} attribute, possibly {@code null}. */
    public String getBase() {
        return base;
    }

    /** @param value the {@code xml:base} attribute; may be {@code null}. */
    public void setBase(String value) {
        this.base = value;
    }

    /** @return the {@code remap} attribute, possibly {@code null}. */
    public String getRemap() {
        return remap;
    }

    /** @param value the {@code remap} attribute; may be {@code null}. */
    public void setRemap(String value) {
        this.remap = value;
    }

    /** @return the {@code xreflabel} attribute, possibly {@code null}. */
    public String getXreflabel() {
        return xreflabel;
    }

    /** @param value the {@code xreflabel} attribute; may be {@code null}. */
    public void setXreflabel(String value) {
        this.xreflabel = value;
    }

    /** @return the {@code revisionflag} attribute, possibly {@code null}. */
    public String getRevisionflag() {
        return revisionflag;
    }

    /** @param value the {@code revisionflag} attribute; may be {@code null}. */
    public void setRevisionflag(String value) {
        this.revisionflag = value;
    }

    /** @return the {@code dir} (text direction) attribute, possibly {@code null}. */
    public String getDir() {
        return dir;
    }

    /** @param value the {@code dir} (text direction) attribute; may be {@code null}. */
    public void setDir(String value) {
        this.dir = value;
    }

    /** @return the {@code arch} effectivity attribute, possibly {@code null}. */
    public String getArch() {
        return arch;
    }

    /** @param value the {@code arch} effectivity attribute; may be {@code null}. */
    public void setArch(String value) {
        this.arch = value;
    }

    /** @return the {@code audience} effectivity attribute, possibly {@code null}. */
    public String getAudience() {
        return audience;
    }

    /** @param value the {@code audience} effectivity attribute; may be {@code null}. */
    public void setAudience(String value) {
        this.audience = value;
    }

    /** @return the {@code condition} effectivity attribute, possibly {@code null}. */
    public String getCondition() {
        return condition;
    }

    /** @param value the {@code condition} effectivity attribute; may be {@code null}. */
    public void setCondition(String value) {
        this.condition = value;
    }

    /** @return the {@code conformance} effectivity attribute, possibly {@code null}. */
    public String getConformance() {
        return conformance;
    }

    /** @param value the {@code conformance} effectivity attribute; may be {@code null}. */
    public void setConformance(String value) {
        this.conformance = value;
    }

    /** @return the {@code os} effectivity attribute, possibly {@code null}. */
    public String getOs() {
        return os;
    }

    /** @param value the {@code os} effectivity attribute; may be {@code null}. */
    public void setOs(String value) {
        this.os = value;
    }

    /** @return the DocBook {@code revision} attribute, possibly {@code null}. */
    public String getCommonRevision() {
        return commonRevision;
    }

    /** @param value the DocBook {@code revision} attribute; may be {@code null}. */
    public void setCommonRevision(String value) {
        this.commonRevision = value;
    }

    /** @return the {@code security} effectivity attribute, possibly {@code null}. */
    public String getSecurity() {
        return security;
    }

    /** @param value the {@code security} effectivity attribute; may be {@code null}. */
    public void setSecurity(String value) {
        this.security = value;
    }

    /** @return the {@code userlevel} effectivity attribute, possibly {@code null}. */
    public String getUserlevel() {
        return userlevel;
    }

    /** @param value the {@code userlevel} effectivity attribute; may be {@code null}. */
    public void setUserlevel(String value) {
        this.userlevel = value;
    }

    /** @return the {@code vendor} effectivity attribute, possibly {@code null}. */
    public String getVendor() {
        return vendor;
    }

    /** @param value the {@code vendor} effectivity attribute; may be {@code null}. */
    public void setVendor(String value) {
        this.vendor = value;
    }

    /** @return the {@code wordsize} effectivity attribute, possibly {@code null}. */
    public String getWordsize() {
        return wordsize;
    }

    /** @param value the {@code wordsize} effectivity attribute; may be {@code null}. */
    public void setWordsize(String value) {
        this.wordsize = value;
    }

    /** @return the {@code annotations} attribute, possibly {@code null}. */
    public String getAnnotations() {
        return annotations;
    }

    /** @param value the {@code annotations} attribute; may be {@code null}. */
    public void setAnnotations(String value) {
        this.annotations = value;
    }

    /**
     * @return the object referenced by the {@code linkend} IDREF (JAXB
     *         resolves it to the element carrying the matching ID), possibly
     *         {@code null}.
     */
    public Object getLinkend() {
        return linkend;
    }

    /** @param value the IDREF target object for {@code linkend}; may be {@code null}. */
    public void setLinkend(Object value) {
        this.linkend = value;
    }

    /** @return the {@code xlink:href} attribute, possibly {@code null}. */
    public String getHref() {
        return href;
    }

    /** @param value the {@code xlink:href} attribute; may be {@code null}. */
    public void setHref(String value) {
        this.href = value;
    }

    /** @return the {@code xlink:type} attribute, possibly {@code null}. */
    public String getXlinkType() {
        return xlinkType;
    }

    /** @param value the {@code xlink:type} attribute; may be {@code null}. */
    public void setXlinkType(String value) {
        this.xlinkType = value;
    }

    /** @return the {@code xlink:role} attribute, possibly {@code null}. */
    public String getXlinkRole() {
        return xlinkRole;
    }

    /** @param value the {@code xlink:role} attribute; may be {@code null}. */
    public void setXlinkRole(String value) {
        this.xlinkRole = value;
    }

    /** @return the {@code xlink:arcrole} attribute, possibly {@code null}. */
    public String getArcrole() {
        return arcrole;
    }

    /** @param value the {@code xlink:arcrole} attribute; may be {@code null}. */
    public void setArcrole(String value) {
        this.arcrole = value;
    }

    /** @return the {@code xlink:title} attribute, possibly {@code null}. */
    public String getXlinkTitle() {
        return xlinkTitle;
    }

    /** @param value the {@code xlink:title} attribute; may be {@code null}. */
    public void setXlinkTitle(String value) {
        this.xlinkTitle = value;
    }

    /** @return the {@code xlink:show} attribute, possibly {@code null}. */
    public String getShow() {
        return show;
    }

    /** @param value the {@code xlink:show} attribute; may be {@code null}. */
    public void setShow(String value) {
        this.show = value;
    }

    /** @return the {@code xlink:actuate} attribute, possibly {@code null}. */
    public String getActuate() {
        return actuate;
    }

    /** @param value the {@code xlink:actuate} attribute; may be {@code null}. */
    public void setActuate(String value) {
        this.actuate = value;
    }

}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.managedblockchain.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object of the Managed Blockchain {@code ListProposals} API call: a
 * page of proposal summaries plus an optional pagination token. This class is
 * code-generated; edits here will be lost on regeneration.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListProposals" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListProposalsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * The summary of each proposal made on the network.
     * </p>
     */
    private java.util.List<ProposalSummary> proposals;
    /**
     * <p>
     * The pagination token that indicates the next set of results to retrieve.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The summary of each proposal made on the network.
     * </p>
     *
     * @return The summary of each proposal made on the network; may be {@code null}.
     */
    public java.util.List<ProposalSummary> getProposals() {
        return proposals;
    }

    /**
     * <p>
     * The summary of each proposal made on the network.
     * </p>
     * A {@code null} argument clears the list; otherwise the collection is
     * defensively copied.
     *
     * @param proposals
     *        The summary of each proposal made on the network.
     */
    public void setProposals(java.util.Collection<ProposalSummary> proposals) {
        if (proposals == null) {
            this.proposals = null;
            return;
        }

        this.proposals = new java.util.ArrayList<ProposalSummary>(proposals);
    }

    /**
     * <p>
     * The summary of each proposal made on the network.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setProposals(java.util.Collection)} or {@link #withProposals(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param proposals
     *        The summary of each proposal made on the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListProposalsResult withProposals(ProposalSummary... proposals) {
        if (this.proposals == null) {
            // Presize to the varargs length before appending below.
            setProposals(new java.util.ArrayList<ProposalSummary>(proposals.length));
        }
        for (ProposalSummary ele : proposals) {
            this.proposals.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The summary of each proposal made on the network.
     * </p>
     * Unlike the varargs overload, this replaces (does not append to) any
     * existing list.
     *
     * @param proposals
     *        The summary of each proposal made on the network.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListProposalsResult withProposals(java.util.Collection<ProposalSummary> proposals) {
        setProposals(proposals);
        return this;
    }

    /**
     * <p>
     * The pagination token that indicates the next set of results to retrieve.
     * </p>
     *
     * @param nextToken
     *        The pagination token that indicates the next set of results to retrieve.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The pagination token that indicates the next set of results to retrieve.
     * </p>
     *
     * @return The pagination token that indicates the next set of results to retrieve; {@code null} on the last page.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The pagination token that indicates the next set of results to retrieve.
     * </p>
     *
     * @param nextToken
     *        The pagination token that indicates the next set of results to retrieve.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListProposalsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getProposals() != null)
            sb.append("Proposals: ").append(getProposals()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Field-by-field equality on {@code proposals} and {@code nextToken}.
     * The XOR checks catch the one-side-null case before the equals calls.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListProposalsResult == false)
            return false;
        ListProposalsResult other = (ListProposalsResult) obj;
        if (other.getProposals() == null ^ this.getProposals() == null)
            return false;
        if (other.getProposals() != null && other.getProposals().equals(this.getProposals()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }

    /** Hash code consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getProposals() == null) ? 0 : getProposals().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    /**
     * Shallow clone via {@link Object#clone()}; this class is declared
     * {@code Cloneable}, so the exception path is unreachable in practice.
     */
    @Override
    public ListProposalsResult clone() {
        try {
            return (ListProposalsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
package com.example.smlouvy.dto;

import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;

/**
 * Jackson DTO for an attachment ("priloha") record of a Czech public-contract
 * (smlouvy) entry. Null fields are omitted on serialization; unrecognized JSON
 * properties are captured in {@link #getAdditionalProperties()}.
 *
 * <p>NOTE: the "Lenght" spelling is deliberate — it mirrors the upstream JSON
 * field name and must not be "corrected" here.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "FileMetadata",
    "data",
    "nazevSouboru",
    "hash",
    "odkaz",
    "DatlClassification",
    "PlainTextContent",
    "PlainTextContentQuality",
    "LastUpdate",
    "LocalCopy",
    "ContentType",
    "Lenght",
    "WordCount",
    "Pages",
    "EnoughExtractedText"
})
public class Prilohy {

    @JsonProperty("FileMetadata")
    private Object fileMetadata;
    @JsonProperty("data")
    private Object data;
    // "nazevSouboru" = file name (Czech).
    @JsonProperty("nazevSouboru")
    private String nazevSouboru;
    @JsonProperty("hash")
    private Hash hash;
    // "odkaz" = link/URL to the attachment (Czech).
    @JsonProperty("odkaz")
    private String odkaz;
    @JsonProperty("DatlClassification")
    private Object datlClassification;
    @JsonProperty("PlainTextContent")
    private String plainTextContent;
    @JsonProperty("PlainTextContentQuality")
    private Integer plainTextContentQuality;
    @JsonProperty("LastUpdate")
    private String lastUpdate;
    @JsonProperty("LocalCopy")
    private Object localCopy;
    @JsonProperty("ContentType")
    private String contentType;
    // Intentional upstream misspelling of "Length" — see class note.
    @JsonProperty("Lenght")
    private Integer lenght;
    @JsonProperty("WordCount")
    private Integer wordCount;
    @JsonProperty("Pages")
    private Integer pages;
    @JsonProperty("EnoughExtractedText")
    private Boolean enoughExtractedText;
    // Catch-all for JSON properties not mapped above (see @JsonAnySetter).
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    /** @return raw file metadata; shape not fixed by this DTO, possibly {@code null}. */
    @JsonProperty("FileMetadata")
    public Object getFileMetadata() {
        return fileMetadata;
    }

    /** @param fileMetadata raw file metadata; may be {@code null}. */
    @JsonProperty("FileMetadata")
    public void setFileMetadata(Object fileMetadata) {
        this.fileMetadata = fileMetadata;
    }

    /** @return raw attachment data payload, possibly {@code null}. */
    @JsonProperty("data")
    public Object getData() {
        return data;
    }

    /** @param data raw attachment data payload; may be {@code null}. */
    @JsonProperty("data")
    public void setData(Object data) {
        this.data = data;
    }

    /** @return the file name ("nazevSouboru"), possibly {@code null}. */
    @JsonProperty("nazevSouboru")
    public String getNazevSouboru() {
        return nazevSouboru;
    }

    /** @param nazevSouboru the file name; may be {@code null}. */
    @JsonProperty("nazevSouboru")
    public void setNazevSouboru(String nazevSouboru) {
        this.nazevSouboru = nazevSouboru;
    }

    /** @return the content hash record, possibly {@code null}. */
    @JsonProperty("hash")
    public Hash getHash() {
        return hash;
    }

    /** @param hash the content hash record; may be {@code null}. */
    @JsonProperty("hash")
    public void setHash(Hash hash) {
        this.hash = hash;
    }

    /** @return the attachment link ("odkaz"), possibly {@code null}. */
    @JsonProperty("odkaz")
    public String getOdkaz() {
        return odkaz;
    }

    /** @param odkaz the attachment link; may be {@code null}. */
    @JsonProperty("odkaz")
    public void setOdkaz(String odkaz) {
        this.odkaz = odkaz;
    }

    /** @return the DATL classification payload, possibly {@code null}. */
    @JsonProperty("DatlClassification")
    public Object getDatlClassification() {
        return datlClassification;
    }

    /** @param datlClassification the DATL classification payload; may be {@code null}. */
    @JsonProperty("DatlClassification")
    public void setDatlClassification(Object datlClassification) {
        this.datlClassification = datlClassification;
    }

    /** @return the extracted plain-text content, possibly {@code null}. */
    @JsonProperty("PlainTextContent")
    public String getPlainTextContent() {
        return plainTextContent;
    }

    /** @param plainTextContent the extracted plain-text content; may be {@code null}. */
    @JsonProperty("PlainTextContent")
    public void setPlainTextContent(String plainTextContent) {
        this.plainTextContent = plainTextContent;
    }

    /** @return the text-extraction quality score, possibly {@code null}. */
    @JsonProperty("PlainTextContentQuality")
    public Integer getPlainTextContentQuality() {
        return plainTextContentQuality;
    }

    /** @param plainTextContentQuality the text-extraction quality score; may be {@code null}. */
    @JsonProperty("PlainTextContentQuality")
    public void setPlainTextContentQuality(Integer plainTextContentQuality) {
        this.plainTextContentQuality = plainTextContentQuality;
    }

    // NOTE(review): timestamp kept as raw String as delivered by the API;
    // format/zone not established here — confirm against the feed before parsing.
    /** @return the last-update timestamp string, possibly {@code null}. */
    @JsonProperty("LastUpdate")
    public String getLastUpdate() {
        return lastUpdate;
    }

    /** @param lastUpdate the last-update timestamp string; may be {@code null}. */
    @JsonProperty("LastUpdate")
    public void setLastUpdate(String lastUpdate) {
        this.lastUpdate = lastUpdate;
    }

    /** @return the local-copy descriptor, possibly {@code null}. */
    @JsonProperty("LocalCopy")
    public Object getLocalCopy() {
        return localCopy;
    }

    /** @param localCopy the local-copy descriptor; may be {@code null}. */
    @JsonProperty("LocalCopy")
    public void setLocalCopy(Object localCopy) {
        this.localCopy = localCopy;
    }

    /** @return the MIME content type, possibly {@code null}. */
    @JsonProperty("ContentType")
    public String getContentType() {
        return contentType;
    }

    /** @param contentType the MIME content type; may be {@code null}. */
    @JsonProperty("ContentType")
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    /** @return the "Lenght" value (upstream spelling — see class note), possibly {@code null}. */
    @JsonProperty("Lenght")
    public Integer getLenght() {
        return lenght;
    }

    /** @param lenght the "Lenght" value (upstream spelling — see class note); may be {@code null}. */
    @JsonProperty("Lenght")
    public void setLenght(Integer lenght) {
        this.lenght = lenght;
    }

    /** @return the extracted word count, possibly {@code null}. */
    @JsonProperty("WordCount")
    public Integer getWordCount() {
        return wordCount;
    }

    /** @param wordCount the extracted word count; may be {@code null}. */
    @JsonProperty("WordCount")
    public void setWordCount(Integer wordCount) {
        this.wordCount = wordCount;
    }

    /** @return the page count, possibly {@code null}. */
    @JsonProperty("Pages")
    public Integer getPages() {
        return pages;
    }

    /** @param pages the page count; may be {@code null}. */
    @JsonProperty("Pages")
    public void setPages(Integer pages) {
        this.pages = pages;
    }

    /** @return whether enough text was extracted, possibly {@code null}. */
    @JsonProperty("EnoughExtractedText")
    public Boolean getEnoughExtractedText() {
        return enoughExtractedText;
    }

    /** @param enoughExtractedText whether enough text was extracted; may be {@code null}. */
    @JsonProperty("EnoughExtractedText")
    public void setEnoughExtractedText(Boolean enoughExtractedText) {
        this.enoughExtractedText = enoughExtractedText;
    }

    /**
     * Reflective string representation of all fields.
     * NOTE(review): the exclusion list contains only the empty string, which
     * matches no field — presumably equivalent to a plain reflective
     * toString; confirm before simplifying.
     */
    @Override
    public String toString() {
        return ReflectionToStringBuilder.toStringExclude(this, new String[] {""});
    }

    /** @return the live map of JSON properties not mapped to a declared field. */
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    /**
     * Captures an unrecognized JSON property during deserialization.
     *
     * @param name  the JSON property name
     * @param value the deserialized property value
     */
    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.orc.impl;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcProto;
import org.apache.orc.PhysicalWriter;
import org.apache.orc.TypeDescription;
import org.junit.Test;

import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

import static org.junit.Assert.assertEquals;

/**
 * Unit tests for {@link PhysicalFsWriter} stripe layout: HDFS block padding
 * on and off, and variable-length ("short") block support. All I/O goes
 * through an in-memory mock {@link FileSystem} so no real files are written.
 */
public class TestPhysicalFsWriter {

  final Configuration conf = new Configuration();

  /**
   * An OutputStream that records every write as a separate byte[] chunk,
   * letting tests observe exactly how much data reached the "file".
   */
  static class MemoryOutputStream extends OutputStream {
    private final List<byte[]> contents;

    MemoryOutputStream(List<byte[]> contents) {
      this.contents = contents;
    }

    @Override
    public void write(int b) {
      contents.add(new byte[]{(byte) b});
    }

    @Override
    public void write(byte[] a, int offset, int length) {
      byte[] buffer = new byte[length];
      System.arraycopy(a, offset, buffer, 0, length);
      contents.add(buffer);
    }
  }

  /**
   * A minimal in-memory FileSystem: each Path maps to a list of written
   * byte[] chunks. Only the operations that PhysicalFsWriter needs are
   * implemented; read/append are unsupported.
   */
  static class MemoryFileSystem extends FileSystem {
    // Per-path contents of every "file" created through this FileSystem.
    private final Map<Path, List<byte[]>> fileContents = new HashMap<>();
    private Path currentWorkingDirectory = new Path("/");

    @Override
    public URI getUri() {
      try {
        return new URI("test:///");
      } catch (URISyntaxException e) {
        throw new IllegalStateException("bad url", e);
      }
    }

    @Override
    public FSDataInputStream open(Path f, int bufferSize) throws IOException {
      // Reading is never exercised by these tests.
      return null;
    }

    @Override
    public FSDataOutputStream create(Path f, FsPermission permission,
                                     boolean overwrite, int bufferSize,
                                     short replication, long blockSize,
                                     Progressable progress) throws IOException {
      List<byte[]> contents = new ArrayList<>();
      fileContents.put(f, contents);
      return new FSDataOutputStream(new MemoryOutputStream(contents));
    }

    @Override
    public FSDataOutputStream append(Path f, int bufferSize,
                                     Progressable progress) {
      throw new UnsupportedOperationException("append not supported");
    }

    @Override
    public boolean rename(Path src, Path dst) {
      // Only succeeds when src exists and dst does not, mirroring HDFS.
      boolean result = fileContents.containsKey(src) &&
          !fileContents.containsKey(dst);
      if (result) {
        List<byte[]> contents = fileContents.remove(src);
        fileContents.put(dst, contents);
      }
      return result;
    }

    @Override
    public boolean delete(Path f, boolean recursive) {
      boolean result = fileContents.containsKey(f);
      fileContents.remove(f);
      return result;
    }

    @Override
    public FileStatus[] listStatus(Path f) {
      return new FileStatus[]{getFileStatus(f)};
    }

    @Override
    public void setWorkingDirectory(Path new_dir) {
      currentWorkingDirectory = new_dir;
    }

    @Override
    public Path getWorkingDirectory() {
      return currentWorkingDirectory;
    }

    @Override
    public boolean mkdirs(Path f, FsPermission permission) {
      return false;
    }

    @Override
    public FileStatus getFileStatus(Path f) {
      // NOTE(review): returns null for a missing path rather than throwing
      // FileNotFoundException as the FileSystem contract specifies; the
      // tests never hit that case, so the mock behavior is kept as-is.
      List<byte[]> contents = fileContents.get(f);
      if (contents != null) {
        long sum = 0;
        for (byte[] b : contents) {
          sum += b.length;
        }
        return new FileStatus(sum, false, 1, 256 * 1024, 0, f);
      }
      return null;
    }
  }

  /** Builds the shared 1 KiB test buffer filled with the pattern 0..255. */
  private static byte[] patternBuffer() {
    byte[] buffer = new byte[1024];
    for (int i = 0; i < buffer.length; ++i) {
      buffer[i] = (byte) i;
    }
    return buffer;
  }

  /**
   * Writes {@code count} copies of {@code buffer} into the stream, so each
   * call adds {@code count} KiB of data to the current stripe.
   */
  private static void writeBuffers(PhysicalWriter.OutputReceiver output,
                                   byte[] buffer,
                                   int count) throws IOException {
    for (int i = 0; i < count; ++i) {
      output.output(ByteBuffer.wrap(buffer));
    }
  }

  @Test
  public void testStripePadding() throws IOException {
    TypeDescription schema = TypeDescription.fromString("int");
    OrcFile.WriterOptions opts = OrcFile.writerOptions(conf)
        .stripeSize(32 * 1024)
        .blockSize(64 * 1024)
        .compress(CompressionKind.NONE)
        .setSchema(schema);
    MemoryFileSystem fs = new MemoryFileSystem();
    PhysicalFsWriter writer =
        new PhysicalFsWriter(fs, new Path("test1.orc"), opts);
    writer.writeHeader();
    StreamName stream0 = new StreamName(0, OrcProto.Stream.Kind.DATA);
    PhysicalWriter.OutputReceiver output = writer.createDataStream(stream0);
    byte[] buffer = patternBuffer();
    writeBuffers(output, buffer, 63);
    OrcProto.StripeFooter.Builder footer = OrcProto.StripeFooter.newBuilder();
    OrcProto.StripeInformation.Builder dirEntry =
        OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // check to make sure that it laid it out without padding; the offset of
    // 3 is the length of the "ORC" magic header.
    assertEquals(0L, dirEntry.getIndexLength());
    assertEquals(63 * 1024L, dirEntry.getDataLength());
    assertEquals(3, dirEntry.getOffset());
    writeBuffers(output, buffer, 62);
    footer = OrcProto.StripeFooter.newBuilder();
    dirEntry = OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // the second one should pad out to the 64 KiB block boundary
    assertEquals(64 * 1024, dirEntry.getOffset());
    assertEquals(62 * 1024, dirEntry.getDataLength());
    long endOfStripe = dirEntry.getOffset() + dirEntry.getIndexLength() +
        dirEntry.getDataLength() + dirEntry.getFooterLength();
    writeBuffers(output, buffer, 3);
    footer = OrcProto.StripeFooter.newBuilder();
    dirEntry = OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // the third one should be over the padding limit, so no padding
    assertEquals(endOfStripe, dirEntry.getOffset());
    assertEquals(3 * 1024, dirEntry.getDataLength());
  }

  @Test
  public void testNoStripePadding() throws IOException {
    TypeDescription schema = TypeDescription.fromString("int");
    OrcFile.WriterOptions opts = OrcFile.writerOptions(conf)
        .blockPadding(false)
        .stripeSize(32 * 1024)
        .blockSize(64 * 1024)
        .compress(CompressionKind.NONE)
        .setSchema(schema);
    MemoryFileSystem fs = new MemoryFileSystem();
    PhysicalFsWriter writer =
        new PhysicalFsWriter(fs, new Path("test1.orc"), opts);
    writer.writeHeader();
    StreamName stream0 = new StreamName(0, OrcProto.Stream.Kind.DATA);
    PhysicalWriter.OutputReceiver output = writer.createDataStream(stream0);
    byte[] buffer = patternBuffer();
    writeBuffers(output, buffer, 63);
    OrcProto.StripeFooter.Builder footer = OrcProto.StripeFooter.newBuilder();
    OrcProto.StripeInformation.Builder dirEntry =
        OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // check to make sure that it laid it out without padding
    assertEquals(0L, dirEntry.getIndexLength());
    assertEquals(63 * 1024L, dirEntry.getDataLength());
    assertEquals(3, dirEntry.getOffset());
    long endOfStripe = dirEntry.getOffset() + dirEntry.getDataLength() +
        dirEntry.getFooterLength();
    writeBuffers(output, buffer, 62);
    footer = OrcProto.StripeFooter.newBuilder();
    dirEntry = OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // no padding, because we turned it off; the stripes are back to back
    assertEquals(endOfStripe, dirEntry.getOffset());
    assertEquals(62 * 1024, dirEntry.getDataLength());
  }

  /**
   * A HadoopShims stub that records the position at which a variable-length
   * block was ended, so the short-block test can verify where it happened.
   */
  static class MockHadoopShim implements HadoopShims {
    // Stream position of the most recent endVariableLengthBlock call,
    // or -1 if none has happened.
    long lastShortBlock = -1;

    @Override
    public DirectDecompressor getDirectDecompressor(
        DirectCompressionType codec) {
      return null;
    }

    @Override
    public ZeroCopyReaderShim getZeroCopyReader(FSDataInputStream in,
                                                ByteBufferPoolShim pool) {
      return null;
    }

    @Override
    public boolean endVariableLengthBlock(OutputStream output)
        throws IOException {
      if (output instanceof FSDataOutputStream) {
        lastShortBlock = ((FSDataOutputStream) output).getPos();
        return true;
      }
      return false;
    }

    @Override
    public KeyProvider getHadoopKeyProvider(Configuration conf,
                                            Random random) {
      return null;
    }
  }

  @Test
  public void testShortBlock() throws IOException {
    MockHadoopShim shim = new MockHadoopShim();
    TypeDescription schema = TypeDescription.fromString("int");
    OrcFile.WriterOptions opts = OrcFile.writerOptions(conf)
        .blockPadding(false)
        .stripeSize(32 * 1024)
        .blockSize(64 * 1024)
        .compress(CompressionKind.NONE)
        .setSchema(schema)
        .setShims(shim)
        .writeVariableLengthBlocks(true);
    MemoryFileSystem fs = new MemoryFileSystem();
    PhysicalFsWriter writer =
        new PhysicalFsWriter(fs, new Path("test1.orc"), opts);
    writer.writeHeader();
    StreamName stream0 = new StreamName(0, OrcProto.Stream.Kind.DATA);
    PhysicalWriter.OutputReceiver output = writer.createDataStream(stream0);
    byte[] buffer = patternBuffer();
    writeBuffers(output, buffer, 63);
    OrcProto.StripeFooter.Builder footer = OrcProto.StripeFooter.newBuilder();
    OrcProto.StripeInformation.Builder dirEntry =
        OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // check to make sure that it laid it out without padding
    assertEquals(0L, dirEntry.getIndexLength());
    assertEquals(63 * 1024L, dirEntry.getDataLength());
    assertEquals(3, dirEntry.getOffset());
    long endOfStripe = dirEntry.getOffset() + dirEntry.getDataLength() +
        dirEntry.getFooterLength();
    writeBuffers(output, buffer, 62);
    footer = OrcProto.StripeFooter.newBuilder();
    dirEntry = OrcProto.StripeInformation.newBuilder();
    writer.finalizeStripe(footer, dirEntry);
    // we should get a short block and no padding
    assertEquals(endOfStripe, dirEntry.getOffset());
    assertEquals(62 * 1024, dirEntry.getDataLength());
    assertEquals(endOfStripe, shim.lastShortBlock);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.planner.expressions.converter;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.expressions.TimePointUnit;
import org.apache.flink.table.module.ModuleManager;
import org.apache.flink.table.planner.delegation.PlannerContext;
import org.apache.flink.table.planner.plan.metadata.MetadataTestUtil;
import org.apache.flink.table.planner.plan.trait.FlinkRelDistributionTraitDef;
import org.apache.flink.table.utils.CatalogManagerMocks;

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.rel.RelCollationTraitDef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;
import org.apache.calcite.util.TimestampString;
import org.junit.Assert;
import org.junit.Test;

import java.math.BigDecimal;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.util.Arrays;

import static org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;

/**
 * Test for {@link ExpressionConverter}: checks that API value literals are
 * converted into Calcite {@link RexLiteral}s with the expected value,
 * {@link SqlTypeName}, precision, and scale.
 */
public class ExpressionConverterTest {

    private final TableConfig tableConfig = new TableConfig();
    private final CatalogManager catalogManager =
            CatalogManagerMocks.createEmptyCatalogManager();
    private final PlannerContext plannerContext =
            new PlannerContext(
                    tableConfig,
                    new FunctionCatalog(tableConfig, catalogManager, new ModuleManager()),
                    catalogManager,
                    CalciteSchema.from(MetadataTestUtil.initRootSchema()),
                    Arrays.asList(
                            ConventionTraitDef.INSTANCE,
                            FlinkRelDistributionTraitDef.INSTANCE(),
                            RelCollationTraitDef.INSTANCE));
    private final ExpressionConverter converter =
            new ExpressionConverter(
                    plannerContext.createRelBuilder(
                            CatalogManagerMocks.DEFAULT_CATALOG,
                            CatalogManagerMocks.DEFAULT_DATABASE));

    @Test
    public void testLiteral() {
        // Uses the same assertThat/equalTo style as the other tests in this
        // class for consistency (was previously Assert.assertEquals).
        RexNode rex = converter.visit(valueLiteral((byte) 1));
        assertThat(((RexLiteral) rex).getValueAs(Integer.class), equalTo(1));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.TINYINT));

        rex = converter.visit(valueLiteral((short) 1));
        assertThat(((RexLiteral) rex).getValueAs(Integer.class), equalTo(1));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.SMALLINT));

        rex = converter.visit(valueLiteral(1));
        assertThat(((RexLiteral) rex).getValueAs(Integer.class), equalTo(1));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.INTEGER));
    }

    @Test
    public void testCharLiteral() {
        // CHAR(4) literal is right-padded with spaces to the declared length.
        RexNode rex = converter.visit(valueLiteral("ABC", DataTypes.CHAR(4).notNull()));
        assertThat(((RexLiteral) rex).getValueAs(String.class), equalTo("ABC "));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.CHAR));
        assertThat(rex.getType().getPrecision(), equalTo(4));
    }

    @Test
    public void testVarCharLiteral() {
        RexNode rex = converter.visit(valueLiteral("ABC", DataTypes.STRING().notNull()));
        assertThat(((RexLiteral) rex).getValueAs(String.class), equalTo("ABC"));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.VARCHAR));
        assertThat(rex.getType().getPrecision(), equalTo(Integer.MAX_VALUE));
    }

    @Test
    public void testBinaryLiteral() {
        // BINARY(4) literal is zero-padded to the declared length.
        RexNode rex =
                converter.visit(valueLiteral(new byte[] {1, 2, 3}, DataTypes.BINARY(4).notNull()));
        assertThat(((RexLiteral) rex).getValueAs(byte[].class), equalTo(new byte[] {1, 2, 3, 0}));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.BINARY));
        assertThat(rex.getType().getPrecision(), equalTo(4));
    }

    @Test
    public void testTimestampLiteral() {
        // Fractional seconds are truncated to the declared precision of 3.
        RexNode rex =
                converter.visit(
                        valueLiteral(
                                LocalDateTime.parse("2012-12-12T12:12:12.12345"),
                                DataTypes.TIMESTAMP(3).notNull()));
        assertThat(
                ((RexLiteral) rex).getValueAs(TimestampString.class),
                equalTo(new TimestampString("2012-12-12 12:12:12.123")));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.TIMESTAMP));
        assertThat(rex.getType().getPrecision(), equalTo(3));
    }

    @Test
    public void testTimestampWithLocalZoneLiteral() {
        RexNode rex =
                converter.visit(
                        valueLiteral(
                                Instant.ofEpochMilli(100),
                                DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).notNull()));
        assertThat(
                ((RexLiteral) rex).getValueAs(TimestampString.class),
                equalTo(TimestampString.fromMillisSinceEpoch(100)));
        assertThat(
                rex.getType().getSqlTypeName(),
                equalTo(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
        assertThat(rex.getType().getPrecision(), equalTo(3));
    }

    @Test
    public void testTimeLiteral() {
        RexNode rex =
                converter.visit(
                        valueLiteral(
                                LocalTime.parse("12:12:12.12345"), DataTypes.TIME(2).notNull()));
        assertThat(
                ((RexLiteral) rex).getValueAs(TimeString.class),
                equalTo(new TimeString("12:12:12.12")));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.TIME));
        assertThat(rex.getType().getPrecision(), equalTo(2));
    }

    @Test
    public void testTimeLiteralBiggerPrecision() {
        RexNode rex =
                converter.visit(
                        valueLiteral(
                                LocalTime.parse("12:12:12.12345"), DataTypes.TIME(5).notNull()));
        // TODO planner supports up to TIME(3)
        assertThat(
                ((RexLiteral) rex).getValueAs(TimeString.class),
                equalTo(new TimeString("12:12:12.123")));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.TIME));
        assertThat(rex.getType().getPrecision(), equalTo(3));
    }

    @Test
    public void testDateLiteral() {
        RexNode rex =
                converter.visit(
                        valueLiteral(LocalDate.parse("2012-12-12"), DataTypes.DATE().notNull()));
        assertThat(
                ((RexLiteral) rex).getValueAs(DateString.class),
                equalTo(new DateString("2012-12-12")));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.DATE));
    }

    @Test
    public void testIntervalDayTime() {
        // Day-time intervals are represented as total milliseconds.
        Duration value = Duration.ofDays(3).plusMillis(21);
        RexNode rex = converter.visit(valueLiteral(value));
        assertThat(
                ((RexLiteral) rex).getValueAs(BigDecimal.class),
                equalTo(BigDecimal.valueOf(value.toMillis())));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.INTERVAL_DAY_SECOND));
        // TODO planner ignores the precision
        assertThat(rex.getType().getPrecision(), equalTo(2)); // day precision, should actually be 1
        assertThat(
                rex.getType().getScale(),
                equalTo(6)); // fractional precision, should actually be 3
    }

    @Test
    public void testIntervalYearMonth() {
        // Year-month intervals are represented as total months.
        Period value = Period.of(999, 3, 1);
        RexNode rex = converter.visit(valueLiteral(value));
        assertThat(
                ((RexLiteral) rex).getValueAs(BigDecimal.class),
                equalTo(BigDecimal.valueOf(value.toTotalMonths())));
        // TODO planner ignores the precision
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.INTERVAL_YEAR_MONTH));
        assertThat(
                rex.getType().getPrecision(), equalTo(2)); // year precision, should actually be 3
    }

    @Test
    public void testDecimalLiteral() {
        BigDecimal value = new BigDecimal("12345678.999");
        RexNode rex = converter.visit(valueLiteral(value));
        assertThat(((RexLiteral) rex).getValueAs(BigDecimal.class), equalTo(value));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.DECIMAL));
        assertThat(rex.getType().getPrecision(), equalTo(11));
        assertThat(rex.getType().getScale(), equalTo(3));
    }

    @Test
    public void testSymbolLiteral() {
        // API-level symbols map onto the equivalent Calcite enum symbol.
        RexNode rex = converter.visit(valueLiteral(TimePointUnit.MICROSECOND));
        assertThat(((RexLiteral) rex).getValueAs(TimeUnit.class), equalTo(TimeUnit.MICROSECOND));
        assertThat(rex.getType().getSqlTypeName(), equalTo(SqlTypeName.SYMBOL));
    }
}
package com.example.c301_w16_g5.c301_w16_g5;

import android.content.Context;
import android.net.ConnectivityManager;
import android.os.AsyncTask;
import android.util.Log;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;

/**
 * <code>SearchController</code> handles calls to objects and methods inside the
 * ElasticSearchBackend class.
 *
 * @author Alex
 * @version 1.4, 03/02/2016
 * @see Chicken
 * @see User
 * @see Notification
 * @see Bid
 * @see ElasticSearchBackend
 */
public class SearchController {

    // Chickens created while the device was offline, pending upload.
    private ArrayList<Chicken> offlineChickens;

    /* Offline Behaviour */

    public SearchController() {
        offlineChickens = new ArrayList<>();
    }

    /**
     * Runs the given backend task with one parameter and blocks for its
     * result. Returns {@code null} if the task is interrupted or fails,
     * matching the previous per-call error handling.
     *
     * @param task  the AsyncTask to execute
     * @param param the single parameter passed to the task
     * @return the task result, or null on failure
     */
    private <P, R> R runTask(AsyncTask<P, Void, R> task, P param) {
        task.execute(param);
        try {
            return task.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Serializes the pending offline chickens to the "chickens.sav" file.
     * Streams are closed via try-with-resources; failure to write is fatal.
     */
    private void saveOfflineChickens() {
        try (FileOutputStream fos =
                     ChickBidsApplication.getApp().openFileOutput("chickens.sav", 0);
             BufferedWriter out = new BufferedWriter(new OutputStreamWriter(fos))) {
            Gson gson = new Gson();
            gson.toJson(this.offlineChickens, out);
            out.flush();
        } catch (IOException e) {
            // Preserve the cause instead of throwing a bare RuntimeException.
            throw new RuntimeException(e);
        }
    }

    /**
     * Loads the pending offline chickens from the "chickens.sav" file.
     * A missing file leaves the current list untouched; an empty or corrupt
     * file is treated as an empty list rather than a null reference.
     */
    private void loadOfflineChickens() {
        try (FileInputStream fis =
                     ChickBidsApplication.getApp().openFileInput("chickens.sav");
             BufferedReader in = new BufferedReader(new InputStreamReader(fis))) {
            Gson gson = new Gson();
            // Taken from https://google-gson.googlecode.com/svn/trunk/gson/docs/javadocs/com/google/gson/Gson.html 01-19 2016
            Type listType = new TypeToken<ArrayList<Chicken>>() {}.getType();
            ArrayList<Chicken> loaded = gson.fromJson(in, listType);
            // Gson returns null for an empty file; never leave the field null.
            this.offlineChickens = loaded != null ? loaded : new ArrayList<Chicken>();
        } catch (IOException e) {
            Log.i("ERROR", "Chickens were not loaded from file");
        }
    }

    /**
     * Saves the given user to local storage under "&lt;username&gt;.sav".
     *
     * @param user the user to persist locally
     */
    public void saveUserOffline(User user) {
        try (FileOutputStream fos =
                     ChickBidsApplication.getApp()
                             .openFileOutput(user.getUsername() + ".sav", 0);
             BufferedWriter out = new BufferedWriter(new OutputStreamWriter(fos))) {
            Gson gson = new Gson();
            gson.toJson(user, out);
            out.flush();
        } catch (IOException e) {
            // Preserve the cause instead of throwing a bare RuntimeException.
            throw new RuntimeException(e);
        }
    }

    /**
     * Loads a user from local storage.
     *
     * @param username the username whose file is read
     * @return the stored user, or null if not found
     */
    public User loadUserOffline(String username) {
        User user = null;
        try (FileInputStream fis =
                     ChickBidsApplication.getApp().openFileInput(username + ".sav");
             BufferedReader in = new BufferedReader(new InputStreamReader(fis))) {
            Gson gson = new Gson();
            user = gson.fromJson(in, User.class);
        } catch (IOException e) {
            Log.i("ERROR", "User was not loaded from file");
        }
        return user;
    }

    /**
     * Updates the database with offline chicken push requests.
     *
     * @return list of chickens pushed to database
     */
    public ArrayList<Chicken> pushOfflineChickensToDatabase() {
        ArrayList<Chicken> chickens = new ArrayList<>();
        if (checkOnline()) {
            loadOfflineChickens();
            for (Chicken chicken : offlineChickens) {
                chickens.add(addChickenToDatabase(chicken));
            }
            offlineChickens.clear();
            saveOfflineChickens();
        }
        return chickens;
    }

    /* Searching */

    /**
     * Finds the chickens matching given keyword criteria.
     *
     * @return list of all chickens whose info contains the keyword
     */
    public ArrayList<Chicken> searchByKeyword(String keyword) {
        ArrayList<Chicken> chickens = new ArrayList<>();
        if (checkOnline()) {
            ElasticSearchBackend.SearchChickenTask searchTask =
                    new ElasticSearchBackend.SearchChickenTask();
            searchTask.execute(keyword);
            try {
                chickens = searchTask.get();
            } catch (InterruptedException | ExecutionException e) {
                e.printStackTrace();
            }
        }
        return chickens;
    }

    /* Chickens */

    /**
     * Gets all saved chickens from the Elasticsearch database.
     *
     * @return list of all chickens
     */
    public ArrayList<Chicken> getAllChickens() {
        ArrayList<Chicken> chickens = new ArrayList<>();
        if (checkOnline()) {
            ElasticSearchBackend.GetAllChickensTask searchTask =
                    new ElasticSearchBackend.GetAllChickensTask();
            searchTask.execute("");
            try {
                chickens = searchTask.get();
            } catch (InterruptedException | ExecutionException e) {
                e.printStackTrace();
            }
        }
        return chickens;
    }

    /**
     * Saves the given chicken as a new chicken in the Elasticsearch database.
     * When offline, the chicken is queued locally for a later push.
     *
     * @param chicken the chicken to add to the database
     * @return the saved chicken, or null if the online save failed
     */
    public Chicken addChickenToDatabase(Chicken chicken) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.AddChickenTask(), chicken);
        }
        // Offline: queue the chicken locally and echo it back.
        loadOfflineChickens();
        offlineChickens.add(chicken);
        saveOfflineChickens();
        return chicken;
    }

    /**
     * Updates the given existing chicken in the Elasticsearch database.
     *
     * @param chicken the chicken to update in the database
     * @return the updated chicken, or null if offline or the update failed
     */
    public Chicken updateChickenInDatabase(Chicken chicken) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.UpdateChickenTask(), chicken);
        }
        return null;
    }

    /**
     * Retrieves the chicken with the given ID from the Elasticsearch database.
     * ID is a unique identifier which allows chickens to be saved and
     * retrieved.
     *
     * @param id the id of the chicken to find
     * @return the matching chicken, or null if offline or not found
     */
    public Chicken getChickenFromDatabase(String id) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.GetChickenByIdTask(), id);
        }
        return null;
    }

    /**
     * Deletes the chicken with the given ID from the Elasticsearch database.
     * ID is a unique identifier which allows chickens to be saved and
     * retrieved.
     *
     * @param id the id of the chicken to delete
     */
    public void removeChickenFromDatabase(String id) {
        if (checkOnline()) {
            // Fire-and-forget: no result is awaited for deletes.
            new ElasticSearchBackend.DeleteChickenTask().execute(id);
        }
    }

    /* Users */

    /**
     * Saves the given user as a new user in the Elasticsearch database.
     * The user is always saved locally as well.
     *
     * @param user the user to add to the database
     */
    public void addUserToDatabase(User user) {
        saveUserOffline(user);
        if (checkOnline()) {
            new ElasticSearchBackend.AddUserTask().execute(user);
        }
    }

    /**
     * Updates the given existing user in the Elasticsearch database.
     * The user is always saved locally as well.
     *
     * @param user the user to update in the database
     */
    public void updateUserInDatabase(User user) {
        saveUserOffline(user);
        if (checkOnline()) {
            new ElasticSearchBackend.AddUserTask().execute(user);
        }
    }

    /**
     * Retrieves the user with the given username from the Elasticsearch
     * database. Username is a unique identifier of users. Falls back to the
     * local copy when offline.
     *
     * @param username the username of the user to find
     * @return the matching user, or null if not found
     */
    public User getUserFromDatabase(String username) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.GetUserByUsernameTask(), username);
        }
        return loadUserOffline(username);
    }

    /**
     * Deletes the user with the given username from the Elasticsearch
     * database. Username is a unique identifier of users.
     *
     * @param username the username of the user to delete
     */
    public void removeUserFromDatabase(String username) {
        if (checkOnline()) {
            new ElasticSearchBackend.DeleteUserTask().execute(username);
        }
    }

    /**
     * Updates the given existing user's username in the Elasticsearch database.
     *
     * <p>NOTE(review): unlike the other mutators this does not check
     * {@link #checkOnline()} before hitting the backend — confirm whether
     * that is intentional.
     *
     * @param user        the user to update in the database
     * @param oldUsername the user's previous username, currently saved
     */
    public void changeUsernameInDatabase(User user, String oldUsername) {
        new ElasticSearchBackend.AddUserTask().execute(user);
        new ElasticSearchBackend.DeleteUserTask().execute(oldUsername);
    }

    /* Notifications */

    /**
     * Saves the given notification in the Elasticsearch database.
     *
     * @param notification the notification to save
     * @return the saved notification, or null if offline or the save failed
     */
    public Notification addNotificationToDatabase(Notification notification) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.AddNotificationTask(), notification);
        }
        return null;
    }

    /**
     * Updates the given existing notification in the Elasticsearch database.
     *
     * @param notification the notification to update in the database
     * @return the updated notification, or null if offline or the update failed
     */
    public Notification updateNotificationInDatabase(Notification notification) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.UpdateNotificationTask(), notification);
        }
        return null;
    }

    /**
     * Retrieves the notification with the given ID from the Elasticsearch
     * database. ID is a unique identifier which allows notifications to be
     * saved and retrieved.
     *
     * @param id the id of the notification to find
     * @return the matching notification, or null if offline or not found
     */
    public Notification getNotificationFromDatabase(String id) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.GetNotificationByIdTask(), id);
        }
        return null;
    }

    /**
     * Deletes the notification with the given ID from the Elasticsearch
     * database. ID is a unique identifier which allows notifications to be
     * saved and retrieved.
     *
     * @param id the id of the notification to delete
     */
    public void removeNotificationFromDatabase(String id) {
        if (checkOnline()) {
            new ElasticSearchBackend.DeleteNotificationTask().execute(id);
        }
    }

    /* Bids */

    /**
     * Saves the given bid in the Elasticsearch database.
     *
     * @param bid the bid to save
     * @return the saved bid, or null if offline or the save failed
     */
    public Bid addBidToDatabase(Bid bid) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.AddBidTask(), bid);
        }
        return null;
    }

    /**
     * Updates the given existing bid in the Elasticsearch database.
     *
     * @param bid the bid to update in the database
     * @return the updated bid, or null if offline or the update failed
     */
    public Bid updateBidInDatabase(Bid bid) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.UpdateBidTask(), bid);
        }
        return null;
    }

    /**
     * Retrieves the bid with the given ID from the Elasticsearch database.
     * ID is a unique identifier which allows bids to be saved and
     * retrieved.
     *
     * @param id the id of the bid to find
     * @return the matching bid, or null if offline or not found
     */
    public Bid getBidFromDatabase(String id) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.GetBidByIdTask(), id);
        }
        return null;
    }

    /**
     * Deletes the bid with the given ID from the Elasticsearch database. ID
     * is a unique identifier which allows bids to be saved and retrieved.
     *
     * @param id the id of the bid to delete
     */
    public void removeBidFromDatabase(String id) {
        if (checkOnline()) {
            new ElasticSearchBackend.DeleteBidTask().execute(id);
        }
    }

    /* Location */

    /**
     * Saves the given location in the Elasticsearch database.
     *
     * @param location the location to save
     * @return the saved location, or null if offline or the save failed
     */
    public Location addLocationToDatabase(Location location) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.AddLocationTask(), location);
        }
        return null;
    }

    /**
     * Updates the given existing location in the Elasticsearch database.
     *
     * @param location the location to update in the database
     * @return the updated location, or null if offline or the update failed
     */
    public Location updateLocationInDatabase(Location location) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.UpdateLocationTask(), location);
        }
        return null;
    }

    /**
     * Retrieves the location with the given ID from the Elasticsearch
     * database. ID is a unique identifier which allows locations to be saved
     * and retrieved.
     *
     * @param id the id of the location to find
     * @return the matching location, or null if offline or not found
     */
    public Location getLocationFromDatabase(String id) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.GetLocationByIdTask(), id);
        }
        return null;
    }

    /**
     * Deletes the location with the given ID from the Elasticsearch database.
     * ID is a unique identifier which allows locations to be saved and retrieved.
     *
     * @param id the id of the location to delete
     */
    public void removeLocationFromDatabase(String id) {
        if (checkOnline()) {
            new ElasticSearchBackend.DeleteLocationTask().execute(id);
        }
    }

    /* Letter */

    /**
     * Saves the given letter in the Elasticsearch database.
     *
     * @param letter the letter to save
     * @return the saved letter, or null if offline or the save failed
     */
    public Letter addLetterToDatabase(Letter letter) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.AddLetterTask(), letter);
        }
        return null;
    }

    /**
     * Updates the given existing letter in the Elasticsearch database.
     *
     * @param letter the letter to update in the database
     * @return the updated letter, or null if offline or the update failed
     */
    public Letter updateLetterInDatabase(Letter letter) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.UpdateLetterTask(), letter);
        }
        return null;
    }

    /**
     * Retrieves the letter with the given ID from the Elasticsearch database.
     * ID is a unique identifier which allows letters to be saved and
     * retrieved.
     *
     * @param id the id of the letter to find
     * @return the matching letter, or null if offline or not found
     */
    public Letter getLetterFromDatabase(String id) {
        if (checkOnline()) {
            return runTask(new ElasticSearchBackend.GetLetterByIdTask(), id);
        }
        return null;
    }

    /**
     * Deletes the letter with the given ID from the Elasticsearch database.
     * ID is a unique identifier which allows letters to be saved and
     * retrieved.
     *
     * @param id the id of the letter to delete
     */
    public void removeLetterFromDatabase(String id) {
        if (checkOnline()) {
            new ElasticSearchBackend.DeleteLetterTask().execute(id);
        }
    }

    /**
     * Retrieves the letters of the given user from the Elasticsearch database.
     *
     * @param user the user whose letters are retrieved
     * @return the list of all letters of that user
     */
    public ArrayList<Letter> getLettersForUser(User user) {
        ArrayList<Letter> letters = new ArrayList<>();
        if (checkOnline()) {
            ElasticSearchBackend.GetLettersForUserTask getLettersTask =
                    new ElasticSearchBackend.GetLettersForUserTask();
            getLettersTask.execute(user);
            try {
                letters = getLettersTask.get();
            } catch (InterruptedException | ExecutionException e) {
                e.printStackTrace();
            }
        }
        return letters;
    }

    /**
     * Checks if a device is connected to the internet.
     *
     * @return true if connected, false otherwise
     */
    public boolean checkOnline() {
        // Taken from http://stackoverflow.com/questions/9570237/android-check-internet-connection
        // Answer by Seshu Vinay
        // Accessed by athompson0 on March 29 2016
        ConnectivityManager cm = (ConnectivityManager) ChickBidsApplication.getApp()
                .getSystemService(Context.CONNECTIVITY_SERVICE);
        return cm.getActiveNetworkInfo() != null;
    }
}
/* * 2012-3 Red Hat Inc. and/or its affiliates and other contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.overlord.rtgov.analytics.util; import static org.junit.Assert.*; import org.junit.Test; import org.overlord.rtgov.activity.model.ActivityUnit; import org.overlord.rtgov.activity.model.Context; import org.overlord.rtgov.activity.model.soa.RequestReceived; import org.overlord.rtgov.activity.model.soa.RequestSent; import org.overlord.rtgov.activity.model.soa.ResponseReceived; import org.overlord.rtgov.activity.model.soa.ResponseSent; import org.overlord.rtgov.analytics.service.InterfaceDefinition; import org.overlord.rtgov.analytics.service.InvocationDefinition; import org.overlord.rtgov.analytics.service.InvocationMetric; import org.overlord.rtgov.analytics.service.OperationDefinition; import org.overlord.rtgov.analytics.service.RequestFaultDefinition; import org.overlord.rtgov.analytics.service.RequestResponseDefinition; import org.overlord.rtgov.analytics.service.ServiceDefinition; import org.overlord.rtgov.analytics.util.ServiceDefinitionUtil; public class ServiceDefinitionUtilTest { private static final String FAULT_1 = "fault1"; private static final String OPERATION_1 = "op1"; private static final String OPERATION_2 = "op2"; private static final String INTERFACE_1="intf1"; private static final String INTERFACE_2="intf2"; private static final String SERVICE_TYPE_1="st1"; private static final String SERVICE_TYPE_2="st2"; @Test public void 
testSerializeServiceDefiniton() {
    // Build a definition with one interface/operation carrying normal
    // response metrics, one external invocation and one fault.
    ServiceDefinition st1=new ServiceDefinition();
    st1.setServiceType(SERVICE_TYPE_1);

    InterfaceDefinition intf1=new InterfaceDefinition();
    intf1.setInterface(INTERFACE_1);
    st1.getInterfaces().add(intf1);

    OperationDefinition op1=new OperationDefinition();
    intf1.getOperations().add(op1);
    op1.setName(OPERATION_1);

    RequestResponseDefinition nrd1=new RequestResponseDefinition();
    nrd1.getMetrics().setCount(10);
    nrd1.getMetrics().setAverage(1000);
    nrd1.getMetrics().setMin(500);
    nrd1.getMetrics().setMax(1500);
    nrd1.getMetrics().setCountChange(+5);
    nrd1.getMetrics().setAverageChange(+2);
    nrd1.getMetrics().setMinChange(-5);
    nrd1.getMetrics().setMaxChange(+20);
    op1.setRequestResponse(nrd1);

    InvocationDefinition id1=new InvocationDefinition();
    id1.setInterface(INTERFACE_2);
    id1.setOperation(OPERATION_2);
    id1.getMetrics().setCount(10);
    id1.getMetrics().setAverage(500);
    id1.getMetrics().setMin(250);
    id1.getMetrics().setMax(750);
    nrd1.getInvocations().add(id1);

    RequestFaultDefinition frd1=new RequestFaultDefinition();
    frd1.setFault("fault1");
    frd1.getMetrics().setCount(20);
    frd1.getMetrics().setFaults(20);
    frd1.getMetrics().setAverage(2000);
    frd1.getMetrics().setMin(1500);
    frd1.getMetrics().setMax(2500);
    frd1.getMetrics().setCountChange(-10);
    frd1.getMetrics().setAverageChange(+6);
    frd1.getMetrics().setMinChange(0);
    frd1.getMetrics().setMaxChange(+10);
    op1.getRequestFaults().add(frd1);

    // Serialization should succeed without throwing.
    try {
        byte[] b = ServiceDefinitionUtil.serializeServiceDefinition(st1);

        System.out.println("SERVICE DEFINITION: "+new String(b));
    } catch (Exception e) {
        e.printStackTrace();
        fail("Failed to serialize");
    }
}

/**
 * Round-trips a service definition through serialize/deserialize and checks
 * that the service type, interface and operation survive.
 */
@Test
public void testDeserializeServiceDefiniton() {
    ServiceDefinition st1=new ServiceDefinition();
    st1.setServiceType(SERVICE_TYPE_1);

    InterfaceDefinition intf1=new InterfaceDefinition();
    intf1.setInterface(INTERFACE_1);
    st1.getInterfaces().add(intf1);

    OperationDefinition op1=new OperationDefinition();
    intf1.getOperations().add(op1);
    op1.setName(OPERATION_1);

    RequestResponseDefinition nrd1=new RequestResponseDefinition();
    nrd1.getMetrics().setCount(10);
    nrd1.getMetrics().setFaults(0);
    nrd1.getMetrics().setAverage(1000);
    nrd1.getMetrics().setMin(500);
    nrd1.getMetrics().setMax(1500);
    nrd1.getMetrics().setCountChange(+5);
    nrd1.getMetrics().setAverageChange(+2);
    nrd1.getMetrics().setMinChange(-5);
    nrd1.getMetrics().setMaxChange(+20);
    op1.setRequestResponse(nrd1);

    RequestFaultDefinition frd1=new RequestFaultDefinition();
    frd1.setFault("fault1");
    frd1.getMetrics().setCount(20);
    frd1.getMetrics().setFaults(20);
    frd1.getMetrics().setAverage(2000);
    frd1.getMetrics().setMin(1500);
    frd1.getMetrics().setMax(2500);
    frd1.getMetrics().setCountChange(-10);
    frd1.getMetrics().setAverageChange(+6);
    frd1.getMetrics().setMinChange(0);
    frd1.getMetrics().setMaxChange(+10);
    op1.getRequestFaults().add(frd1);

    byte[] b=null;

    try {
        b = ServiceDefinitionUtil.serializeServiceDefinition(st1);
    } catch (Exception e) {
        e.printStackTrace();
        fail("Failed to serialize");
    }

    ServiceDefinition result=null;

    try {
        result = ServiceDefinitionUtil.deserializeServiceDefinition(b);
    } catch (Exception e) {
        e.printStackTrace();
        // NOTE(review): message says "serialize" but this is the
        // deserialization step.
        fail("Failed to serialize");
    }

    if (result == null) {
        fail("Failed to deserialize service definition");
    }

    if (!result.getServiceType().equals(st1.getServiceType())) {
        fail("Service type mismatch");
    }

    if (result.getInterfaces().size() != 1) {
        fail("Expecting 1 interface: "+result.getInterfaces().size());
    }

    InterfaceDefinition idresult=result.getInterfaces().get(0);

    if (!idresult.getInterface().equals(intf1.getInterface())) {
        fail("Operation mismatch");
    }

    if (idresult.getOperations().size() != 1) {
        fail("Expecting 1 operation: "+idresult.getOperations().size());
    }

    OperationDefinition opresult=idresult.getOperations().get(0);

    if (!opresult.getName().equals(op1.getName())) {
        fail("Operation mismatch");
    }
}

/**
 * Derives a service definition from a single request/response pair and
 * checks the recorded invocation metrics.
 */
@Test
public void testSingleServiceInvokedNormal() {
    // Create example activity events
    ActivityUnit au=new ActivityUnit();

    RequestReceived
rqr1=new RequestReceived(); rqr1.setServiceType(SERVICE_TYPE_1); rqr1.setInterface(INTERFACE_1); rqr1.setOperation(OPERATION_1); rqr1.setMessageId("1"); rqr1.setTimestamp(10); au.getActivityTypes().add(rqr1); ResponseSent rps1=new ResponseSent(); rps1.setServiceType(SERVICE_TYPE_1); rps1.setInterface(INTERFACE_1); rps1.setOperation(OPERATION_1); rps1.setMessageId("2"); rps1.setReplyToId("1"); rps1.setTimestamp(20); au.getActivityTypes().add(rps1); // Create service definition java.util.Collection<ServiceDefinition> sdefs= ServiceDefinitionUtil.derive(au); if (sdefs.size() != 1) { fail("One definition expected: "+sdefs.size()); } ServiceDefinition sdef=sdefs.iterator().next(); if (!sdef.getServiceType().equals(SERVICE_TYPE_1)) { fail("Service type incorrect"); } if (sdef.getInterfaces().size() != 1) { fail("Expecting 1 interface: "+sdef.getInterfaces().size()); } InterfaceDefinition idef=sdef.getInterfaces().get(0); if (idef.getOperations().size() != 1) { fail("Only 1 operation expected: "+idef.getOperations().size()); } OperationDefinition op=idef.getOperation(OPERATION_1); if (op == null) { fail("Failed to retrieve op"); } if (op.getRequestResponse() == null) { fail("Request/response not found"); } if (op.getRequestFaults().size() > 0) { fail("No faults should have occurred"); } if (op.getRequestResponse().getInvocations().size() > 0) { fail("No external invocations expected"); } InvocationMetric metrics=op.getRequestResponse().getMetrics(); if (metrics.getAverage() != 10) { fail("Average not 10: "+metrics.getAverage()); } if (metrics.getMin() != 10) { fail("Min not 10: "+metrics.getMin()); } if (metrics.getMax() != 10) { fail("Max not 10: "+metrics.getMax()); } if (metrics.getCount() != 1) { fail("Count not 1: "+metrics.getCount()); } } @Test public void testSingleServiceInvokedNormalWithContext() { // Create example activity events ActivityUnit au=new ActivityUnit(); RequestReceived rqr1=new RequestReceived(); rqr1.setUnitId("unit1"); rqr1.setUnitIndex(1); 
rqr1.setServiceType(SERVICE_TYPE_1);
    rqr1.setInterface(INTERFACE_1);
    rqr1.setOperation(OPERATION_1);
    rqr1.setMessageId("1");
    rqr1.setTimestamp(10);

    // Request carries context c1; the response carries c1 and c2.
    Context c1=new Context(Context.Type.Conversation, "c1");
    rqr1.getContext().add(c1);

    au.getActivityTypes().add(rqr1);

    ResponseSent rps1=new ResponseSent();
    rps1.setUnitId("unit1");
    rps1.setUnitIndex(2);
    rps1.setServiceType(SERVICE_TYPE_1);
    rps1.setInterface(INTERFACE_1);
    rps1.setOperation(OPERATION_1);
    rps1.setMessageId("2");
    rps1.setReplyToId("1");
    rps1.setTimestamp(20);
    rps1.getContext().add(c1);

    Context c2=new Context(Context.Type.Conversation, "c2");
    rps1.getContext().add(c2);

    au.getActivityTypes().add(rps1);

    // Create service definition
    java.util.Collection<ServiceDefinition> sdefs=
            ServiceDefinitionUtil.derive(au);

    if (sdefs.size() != 1) {
        fail("One definition expected: "+sdefs.size());
    }

    ServiceDefinition sdef=sdefs.iterator().next();

    // Check contexts - cardinality is same as combined activities, minus the 1 common context
    if (sdef.getContext().size() != (rqr1.getContext().size()+rps1.getContext().size()-1)) {
        fail("Unexpected number of contexts: "+sdef.getContext().size());
    }

    if (!sdef.getContext().contains(c1)) {
        fail("Context does not contain c1");
    }

    if (!sdef.getContext().contains(c2)) {
        fail("Context does not contain c2");
    }

    InterfaceDefinition idef=sdef.getInterface(INTERFACE_1);

    if (idef == null) {
        fail("Failed to get interface defn");
    }

    OperationDefinition op=idef.getOperation(OPERATION_1);

    RequestResponseDefinition rrd=op.getRequestResponse();

    if (rrd.getRequestId() == null) {
        fail("Request id not set");
    }

    if (rrd.getResponseId() == null) {
        fail("Response id not set");
    }
}

/**
 * Derives metrics from two request/response pairs on the same operation and
 * checks count/min/max/average aggregation.
 */
@Test
public void testDoubleServiceInvokedNormal() {
    // Create example activity events
    ActivityUnit au=new ActivityUnit();

    RequestReceived rqr1=new RequestReceived();
    rqr1.setServiceType(SERVICE_TYPE_1);
    rqr1.setOperation(OPERATION_1);
    rqr1.setInterface(INTERFACE_1);
    rqr1.setMessageId("1");
    rqr1.setTimestamp(10);

    au.getActivityTypes().add(rqr1);

    ResponseSent rps1=new ResponseSent();
    rps1.setServiceType(SERVICE_TYPE_1);
    rps1.setOperation(OPERATION_1);
    rps1.setInterface(INTERFACE_1);
    rps1.setMessageId("2");
    rps1.setReplyToId("1");
    rps1.setTimestamp(20);

    au.getActivityTypes().add(rps1);

    RequestReceived rqr2=new RequestReceived();
    rqr2.setServiceType(SERVICE_TYPE_1);
    rqr2.setOperation(OPERATION_1);
    rqr2.setInterface(INTERFACE_1);
    rqr2.setMessageId("3");
    rqr2.setTimestamp(30);

    au.getActivityTypes().add(rqr2);

    ResponseSent rps2=new ResponseSent();
    rps2.setServiceType(SERVICE_TYPE_1);
    rps2.setOperation(OPERATION_1);
    rps2.setInterface(INTERFACE_1);
    rps2.setMessageId("4");
    rps2.setReplyToId("3");
    rps2.setTimestamp(50);

    au.getActivityTypes().add(rps2);

    // Create service definition
    java.util.Collection<ServiceDefinition> sdefs=
            ServiceDefinitionUtil.derive(au);

    if (sdefs.size() != 1) {
        fail("One definition expected: "+sdefs.size());
    }

    ServiceDefinition sdef=sdefs.iterator().next();

    if (!sdef.getServiceType().equals(SERVICE_TYPE_1)) {
        fail("Service type incorrect");
    }

    InterfaceDefinition idef=sdef.getInterface(INTERFACE_1);

    if (idef == null) {
        fail("Interface defn not found");
    }

    if (idef.getOperations().size() != 1) {
        fail("Only 1 operation expected: "+idef.getOperations().size());
    }

    OperationDefinition op=idef.getOperation(OPERATION_1);

    if (op == null) {
        fail("Failed to retrieve op");
    }

    if (op.getRequestResponse() == null) {
        fail("Request/response not found");
    }

    if (op.getRequestFaults().size() > 0) {
        fail("No faults should have occurred");
    }

    if (op.getRequestResponse().getInvocations().size() > 0) {
        fail("No external invocations expected");
    }

    InvocationMetric metrics=op.getRequestResponse().getMetrics();

    // Durations are 10 and 20 -> average 15, min 10, max 20, count 2.
    if (metrics.getAverage() != 15) {
        fail("Average not 15: "+metrics.getAverage());
    }

    if (metrics.getMin() != 10) {
        fail("Min not 10: "+metrics.getMin());
    }

    if (metrics.getMax() != 20) {
        fail("Max not 20: "+metrics.getMax());
    }

    if (metrics.getCount() != 2) {
        fail("Count not 2: "+metrics.getCount());
    }
}

/**
 * Derives a definition from a request answered with a fault and checks the
 * fault metrics.
 */
@Test
public void testSingleServiceInvokedFault() {
    // Create example activity events
    ActivityUnit au=new ActivityUnit();

    RequestReceived rqr1=new RequestReceived();
    rqr1.setServiceType(SERVICE_TYPE_1);
    rqr1.setOperation(OPERATION_1);
    rqr1.setInterface(INTERFACE_1);
    rqr1.setMessageId("1");
    rqr1.setTimestamp(10);

    au.getActivityTypes().add(rqr1);

    ResponseSent rps1=new ResponseSent();
    rps1.setServiceType(SERVICE_TYPE_1);
    rps1.setOperation(OPERATION_1);
    rps1.setInterface(INTERFACE_1);
    rps1.setFault(FAULT_1);
    rps1.setMessageId("2");
    rps1.setReplyToId("1");
    rps1.setTimestamp(20);

    au.getActivityTypes().add(rps1);

    // Create service definition
    java.util.Collection<ServiceDefinition> sdefs=
            ServiceDefinitionUtil.derive(au);

    if (sdefs.size() != 1) {
        fail("One definition expected: "+sdefs.size());
    }

    ServiceDefinition sdef=sdefs.iterator().next();

    if (!sdef.getServiceType().equals(SERVICE_TYPE_1)) {
        fail("Service type incorrect");
    }

    InterfaceDefinition idef=sdef.getInterface(INTERFACE_1);

    if (idef == null) {
        fail("Failed to get interface defn");
    }

    if (idef.getOperations().size() != 1) {
        fail("Only 1 operation expected: "+idef.getOperations().size());
    }

    OperationDefinition op=idef.getOperation(OPERATION_1);

    if (op == null) {
        fail("Failed to retrieve op");
    }

    // A faulted exchange is recorded under request-faults, not request/response.
    if (op.getRequestResponse() != null) {
        fail("Request/response should be null");
    }

    if (op.getRequestFaults().size() != 1) {
        fail("One fault should have occurred");
    }

    RequestFaultDefinition rfd=op.getRequestFault(FAULT_1);

    if (rfd == null) {
        fail("Failed to retrieve fault");
    }

    if (rfd.getInvocations().size() > 0) {
        fail("No external invocations expected");
    }

    InvocationMetric metrics=rfd.getMetrics();

    if (metrics.getAverage() != 10) {
        fail("Average not 10: "+metrics.getAverage());
    }

    if (metrics.getMin() != 10) {
        fail("Min not 10: "+metrics.getMin());
    }

    if (metrics.getMax() != 10) {
        fail("Max not 10: "+metrics.getMax());
    }

    if (metrics.getCount() != 1) {
        fail("Count not 1: "+metrics.getCount());
    }

    if
(metrics.getFaults() != 1) { fail("Faults not 1: "+metrics.getFaults()); } } @Test public void testServiceInvokedWithExternalInvocations() { // Create example activity events ActivityUnit au=new ActivityUnit(); RequestReceived rqr1=new RequestReceived(); rqr1.setServiceType(SERVICE_TYPE_1); rqr1.setInterface(INTERFACE_1); rqr1.setOperation(OPERATION_1); rqr1.setMessageId("1"); rqr1.setTimestamp(10); au.getActivityTypes().add(rqr1); RequestSent rqs2=new RequestSent(); rqs2.setServiceType(SERVICE_TYPE_2); rqs2.setInterface(INTERFACE_2); rqs2.setOperation(OPERATION_2); rqs2.setMessageId("2"); rqs2.setTimestamp(15); au.getActivityTypes().add(rqs2); ResponseReceived rpr2=new ResponseReceived(); rpr2.setServiceType(SERVICE_TYPE_2); rpr2.setInterface(INTERFACE_2); rpr2.setOperation(OPERATION_2); rpr2.setMessageId("3"); rpr2.setReplyToId("2"); rpr2.setTimestamp(21); au.getActivityTypes().add(rpr2); RequestSent rqs3=new RequestSent(); rqs3.setServiceType(SERVICE_TYPE_2); rqs3.setInterface(INTERFACE_2); rqs3.setOperation(OPERATION_2); rqs3.setMessageId("4"); rqs3.setTimestamp(24); au.getActivityTypes().add(rqs3); ResponseReceived rpr3=new ResponseReceived(); rpr3.setServiceType(SERVICE_TYPE_2); rpr3.setInterface(INTERFACE_2); rpr3.setOperation(OPERATION_2); rpr3.setMessageId("5"); rpr3.setReplyToId("4"); rpr3.setTimestamp(36); au.getActivityTypes().add(rpr3); ResponseSent rps1=new ResponseSent(); rps1.setServiceType(SERVICE_TYPE_1); rps1.setInterface(INTERFACE_1); rps1.setOperation(OPERATION_1); rps1.setMessageId("6"); rps1.setReplyToId("1"); rps1.setTimestamp(40); au.getActivityTypes().add(rps1); // Create service definition java.util.Collection<ServiceDefinition> sdefs= ServiceDefinitionUtil.derive(au); if (sdefs.size() != 1) { fail("One definition expected: "+sdefs.size()); } ServiceDefinition sdef=sdefs.iterator().next(); if (!sdef.getServiceType().equals(SERVICE_TYPE_1)) { fail("Service type incorrect"); } InterfaceDefinition idef=sdef.getInterface(INTERFACE_1); if 
(idef == null) {
        fail("Failed to get interface defn");
    }

    if (idef.getOperations().size() != 1) {
        fail("Only 1 operation expected: "+idef.getOperations().size());
    }

    OperationDefinition op=idef.getOperation(OPERATION_1);

    if (op == null) {
        fail("Failed to retrieve op");
    }

    if (op.getRequestResponse() == null) {
        fail("Request/response not found");
    }

    if (op.getRequestFaults().size() > 0) {
        fail("No faults should have occurred");
    }

    if (op.getRequestResponse().getInvocations().size() != 1) {
        fail("One external invocations expected");
    }

    InvocationDefinition id=op.getRequestResponse().getInvocation(INTERFACE_2, OPERATION_2, null);

    if (id == null) {
        fail("Failed to get invocation definition");
    }

    InvocationMetric metrics=id.getMetrics();

    // External call durations are 6 (21-15) and 12 (36-24).
    if (metrics.getAverage() != 9) {
        fail("Average not 9: "+metrics.getAverage());
    }

    if (metrics.getMin() != 6) {
        fail("Min not 6: "+metrics.getMin());
    }

    if (metrics.getMax() != 12) {
        fail("Max not 12: "+metrics.getMax());
    }

    if (metrics.getCount() != 2) {
        fail("Count not 2: "+metrics.getCount());
    }

    if (metrics.getFaults() != 0) {
        fail("Faults not 0: "+metrics.getFaults());
    }
}

/**
 * Same shape as testServiceInvokedWithExternalInvocations, but the external
 * exchanges carry a fault, so fault metrics are recorded on the invocation.
 */
@Test
public void testServiceInvokedWithExternalFaultInvocations() {
    // Create example activity events
    ActivityUnit au=new ActivityUnit();

    RequestReceived rqr1=new RequestReceived();
    rqr1.setServiceType(SERVICE_TYPE_1);
    rqr1.setInterface(INTERFACE_1);
    rqr1.setOperation(OPERATION_1);
    rqr1.setMessageId("1");
    rqr1.setTimestamp(10);

    au.getActivityTypes().add(rqr1);

    RequestSent rqs2=new RequestSent();
    rqs2.setServiceType(SERVICE_TYPE_2);
    rqs2.setInterface(INTERFACE_2);
    rqs2.setOperation(OPERATION_2);
    rqs2.setFault(FAULT_1);
    rqs2.setMessageId("2");
    rqs2.setTimestamp(15);

    au.getActivityTypes().add(rqs2);

    ResponseReceived rpr2=new ResponseReceived();
    rpr2.setServiceType(SERVICE_TYPE_2);
    rpr2.setInterface(INTERFACE_2);
    rpr2.setOperation(OPERATION_2);
    rpr2.setFault(FAULT_1);
    rpr2.setMessageId("3");
    rpr2.setReplyToId("2");
    rpr2.setTimestamp(21);

    au.getActivityTypes().add(rpr2);

    RequestSent rqs3=new RequestSent();
    rqs3.setServiceType(SERVICE_TYPE_2);
    rqs3.setInterface(INTERFACE_2);
    rqs3.setOperation(OPERATION_2);
    rqs3.setFault(FAULT_1);
    rqs3.setMessageId("4");
    rqs3.setTimestamp(24);

    au.getActivityTypes().add(rqs3);

    ResponseReceived rpr3=new ResponseReceived();
    rpr3.setServiceType(SERVICE_TYPE_2);
    rpr3.setInterface(INTERFACE_2);
    rpr3.setOperation(OPERATION_2);
    rpr3.setFault(FAULT_1);
    rpr3.setMessageId("5");
    rpr3.setReplyToId("4");
    rpr3.setTimestamp(36);

    au.getActivityTypes().add(rpr3);

    ResponseSent rps1=new ResponseSent();
    rps1.setServiceType(SERVICE_TYPE_1);
    rps1.setInterface(INTERFACE_1);
    rps1.setOperation(OPERATION_1);
    rps1.setMessageId("6");
    rps1.setReplyToId("1");
    rps1.setTimestamp(40);

    au.getActivityTypes().add(rps1);

    // Create service definition
    java.util.Collection<ServiceDefinition> sdefs=
            ServiceDefinitionUtil.derive(au);

    if (sdefs.size() != 1) {
        fail("One definition expected: "+sdefs.size());
    }

    ServiceDefinition sdef=sdefs.iterator().next();

    if (!sdef.getServiceType().equals(SERVICE_TYPE_1)) {
        fail("Service type incorrect");
    }

    InterfaceDefinition idef=sdef.getInterface(INTERFACE_1);

    if (idef == null) {
        fail("Failed to get interface defn");
    }

    if (idef.getOperations().size() != 1) {
        fail("Only 1 operation expected: "+idef.getOperations().size());
    }

    OperationDefinition op=idef.getOperation(OPERATION_1);

    if (op == null) {
        fail("Failed to retrieve op");
    }

    if (op.getRequestResponse() == null) {
        fail("Request/response not found");
    }

    if (op.getRequestFaults().size() > 0) {
        fail("No faults should have occurred");
    }

    if (op.getRequestResponse().getInvocations().size() != 1) {
        fail("One external invocations expected");
    }

    InvocationDefinition id=op.getRequestResponse().getInvocation(INTERFACE_2, OPERATION_2, FAULT_1);

    if (id == null) {
        fail("Failed to get invocation definition");
    }

    InvocationMetric metrics=id.getMetrics();

    if (metrics.getAverage() != 9) {
        fail("Average not 9: "+metrics.getAverage());
    }

    if (metrics.getMin() != 6) {
        fail("Min not 6: "+metrics.getMin());
    }

    if (metrics.getMax() != 12) {
        fail("Max not 12: "+metrics.getMax());
    }

    if (metrics.getCount() != 2) {
        fail("Count not 2: "+metrics.getCount());
    }

    if (metrics.getFaults() != 2) {
        fail("Faults not 2: "+metrics.getFaults());
    }
}

/**
 * The external target service also reports its own request/response pair,
 * so two service definitions are derived from the single activity unit.
 */
@Test
public void testServiceInvokedWithExternalInvocationsAndOtherService() {
    // Create example activity events
    ActivityUnit au=new ActivityUnit();

    RequestReceived rqr1=new RequestReceived();
    rqr1.setServiceType(SERVICE_TYPE_1);
    rqr1.setInterface(INTERFACE_1);
    rqr1.setOperation(OPERATION_1);
    rqr1.setMessageId("1");
    rqr1.setTimestamp(10);

    au.getActivityTypes().add(rqr1);

    RequestSent rqs2=new RequestSent();
    rqs2.setServiceType(SERVICE_TYPE_2);
    rqs2.setInterface(INTERFACE_2);
    rqs2.setOperation(OPERATION_2);
    rqs2.setMessageId("2");
    rqs2.setTimestamp(15);

    au.getActivityTypes().add(rqs2);

    RequestReceived rqr3=new RequestReceived();
    rqr3.setServiceType(SERVICE_TYPE_2);
    rqr3.setInterface(INTERFACE_2);
    rqr3.setOperation(OPERATION_2);
    rqr3.setMessageId("3");
    rqr3.setTimestamp(21);

    au.getActivityTypes().add(rqr3);

    ResponseSent rps3=new ResponseSent();
    rps3.setServiceType(SERVICE_TYPE_2);
    rps3.setInterface(INTERFACE_2);
    rps3.setOperation(OPERATION_2);
    rps3.setMessageId("4");
    rps3.setReplyToId("3");
    rps3.setTimestamp(24);

    au.getActivityTypes().add(rps3);

    ResponseReceived rpr2=new ResponseReceived();
    rpr2.setServiceType(SERVICE_TYPE_2);
    rpr2.setInterface(INTERFACE_2);
    rpr2.setOperation(OPERATION_2);
    rpr2.setMessageId("5");
    rpr2.setReplyToId("2");
    rpr2.setTimestamp(36);

    au.getActivityTypes().add(rpr2);

    ResponseSent rps1=new ResponseSent();
    rps1.setServiceType(SERVICE_TYPE_1);
    rps1.setInterface(INTERFACE_1);
    rps1.setOperation(OPERATION_1);
    rps1.setMessageId("6");
    rps1.setReplyToId("1");
    rps1.setTimestamp(40);

    au.getActivityTypes().add(rps1);

    // Create service definition
    java.util.Collection<ServiceDefinition> sdefs=
            ServiceDefinitionUtil.derive(au);

    if (sdefs.size() != 2) {
        fail("Two definition expected: "+sdefs.size());
    }
ServiceDefinition sdef1=null; ServiceDefinition sdef2=null; for (ServiceDefinition sd : sdefs) { if (sd.getServiceType().equals(SERVICE_TYPE_1)) { sdef1 = sd; } else if (sd.getServiceType().equals(SERVICE_TYPE_2)) { sdef2 = sd; } } if (sdef1 == null) { fail("Service type 1 definition not found"); } if (sdef2 == null) { fail("Service type 2 definition not found"); } if (!sdef1.getServiceType().equals(SERVICE_TYPE_1)) { fail("Interface 1 incorrect"); } if (!sdef2.getServiceType().equals(SERVICE_TYPE_2)) { fail("Interface 2 incorrect"); } if (sdef1.getInterfaces().size() != 1) { fail("Only 1 interface expected for def 1: "+sdef1.getInterfaces().size()); } if (sdef2.getInterfaces().size() != 1) { fail("Only 1 interface expected for def 2: "+sdef2.getInterfaces().size()); } InterfaceDefinition idef1=sdef1.getInterfaces().get(0); InterfaceDefinition idef2=sdef2.getInterfaces().get(0); if (idef1.getOperations().size() != 1) { fail("Only 1 operation expected for def 1: "+idef1.getOperations().size()); } if (idef2.getOperations().size() != 1) { fail("Only 1 operation expected for def 2: "+idef2.getOperations().size()); } OperationDefinition op1=idef1.getOperation(OPERATION_1); if (op1 == null) { fail("Failed to retrieve op"); } if (op1.getRequestResponse() == null) { fail("Request/response not found"); } if (op1.getRequestFaults().size() > 0) { fail("No faults should have occurred"); } if (op1.getRequestResponse().getInvocations().size() != 1) { fail("One external invocations expected"); } InvocationDefinition id1=op1.getRequestResponse().getInvocation(INTERFACE_2, SERVICE_TYPE_2, OPERATION_2, null); if (id1 == null) { fail("Failed to get invocation definition 1"); } InvocationMetric metrics1=id1.getMetrics(); if (metrics1.getAverage() != 21) { fail("Average not 21: "+metrics1.getAverage()); } if (metrics1.getMin() != 21) { fail("Min not 21: "+metrics1.getMin()); } if (metrics1.getMax() != 21) { fail("Max not 21: "+metrics1.getMax()); } if (metrics1.getCount() != 1) { 
fail("Count not 1: "+metrics1.getCount()); } // Check external invoked operation details OperationDefinition op2=idef2.getOperation(OPERATION_2); if (op2 == null) { fail("Failed to retrieve op 2"); } if (op2.getRequestResponse() == null) { fail("Request/response not found"); } if (op2.getRequestFaults().size() > 0) { fail("No faults should have occurred"); } if (op2.getRequestResponse().getInvocations().size() != 0) { fail("No external invocations expected"); } InvocationMetric metrics2=op2.getRequestResponse().getMetrics(); if (metrics2.getAverage() != 3) { fail("Average not 3: "+metrics2.getAverage()); } if (metrics2.getMin() != 3) { fail("Min not 3: "+metrics2.getMin()); } if (metrics2.getMax() != 3) { fail("Max not 3: "+metrics2.getMax()); } if (metrics2.getCount() != 1) { fail("Count not 1: "+metrics2.getCount()); } } @Test public void testMergeSnapshots() { ServiceDefinition st1=new ServiceDefinition(); st1.setServiceType(SERVICE_TYPE_1); st1.getContext().add(new Context(Context.Type.Conversation, "c1")); InterfaceDefinition idef1=new InterfaceDefinition(); idef1.setInterface(INTERFACE_1); st1.getInterfaces().add(idef1); OperationDefinition op1=new OperationDefinition(); idef1.getOperations().add(op1); op1.setName(OPERATION_1); RequestResponseDefinition nrd1=new RequestResponseDefinition(); nrd1.getMetrics().setCount(10); nrd1.getMetrics().setFaults(0); nrd1.getMetrics().setAverage(1000); nrd1.getMetrics().setMin(500); nrd1.getMetrics().setMax(1500); nrd1.getMetrics().setCountChange(+5); nrd1.getMetrics().setAverageChange(+2); nrd1.getMetrics().setMinChange(-5); nrd1.getMetrics().setMaxChange(+20); op1.setRequestResponse(nrd1); RequestFaultDefinition frd1=new RequestFaultDefinition(); frd1.setFault("fault1"); frd1.getMetrics().setCount(20); frd1.getMetrics().setFaults(20); frd1.getMetrics().setAverage(2000); frd1.getMetrics().setMin(1500); frd1.getMetrics().setMax(2500); frd1.getMetrics().setCountChange(-10); frd1.getMetrics().setAverageChange(+6); 
frd1.getMetrics().setMinChange(0); frd1.getMetrics().setMaxChange(+10); op1.getRequestFaults().add(frd1); ServiceDefinition st2=new ServiceDefinition(); st2.setServiceType(SERVICE_TYPE_2); st2.getContext().add(new Context(Context.Type.Conversation, "c2")); InterfaceDefinition idef2=new InterfaceDefinition(); idef2.setInterface(INTERFACE_2); st2.getInterfaces().add(idef2); OperationDefinition op2=new OperationDefinition(); idef2.getOperations().add(op2); op2.setName(OPERATION_2); RequestResponseDefinition nrd2=new RequestResponseDefinition(); nrd2.getMetrics().setCount(10); nrd2.getMetrics().setFaults(0); nrd2.getMetrics().setAverage(1000); nrd2.getMetrics().setMin(500); nrd2.getMetrics().setMax(1500); nrd2.getMetrics().setCountChange(+5); nrd2.getMetrics().setAverageChange(+2); nrd2.getMetrics().setMinChange(-5); nrd2.getMetrics().setMaxChange(+20); op2.setRequestResponse(nrd1); RequestFaultDefinition frd2=new RequestFaultDefinition(); frd2.setFault("fault2"); frd2.getMetrics().setCount(20); frd2.getMetrics().setFaults(20); frd2.getMetrics().setAverage(2000); frd2.getMetrics().setMin(1500); frd2.getMetrics().setMax(2500); frd2.getMetrics().setCountChange(-10); frd2.getMetrics().setAverageChange(+6); frd2.getMetrics().setMinChange(0); frd2.getMetrics().setMaxChange(+10); op2.getRequestFaults().add(frd2); ServiceDefinition st3=new ServiceDefinition(); st3.setServiceType(SERVICE_TYPE_1); st3.getContext().add(new Context(Context.Type.Conversation, "c3")); InterfaceDefinition idef3=new InterfaceDefinition(); idef3.setInterface(INTERFACE_1); st3.getInterfaces().add(idef3); OperationDefinition op3=new OperationDefinition(); idef3.getOperations().add(op3); op3.setName(OPERATION_1); RequestResponseDefinition nrd3=new RequestResponseDefinition(); nrd3.getMetrics().setCount(5); nrd3.getMetrics().setFaults(0); nrd3.getMetrics().setAverage(500); nrd3.getMetrics().setMin(250); nrd3.getMetrics().setMax(750); nrd3.getMetrics().setCountChange(+2); 
nrd3.getMetrics().setAverageChange(+1); nrd3.getMetrics().setMinChange(-2); nrd3.getMetrics().setMaxChange(+10); op3.setRequestResponse(nrd3); RequestFaultDefinition frd3=new RequestFaultDefinition(); frd3.setFault("fault3"); frd3.getMetrics().setCount(20); frd3.getMetrics().setFaults(20); frd3.getMetrics().setAverage(2000); frd3.getMetrics().setMin(1500); frd3.getMetrics().setMax(2500); frd3.getMetrics().setCountChange(-10); frd3.getMetrics().setAverageChange(+6); frd3.getMetrics().setMinChange(0); frd3.getMetrics().setMaxChange(+10); op3.getRequestFaults().add(frd3); RequestFaultDefinition frd4=new RequestFaultDefinition(); frd4.setFault("fault1"); frd4.getMetrics().setCount(20); frd4.getMetrics().setFaults(20); frd4.getMetrics().setAverage(2000); frd4.getMetrics().setMin(1500); frd4.getMetrics().setMax(2500); frd4.getMetrics().setCountChange(-10); frd4.getMetrics().setAverageChange(+6); frd4.getMetrics().setMinChange(0); frd4.getMetrics().setMaxChange(+10); op3.getRequestFaults().add(frd4); java.util.Map<String,ServiceDefinition> sds1=new java.util.HashMap<String,ServiceDefinition>(); sds1.put(st1.getServiceType(), st1); sds1.put(st2.getServiceType(), st2); java.util.Map<String,ServiceDefinition> sds2=new java.util.HashMap<String,ServiceDefinition>(); sds2.put(st3.getServiceType(), st3); java.util.List<java.util.Map<String,ServiceDefinition>> list= new java.util.ArrayList<java.util.Map<String,ServiceDefinition>>(); list.add(sds1); list.add(sds2); java.util.Map<String,ServiceDefinition> merged=ServiceDefinitionUtil.mergeSnapshots(list, false); if (merged == null) { fail("No merged results"); } if (merged.size() != 2) { fail("Two service defintions expected"); } ServiceDefinition sd1=merged.get(SERVICE_TYPE_1); ServiceDefinition sd2=merged.get(SERVICE_TYPE_2); if (sd1 == null) { fail("SD1 is null"); } if (sd2 == null) { fail("SD2 is null"); } if (sd1.getContext().size() != 0) { fail("SD1 No context should be retained"); } if (sd2.getContext().size() != 0) { 
fail("SD2 No context should be retained"); } InterfaceDefinition idef1res=sd1.getInterface(INTERFACE_1); if (idef1res == null) { fail("Failed to get interface defn1"); } InterfaceDefinition idef2res=sd2.getInterface(INTERFACE_2); if (idef2res == null) { fail("Failed to get interface defn2"); } if (idef1res.getOperations().size() != 1) { fail("SD1 ops should be 1: "+idef1res.getOperations().size()); } if (idef2res.getOperations().size() != 1) { fail("SD2 ops should be 1: "+idef2res.getOperations().size()); } OperationDefinition opd1=idef1res.getOperations().get(0); OperationDefinition opd2=idef2res.getOperations().get(0); if (opd1.getRequestFaults().size() != 2) { fail("OP1 should have two faults: "+opd1.getRequestFaults().size()); } if (opd2.getRequestFaults().size() != 1) { fail("OP2 should have 1 fault: "+opd2.getRequestFaults().size()); } if (opd1.getRequestResponse().getMetrics().getCount() != 15) { fail("Expecting count 15: "+opd1.getRequestResponse().getMetrics().getCount()); } if (opd1.getRequestFaults().get(0).getMetrics().getFaults() != 40) { fail("Expecting faults 40: "+opd1.getRequestFaults().get(0).getMetrics().getFaults()); } } @Test public void testMergeSnapshotsWithContext() { ServiceDefinition st1=new ServiceDefinition(); st1.setServiceType(SERVICE_TYPE_1); st1.getContext().add(new Context(Context.Type.Conversation, "c1")); InterfaceDefinition idef1=new InterfaceDefinition(); idef1.setInterface(INTERFACE_1); OperationDefinition op1=new OperationDefinition(); idef1.getOperations().add(op1); op1.setName(OPERATION_1); RequestResponseDefinition nrd1=new RequestResponseDefinition(); nrd1.getMetrics().setCount(10); nrd1.getMetrics().setAverage(1000); nrd1.getMetrics().setMin(500); nrd1.getMetrics().setMax(1500); nrd1.getMetrics().setCountChange(+5); nrd1.getMetrics().setAverageChange(+2); nrd1.getMetrics().setMinChange(-5); nrd1.getMetrics().setMaxChange(+20); op1.setRequestResponse(nrd1); RequestFaultDefinition frd1=new RequestFaultDefinition(); 
frd1.setFault("fault1"); frd1.getMetrics().setCount(20); frd1.getMetrics().setAverage(2000); frd1.getMetrics().setMin(1500); frd1.getMetrics().setMax(2500); frd1.getMetrics().setCountChange(-10); frd1.getMetrics().setAverageChange(+6); frd1.getMetrics().setMinChange(0); frd1.getMetrics().setMaxChange(+10); op1.getRequestFaults().add(frd1); ServiceDefinition st2=new ServiceDefinition(); st2.setServiceType(SERVICE_TYPE_1); // Use same service type to support merge st2.getContext().add(new Context(Context.Type.Conversation, "c2")); InterfaceDefinition idef2=new InterfaceDefinition(); idef2.setInterface(INTERFACE_1); // Use same interface to support merge st2.getInterfaces().add(idef2); OperationDefinition op2=new OperationDefinition(); idef2.getOperations().add(op2); op2.setName(OPERATION_2); RequestResponseDefinition nrd2=new RequestResponseDefinition(); nrd2.getMetrics().setCount(10); nrd2.getMetrics().setAverage(1000); nrd2.getMetrics().setMin(500); nrd2.getMetrics().setMax(1500); nrd2.getMetrics().setCountChange(+5); nrd2.getMetrics().setAverageChange(+2); nrd2.getMetrics().setMinChange(-5); nrd2.getMetrics().setMaxChange(+20); op2.setRequestResponse(nrd1); RequestFaultDefinition frd2=new RequestFaultDefinition(); frd2.setFault("fault2"); frd2.getMetrics().setCount(20); frd2.getMetrics().setAverage(2000); frd2.getMetrics().setMin(1500); frd2.getMetrics().setMax(2500); frd2.getMetrics().setCountChange(-10); frd2.getMetrics().setAverageChange(+6); frd2.getMetrics().setMinChange(0); frd2.getMetrics().setMaxChange(+10); op2.getRequestFaults().add(frd2); ServiceDefinition st3=new ServiceDefinition(); st3.setServiceType(SERVICE_TYPE_1); // Use same service type for merge st3.getContext().add(new Context(Context.Type.Conversation, "c3")); InterfaceDefinition idef3=new InterfaceDefinition(); idef3.setInterface(INTERFACE_1); // Use same interface to support merge st3.getInterfaces().add(idef3); OperationDefinition op3=new OperationDefinition(); 
idef3.getOperations().add(op3); op3.setName(OPERATION_1); RequestResponseDefinition nrd3=new RequestResponseDefinition(); nrd3.getMetrics().setCount(5); nrd3.getMetrics().setAverage(500); nrd3.getMetrics().setMin(250); nrd3.getMetrics().setMax(750); nrd3.getMetrics().setCountChange(+2); nrd3.getMetrics().setAverageChange(+1); nrd3.getMetrics().setMinChange(-2); nrd3.getMetrics().setMaxChange(+10); op3.setRequestResponse(nrd3); RequestFaultDefinition frd3=new RequestFaultDefinition(); frd3.setFault("fault3"); frd3.getMetrics().setCount(20); frd3.getMetrics().setAverage(2000); frd3.getMetrics().setMin(1500); frd3.getMetrics().setMax(2500); frd3.getMetrics().setCountChange(-10); frd3.getMetrics().setAverageChange(+6); frd3.getMetrics().setMinChange(0); frd3.getMetrics().setMaxChange(+10); op3.getRequestFaults().add(frd3); java.util.Map<String,ServiceDefinition> sds1=new java.util.HashMap<String,ServiceDefinition>(); sds1.put(st1.getServiceType(), st1); java.util.Map<String,ServiceDefinition> sds2=new java.util.HashMap<String,ServiceDefinition>(); sds2.put(st2.getServiceType(), st2); java.util.Map<String,ServiceDefinition> sds3=new java.util.HashMap<String,ServiceDefinition>(); sds3.put(st3.getServiceType(), st3); java.util.List<java.util.Map<String,ServiceDefinition>> list= new java.util.ArrayList<java.util.Map<String,ServiceDefinition>>(); list.add(sds1); list.add(sds2); list.add(sds3); java.util.Map<String,ServiceDefinition> merged=ServiceDefinitionUtil.mergeSnapshots(list, true); if (merged == null) { fail("No merged results"); } if (merged.size() != 1) { fail("One service defintion expected"); } ServiceDefinition sd=merged.get(SERVICE_TYPE_1); if (sd == null) { fail("SD is null"); } if (sd.getContext().size() != 3) { fail("Expecting 3 context to be retained: "+sd.getContext().size()); } } // RTGOV-557 @Test public void testSeviceDefinitionInterfaceNull() { RequestReceived rqr=new RequestReceived(); rqr.setServiceType(SERVICE_TYPE_1); try { 
ServiceDefinitionUtil.processServiceInvoked(new java.util.HashMap<String,ServiceDefinition>(), null, rqr, null); } catch (Exception e) { fail("Failed to process request received"); } } }
package com.dalsps; import android.content.ContentProvider; import android.content.ContentUris; import android.content.ContentValues; import android.content.Context; import android.content.UriMatcher; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.database.sqlite.SQLiteQueryBuilder; import android.net.Uri; import android.provider.BaseColumns; import android.text.TextUtils; import android.util.Log; public class StatusProvider extends ContentProvider { private static final String TAG = "StatusProvider"; private TimelineHelper dbHelper; private static final String DB_NAME = "timeline.db"; private static final int DB_VERSION = 3; private static final String T_TIMELINE = "timeline"; public static final String AUTHORITY = "com.marakana.android.yamba"; // The content:// style URL for this table public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/status"); // The MIME type providing a single status public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/vnd.marakana.status"; // The MIME type providing a set of statuses public static final String CONTENT_DIR_TYPE = "vnd.android.cursor.dir/vnd.marakana.status"; // Constants to help differentiate between the URI requests private static final int STATUS_ITEM = 1; private static final int STATUS_DIR = 2; private static final UriMatcher uriMatcher; // Static initializer, allocating a UriMatcher object. A URI ending in "/status" is a // request for all statuses, and a URI ending in "/status/<id>" refers to a single status. static { uriMatcher = new UriMatcher(UriMatcher.NO_MATCH); uriMatcher.addURI(AUTHORITY, "status", STATUS_DIR); uriMatcher.addURI(AUTHORITY, "status/#", STATUS_ITEM); } /* * Column names. In this simple example, the publicly exposed column names are * the same as those used in the database table. 
We could also define a Map of * external keys to internal columns if we needed to preserve an existing * external interface while refactoring the internal table implementation. */ public static final String KEY_ID = BaseColumns._ID; public static final String KEY_USER = "user"; public static final String KEY_MESSAGE = "message"; public static final String KEY_CREATED_AT = "created_at"; // Define default sort order for queries private static final String DEFAULT_SORT_ORDER = KEY_CREATED_AT + " desc"; // Helper class for opening, creating, and upgrading the database private class TimelineHelper extends SQLiteOpenHelper { private static final String DB_CREATE = "create table " + T_TIMELINE + " ( " + KEY_ID + " integer primary key, " + KEY_USER + " text, " + KEY_MESSAGE + " text, " + KEY_CREATED_AT + " integer " + ");" ; public TimelineHelper(Context context) { super(context, DB_NAME, null, DB_VERSION); } @Override public void onCreate(SQLiteDatabase db) { Log.d(TAG, "Creating database"); db.execSQL(DB_CREATE); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { Log.d(TAG, "Upgrading database from version " + oldVersion + " to version " + newVersion); db.execSQL("drop table if exists " + T_TIMELINE); onCreate(db); } } // Identify the MIME types we provide for a given URI @Override public String getType(Uri uri) { switch (uriMatcher.match(uri)) { case STATUS_DIR: return CONTENT_DIR_TYPE; case STATUS_ITEM: return CONTENT_ITEM_TYPE; default: throw new IllegalArgumentException("Unknown URI " + uri); } } @Override public boolean onCreate() { Context context = getContext(); // The onCreate() method runs in the looper thread. We don't want to block it, // so we won't invoke getWritableDatabase() here -- it could cause on upgrade // of an existing database, which would be time consuming. // Instead, we invoke getWritableDatabase() in the CRUD methods, because // Android automatically invokes them in worker threads in the CP process. 
// The client might block when invoking a CRUD method, but that's its problem; // the client should ideally invoke them from its own worker thread. dbHelper = new TimelineHelper(context); return (dbHelper == null) ? false : true; } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sort) { SQLiteDatabase db = dbHelper.getWritableDatabase(); // A convenience class to help build the query SQLiteQueryBuilder qb = new SQLiteQueryBuilder(); qb.setTables(T_TIMELINE); // If this is a request for an individual status, limit the result set to that ID switch (uriMatcher.match(uri)) { case STATUS_DIR: break; case STATUS_ITEM: qb.appendWhere(KEY_ID + "=" + uri.getPathSegments().get(1)); break; default: throw new IllegalArgumentException("Unsupported URI: " + uri); } // Use our default sort order if none was specified String orderBy = TextUtils.isEmpty(sort) ? DEFAULT_SORT_ORDER : sort; // Query the underlying database Cursor c = qb.query(db, projection, selection, selectionArgs, null, null, orderBy); // Notify the context's ContentResolver if the cursor result set changes c.setNotificationUri(getContext().getContentResolver(), uri); // Return the cursor to the result set return c; } @Override public Uri insert(Uri uri, ContentValues initialValues) { // Validate the requested Uri if (uriMatcher.match(uri) != STATUS_DIR) { throw new IllegalArgumentException("Unsupported URI: " + uri); } // Insert the new row, returning the row number if successful // or throwing an exception SQLiteDatabase db = dbHelper.getWritableDatabase(); long rowID = db.insertOrThrow(T_TIMELINE, null, initialValues); // Return a URI to the newly created row on success Uri newUri = ContentUris.withAppendedId(CONTENT_URI, rowID); // Notify the Context's ContentResolver of the change getContext().getContentResolver().notifyChange(newUri, null); return newUri; } @Override public int delete(Uri uri, String where, String[] whereArgs) { SQLiteDatabase db = 
dbHelper.getWritableDatabase(); int count; switch (uriMatcher.match(uri)) { case STATUS_DIR: count = db.delete(T_TIMELINE, where, whereArgs); break; case STATUS_ITEM: String segment = uri.getPathSegments().get(1); String whereClause = KEY_ID + "=" + segment + (!TextUtils.isEmpty(where) ? " AND (" + where + ')' : ""); count = db.delete(T_TIMELINE, whereClause, whereArgs); break; default: throw new IllegalArgumentException("Unsupported URI: " + uri); } // Notify the Context's ContentResolver of the change getContext().getContentResolver().notifyChange(uri, null); return count; } @Override public int update(Uri uri, ContentValues values, String where, String[] whereArgs) { SQLiteDatabase db = dbHelper.getWritableDatabase(); int count; switch (uriMatcher.match(uri)) { case STATUS_DIR: count = db.update(T_TIMELINE, values, where, whereArgs); break; case STATUS_ITEM: String segment = uri.getPathSegments().get(1); String whereClause = KEY_ID + "=" + segment + (!TextUtils.isEmpty(where) ? " AND (" + where + ')' : ""); count = db.update(T_TIMELINE, values, whereClause, whereArgs); break; default: throw new IllegalArgumentException("Unsupported URI: " + uri); } // Notify the Context's ContentResolver of the change getContext().getContentResolver().notifyChange(uri, null); return count; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.vectorized; import java.math.BigDecimal; import java.util.*; import org.apache.spark.memory.MemoryMode; import org.apache.spark.sql.catalyst.InternalRow; import org.apache.spark.sql.catalyst.expressions.GenericInternalRow; import org.apache.spark.sql.catalyst.expressions.UnsafeRow; import org.apache.spark.sql.catalyst.util.ArrayData; import org.apache.spark.sql.catalyst.util.MapData; import org.apache.spark.sql.types.*; import org.apache.spark.unsafe.types.CalendarInterval; import org.apache.spark.unsafe.types.UTF8String; /** * This class is the in memory representation of rows as they are streamed through operators. It * is designed to maximize CPU efficiency and not storage footprint. Since it is expected that * each operator allocates one of these objects, the storage footprint on the task is negligible. * * The layout is a columnar with values encoded in their native format. Each RowBatch contains * a horizontal partitioning of the data, split into columns. * * The ColumnarBatch supports either on heap or offheap modes with (mostly) the identical API. * * TODO: * - There are many TODOs for the existing APIs. They should throw a not implemented exception. 
* - Compaction: The batch and columns should be able to compact based on a selection vector. */ public final class ColumnarBatch { private static final int DEFAULT_BATCH_SIZE = 4 * 1024; private static MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.ON_HEAP; private final StructType schema; private final int capacity; private int numRows; private final ColumnVector[] columns; // True if the row is filtered. private final boolean[] filteredRows; // Column indices that cannot have null values. private final Set<Integer> nullFilteredColumns; // Total number of rows that have been filtered. private int numRowsFiltered = 0; // Staging row returned from getRow. final Row row; public static ColumnarBatch allocate(StructType schema, MemoryMode memMode) { return new ColumnarBatch(schema, DEFAULT_BATCH_SIZE, memMode); } public static ColumnarBatch allocate(StructType type) { return new ColumnarBatch(type, DEFAULT_BATCH_SIZE, DEFAULT_MEMORY_MODE); } public static ColumnarBatch allocate(StructType schema, MemoryMode memMode, int maxRows) { return new ColumnarBatch(schema, maxRows, memMode); } /** * Called to close all the columns in this batch. It is not valid to access the data after * calling this. This must be called at the end to clean up memory allocations. */ public void close() { for (ColumnVector c: columns) { c.close(); } } /** * Adapter class to interop with existing components that expect internal row. A lot of * performance is lost with this translation. */ public static final class Row extends InternalRow { protected int rowId; private final ColumnarBatch parent; private final int fixedLenRowSize; private final ColumnVector[] columns; // Ctor used if this is a top level row. private Row(ColumnarBatch parent) { this.parent = parent; this.fixedLenRowSize = UnsafeRow.calculateFixedPortionByteSize(parent.numCols()); this.columns = parent.columns; } // Ctor used if this is a struct. 
protected Row(ColumnVector[] columns) { this.parent = null; this.fixedLenRowSize = UnsafeRow.calculateFixedPortionByteSize(columns.length); this.columns = columns; } /** * Marks this row as being filtered out. This means a subsequent iteration over the rows * in this batch will not include this row. */ public void markFiltered() { parent.markFiltered(rowId); } public ColumnVector[] columns() { return columns; } @Override public int numFields() { return columns.length; } @Override /** * Revisit this. This is expensive. This is currently only used in test paths. */ public InternalRow copy() { GenericInternalRow row = new GenericInternalRow(columns.length); for (int i = 0; i < numFields(); i++) { if (isNullAt(i)) { row.setNullAt(i); } else { DataType dt = columns[i].dataType(); if (dt instanceof BooleanType) { row.setBoolean(i, getBoolean(i)); } else if (dt instanceof ByteType) { row.setByte(i, getByte(i)); } else if (dt instanceof ShortType) { row.setShort(i, getShort(i)); } else if (dt instanceof IntegerType) { row.setInt(i, getInt(i)); } else if (dt instanceof LongType) { row.setLong(i, getLong(i)); } else if (dt instanceof FloatType) { row.setFloat(i, getFloat(i)); } else if (dt instanceof DoubleType) { row.setDouble(i, getDouble(i)); } else if (dt instanceof StringType) { row.update(i, getUTF8String(i)); } else if (dt instanceof BinaryType) { row.update(i, getBinary(i)); } else if (dt instanceof DecimalType) { DecimalType t = (DecimalType)dt; row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), t.precision()); } else if (dt instanceof DateType) { row.setInt(i, getInt(i)); } else if (dt instanceof TimestampType) { row.setLong(i, getLong(i)); } else { throw new RuntimeException("Not implemented. 
" + dt); } } } return row; } @Override public boolean anyNull() { throw new UnsupportedOperationException(); } @Override public boolean isNullAt(int ordinal) { return columns[ordinal].isNullAt(rowId); } @Override public boolean getBoolean(int ordinal) { return columns[ordinal].getBoolean(rowId); } @Override public byte getByte(int ordinal) { return columns[ordinal].getByte(rowId); } @Override public short getShort(int ordinal) { return columns[ordinal].getShort(rowId); } @Override public int getInt(int ordinal) { return columns[ordinal].getInt(rowId); } @Override public long getLong(int ordinal) { return columns[ordinal].getLong(rowId); } @Override public float getFloat(int ordinal) { return columns[ordinal].getFloat(rowId); } @Override public double getDouble(int ordinal) { return columns[ordinal].getDouble(rowId); } @Override public Decimal getDecimal(int ordinal, int precision, int scale) { if (columns[ordinal].isNullAt(rowId)) return null; return columns[ordinal].getDecimal(rowId, precision, scale); } @Override public UTF8String getUTF8String(int ordinal) { if (columns[ordinal].isNullAt(rowId)) return null; return columns[ordinal].getUTF8String(rowId); } @Override public byte[] getBinary(int ordinal) { if (columns[ordinal].isNullAt(rowId)) return null; return columns[ordinal].getBinary(rowId); } @Override public CalendarInterval getInterval(int ordinal) { if (columns[ordinal].isNullAt(rowId)) return null; final int months = columns[ordinal].getChildColumn(0).getInt(rowId); final long microseconds = columns[ordinal].getChildColumn(1).getLong(rowId); return new CalendarInterval(months, microseconds); } @Override public InternalRow getStruct(int ordinal, int numFields) { if (columns[ordinal].isNullAt(rowId)) return null; return columns[ordinal].getStruct(rowId); } @Override public ArrayData getArray(int ordinal) { if (columns[ordinal].isNullAt(rowId)) return null; return columns[ordinal].getArray(rowId); } @Override public MapData getMap(int ordinal) { throw new 
UnsupportedOperationException(); } @Override public Object get(int ordinal, DataType dataType) { throw new UnsupportedOperationException(); } @Override public void update(int ordinal, Object value) { if (value == null) { setNullAt(ordinal); } else { DataType dt = columns[ordinal].dataType(); if (dt instanceof BooleanType) { setBoolean(ordinal, (boolean) value); } else if (dt instanceof IntegerType) { setInt(ordinal, (int) value); } else if (dt instanceof ShortType) { setShort(ordinal, (short) value); } else if (dt instanceof LongType) { setLong(ordinal, (long) value); } else if (dt instanceof FloatType) { setFloat(ordinal, (float) value); } else if (dt instanceof DoubleType) { setDouble(ordinal, (double) value); } else if (dt instanceof DecimalType) { DecimalType t = (DecimalType) dt; setDecimal(ordinal, Decimal.apply((BigDecimal) value, t.precision(), t.scale()), t.precision()); } else { throw new UnsupportedOperationException("Datatype not supported " + dt); } } } @Override public void setNullAt(int ordinal) { assert (!columns[ordinal].isConstant); columns[ordinal].putNull(rowId); } @Override public void setBoolean(int ordinal, boolean value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putBoolean(rowId, value); } @Override public void setByte(int ordinal, byte value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putByte(rowId, value); } @Override public void setShort(int ordinal, short value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putShort(rowId, value); } @Override public void setInt(int ordinal, int value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putInt(rowId, value); } @Override public void setLong(int ordinal, long value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putLong(rowId, value); } @Override public 
void setFloat(int ordinal, float value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putFloat(rowId, value); } @Override public void setDouble(int ordinal, double value) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putDouble(rowId, value); } @Override public void setDecimal(int ordinal, Decimal value, int precision) { assert (!columns[ordinal].isConstant); columns[ordinal].putNotNull(rowId); columns[ordinal].putDecimal(rowId, value, precision); } } /** * Returns an iterator over the rows in this batch. This skips rows that are filtered out. */ public Iterator<Row> rowIterator() { final int maxRows = ColumnarBatch.this.numRows(); final Row row = new Row(this); return new Iterator<Row>() { int rowId = 0; @Override public boolean hasNext() { while (rowId < maxRows && ColumnarBatch.this.filteredRows[rowId]) { ++rowId; } return rowId < maxRows; } @Override public Row next() { while (rowId < maxRows && ColumnarBatch.this.filteredRows[rowId]) { ++rowId; } if (rowId >= maxRows) { throw new NoSuchElementException(); } row.rowId = rowId++; return row; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } /** * Resets the batch for writing. */ public void reset() { for (int i = 0; i < numCols(); ++i) { columns[i].reset(); } if (this.numRowsFiltered > 0) { Arrays.fill(filteredRows, false); } this.numRows = 0; this.numRowsFiltered = 0; } /** * Sets the number of rows that are valid. Additionally, marks all rows as "filtered" if one or * more of their attributes are part of a non-nullable column. 
*/ public void setNumRows(int numRows) { assert(numRows <= this.capacity); this.numRows = numRows; for (int ordinal : nullFilteredColumns) { if (columns[ordinal].numNulls != 0) { for (int rowId = 0; rowId < numRows; rowId++) { if (!filteredRows[rowId] && columns[ordinal].isNullAt(rowId)) { filteredRows[rowId] = true; ++numRowsFiltered; } } } } } /** * Returns the number of columns that make up this batch. */ public int numCols() { return columns.length; } /** * Returns the number of rows for read, including filtered rows. */ public int numRows() { return numRows; } /** * Returns the number of valid rows. */ public int numValidRows() { assert(numRowsFiltered <= numRows); return numRows - numRowsFiltered; } /** * Returns the max capacity (in number of rows) for this batch. */ public int capacity() { return capacity; } /** * Returns the column at `ordinal`. */ public ColumnVector column(int ordinal) { return columns[ordinal]; } /** * Sets (replaces) the column at `ordinal` with column. This can be used to do very efficient * projections. */ public void setColumn(int ordinal, ColumnVector column) { if (column instanceof OffHeapColumnVector) { throw new UnsupportedOperationException("Need to ref count columns."); } columns[ordinal] = column; } /** * Returns the row in this batch at `rowId`. Returned row is reused across calls. */ public ColumnarBatch.Row getRow(int rowId) { assert(rowId >= 0); assert(rowId < numRows); row.rowId = rowId; return row; } /** * Marks this row as being filtered out. This means a subsequent iteration over the rows * in this batch will not include this row. */ public void markFiltered(int rowId) { assert(!filteredRows[rowId]); filteredRows[rowId] = true; ++numRowsFiltered; } /** * Marks a given column as non-nullable. Any row that has a NULL value for the corresponding * attribute is filtered out. 
*/ public void filterNullsInColumn(int ordinal) { nullFilteredColumns.add(ordinal); } private ColumnarBatch(StructType schema, int maxRows, MemoryMode memMode) { this.schema = schema; this.capacity = maxRows; this.columns = new ColumnVector[schema.size()]; this.nullFilteredColumns = new HashSet<>(); this.filteredRows = new boolean[maxRows]; for (int i = 0; i < schema.fields().length; ++i) { StructField field = schema.fields()[i]; columns[i] = ColumnVector.allocate(maxRows, field.dataType(), memMode); } this.row = new Row(this); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.nio.channels.FileChannel; import java.util.Random; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.server.common.GenerationStamp; import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume; import junit.framework.TestCase; /** * Tests {@link DirectoryScanner} handling of differences * between blocks on the disk and block in memory. 
*/ public class TestDirectoryScanner extends TestCase { private static final Log LOG = LogFactory.getLog(TestDirectoryScanner.class); private static final Configuration CONF = new HdfsConfiguration(); private static final int DEFAULT_GEN_STAMP = 9999; private MiniDFSCluster cluster; private FSDataset fds = null; private DirectoryScanner scanner = null; private Random rand = new Random(); private Random r = new Random(); static { CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 100); CONF.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, 1); CONF.setLong("dfs.heartbeat.interval", 1L); } /** create a file with a length of <code>fileLen</code> */ private void createFile(String fileName, long fileLen) throws IOException { FileSystem fs = cluster.getFileSystem(); Path filePath = new Path(fileName); DFSTestUtil.createFile(fs, filePath, fileLen, (short) 1, r.nextLong()); } /** Truncate a block file */ private long truncateBlockFile() throws IOException { synchronized (fds) { for (ReplicaInfo b : fds.volumeMap.replicas()) { File f = b.getBlockFile(); File mf = b.getMetaFile(); // Truncate a block file that has a corresponding metadata file if (f.exists() && f.length() != 0 && mf.exists()) { FileOutputStream s = new FileOutputStream(f); FileChannel channel = s.getChannel(); channel.truncate(0); LOG.info("Truncated block file " + f.getAbsolutePath()); return b.getBlockId(); } } } return 0; } /** Delete a block file */ private long deleteBlockFile() { synchronized(fds) { for (ReplicaInfo b : fds.volumeMap.replicas()) { File f = b.getBlockFile(); File mf = b.getMetaFile(); // Delete a block file that has corresponding metadata file if (f.exists() && mf.exists() && f.delete()) { LOG.info("Deleting block file " + f.getAbsolutePath()); return b.getBlockId(); } } } return 0; } /** Delete block meta file */ private long deleteMetaFile() { synchronized(fds) { for (ReplicaInfo b : fds.volumeMap.replicas()) { File file = b.getMetaFile(); // Delete a metadata file if (file.exists() && 
file.delete()) { LOG.info("Deleting metadata file " + file.getAbsolutePath()); return b.getBlockId(); } } } return 0; } /** Get a random blockId that is not used already */ private long getFreeBlockId() { long id = rand.nextLong(); while (true) { id = rand.nextLong(); if (fds.fetchReplicaInfo(id) == null) { break; } } return id; } private String getBlockFile(long id) { return Block.BLOCK_FILE_PREFIX + id; } private String getMetaFile(long id) { return Block.BLOCK_FILE_PREFIX + id + "_" + DEFAULT_GEN_STAMP + Block.METADATA_EXTENSION; } /** Create a block file in a random volume*/ private long createBlockFile() throws IOException { FSVolume[] volumes = fds.volumes.volumes; int index = rand.nextInt(volumes.length - 1); long id = getFreeBlockId(); File file = new File(volumes[index].getDir().getPath(), getBlockFile(id)); if (file.createNewFile()) { LOG.info("Created block file " + file.getName()); } return id; } /** Create a metafile in a random volume*/ private long createMetaFile() throws IOException { FSVolume[] volumes = fds.volumes.volumes; int index = rand.nextInt(volumes.length - 1); long id = getFreeBlockId(); File file = new File(volumes[index].getDir().getPath(), getMetaFile(id)); if (file.createNewFile()) { LOG.info("Created metafile " + file.getName()); } return id; } /** Create block file and corresponding metafile in a rondom volume */ private long createBlockMetaFile() throws IOException { FSVolume[] volumes = fds.volumes.volumes; int index = rand.nextInt(volumes.length - 1); long id = getFreeBlockId(); File file = new File(volumes[index].getDir().getPath(), getBlockFile(id)); if (file.createNewFile()) { LOG.info("Created block file " + file.getName()); // Create files with same prefix as block file but extension names // such that during sorting, these files appear around meta file // to test how DirectoryScanner handles extraneous files String name1 = file.getAbsolutePath() + ".l"; String name2 = file.getAbsolutePath() + ".n"; file = new File(name1); 
if (file.createNewFile()) { LOG.info("Created extraneous file " + name1); } file = new File(name2); if (file.createNewFile()) { LOG.info("Created extraneous file " + name2); } file = new File(volumes[index].getDir().getPath(), getMetaFile(id)); if (file.createNewFile()) { LOG.info("Created metafile " + file.getName()); } } return id; } private void scan(long totalBlocks, int diffsize, long missingMetaFile, long missingBlockFile, long missingMemoryBlocks, long mismatchBlocks) { scanner.reconcile(); assertEquals(totalBlocks, scanner.totalBlocks); assertEquals(diffsize, scanner.diff.size()); assertEquals(missingMetaFile, scanner.missingMetaFile); assertEquals(missingBlockFile, scanner.missingBlockFile); assertEquals(missingMemoryBlocks, scanner.missingMemoryBlocks); assertEquals(mismatchBlocks, scanner.mismatchBlocks); } public void testDirectoryScanner() throws Exception { // Run the test with and without parallel scanning for (int parallelism = 1; parallelism < 2; parallelism++) { runTest(parallelism); } } public void runTest(int parallelism) throws Exception { cluster = new MiniDFSCluster(CONF, 1, true, null); try { cluster.waitActive(); fds = (FSDataset) cluster.getDataNodes().get(0).getFSDataset(); CONF.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY, parallelism); scanner = new DirectoryScanner(fds, CONF); // Add files with 100 blocks createFile("/tmp/t1", 10000); long totalBlocks = 100; // Test1: No difference between in-memory and disk scan(100, 0, 0, 0, 0, 0); // Test2: block metafile is missing long blockId = deleteMetaFile(); scan(totalBlocks, 1, 1, 0, 0, 1); verifyGenStamp(blockId, GenerationStamp.GRANDFATHER_GENERATION_STAMP); scan(totalBlocks, 0, 0, 0, 0, 0); // Test3: block file is missing blockId = deleteBlockFile(); scan(totalBlocks, 1, 0, 1, 0, 0); totalBlocks--; verifyDeletion(blockId); scan(totalBlocks, 0, 0, 0, 0, 0); // Test4: A block file exists for which there is no metafile and // a block in memory blockId = createBlockFile(); 
totalBlocks++; scan(totalBlocks, 1, 1, 0, 1, 0); verifyAddition(blockId, GenerationStamp.GRANDFATHER_GENERATION_STAMP, 0); scan(totalBlocks, 0, 0, 0, 0, 0); // Test5: A metafile exists for which there is no block file and // a block in memory blockId = createMetaFile(); scan(totalBlocks+1, 1, 0, 1, 1, 0); File metafile = new File(getMetaFile(blockId)); assertTrue(!metafile.exists()); scan(totalBlocks, 0, 0, 0, 0, 0); // Test6: A block file and metafile exists for which there is no block in // memory blockId = createBlockMetaFile(); totalBlocks++; scan(totalBlocks, 1, 0, 0, 1, 0); verifyAddition(blockId, DEFAULT_GEN_STAMP, 0); scan(totalBlocks, 0, 0, 0, 0, 0); // Test7: Delete bunch of metafiles for (int i = 0; i < 10; i++) { blockId = deleteMetaFile(); } scan(totalBlocks, 10, 10, 0, 0, 10); scan(totalBlocks, 0, 0, 0, 0, 0); // Test8: Delete bunch of block files for (int i = 0; i < 10; i++) { blockId = deleteBlockFile(); } scan(totalBlocks, 10, 0, 10, 0, 0); totalBlocks -= 10; scan(totalBlocks, 0, 0, 0, 0, 0); // Test9: create a bunch of blocks files for (int i = 0; i < 10 ; i++) { blockId = createBlockFile(); } totalBlocks += 10; scan(totalBlocks, 10, 10, 0, 10, 0); scan(totalBlocks, 0, 0, 0, 0, 0); // Test10: create a bunch of metafiles for (int i = 0; i < 10 ; i++) { blockId = createMetaFile(); } scan(totalBlocks+10, 10, 0, 10, 10, 0); scan(totalBlocks, 0, 0, 0, 0, 0); // Test11: create a bunch block files and meta files for (int i = 0; i < 10 ; i++) { blockId = createBlockMetaFile(); } totalBlocks += 10; scan(totalBlocks, 10, 0, 0, 10, 0); scan(totalBlocks, 0, 0, 0, 0, 0); // Test12: truncate block files to test block length mismatch for (int i = 0; i < 10 ; i++) { truncateBlockFile(); } scan(totalBlocks, 10, 0, 0, 0, 10); scan(totalBlocks, 0, 0, 0, 0, 0); // Test13: all the conditions combined createMetaFile(); createBlockFile(); createBlockMetaFile(); deleteMetaFile(); deleteBlockFile(); truncateBlockFile(); scan(totalBlocks+3, 6, 2, 2, 3, 2); 
scan(totalBlocks+1, 0, 0, 0, 0, 0); } finally { cluster.shutdown(); } } private void verifyAddition(long blockId, long genStamp, long size) { final ReplicaInfo replicainfo; replicainfo = fds.fetchReplicaInfo(blockId); assertNotNull(replicainfo); // Added block has the same file as the one created by the test File file = new File(getBlockFile(blockId)); assertEquals(file.getName(), fds.findBlockFile(blockId).getName()); // Generation stamp is same as that of created file assertEquals(genStamp, replicainfo.getGenerationStamp()); // File size matches assertEquals(size, replicainfo.getNumBytes()); } private void verifyDeletion(long blockId) { // Ensure block does not exist in memory assertNull(fds.fetchReplicaInfo(blockId)); } private void verifyGenStamp(long blockId, long genStamp) { final ReplicaInfo memBlock; memBlock = fds.fetchReplicaInfo(blockId); assertNotNull(memBlock); assertEquals(genStamp, memBlock.getGenerationStamp()); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.anyShort; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.when; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.SocketTimeoutException; import java.net.URI; import java.security.MessageDigest; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.CryptoProtocolVersion; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.client.impl.LeaseRenewer; import org.apache.hadoop.hdfs.client.HdfsUtils; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.web.WebHdfsConstants; import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.retry.RetryPolicies.MultipleLinearRandomRetry; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; 
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.stubbing.answers.ThrowsException;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import com.google.common.base.Joiner;

/**
 * These tests make sure that DFSClient retries fetching data from DFS
 * properly in case of errors.
 */
public class TestDFSClientRetries {
  private static final String ADDRESS = "0.0.0.0";
  final static private int PING_INTERVAL = 1000;
  final static private int MIN_SLEEP_TIME = 1000;
  public static final Log LOG =
    LogFactory.getLog(TestDFSClientRetries.class.getName());
  // Shared per-test configuration; reset by setupConf() before each test.
  static private Configuration conf = null;

  /**
   * Minimal in-process RPC server used to simulate a slow or
   * fixed-response peer for client timeout tests.
   */
  private static class TestServer extends Server {
    // When true, every call sleeps past the ping interval to force a
    // client-side timeout.
    private boolean sleep;

    // Response type to instantiate for each call; when null, the request
    // parameter is echoed back instead.
    private Class<? extends Writable> responseClass;

    public TestServer(int handlerCount, boolean sleep) throws IOException {
      this(handlerCount, sleep, LongWritable.class, null);
    }

    public TestServer(int handlerCount, boolean sleep,
        Class<? extends Writable> paramClass,
        Class<? extends Writable> responseClass)
      throws IOException {
      super(ADDRESS, 0, paramClass, handlerCount, conf);
      this.sleep = sleep;
      this.responseClass = responseClass;
    }

    @Override
    public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param,
        long receiveTime) throws IOException {
      if (sleep) {
        // sleep a bit (past PING_INTERVAL so the caller's RPC times out)
        try {
          Thread.sleep(PING_INTERVAL + MIN_SLEEP_TIME);
        } catch (InterruptedException e) {}
      }

      if (responseClass != null) {
        try {
          return responseClass.newInstance();
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      } else {
        return param; // echo param as result
      }
    }
  }

  // writes 'len' bytes of data to out.
// Writes 'len' zero-filled bytes to out in buffer-sized chunks.
private static void writeData(OutputStream out, int len) throws IOException {
  byte [] buf = new byte[4096*16];
  while(len > 0) {
    int toWrite = Math.min(len, buf.length);
    out.write(buf, 0, toWrite);
    len -= toWrite;
  }
}

@Before
public void setupConf(){
  // Fresh configuration for every test so per-test overrides don't leak.
  conf = new HdfsConfiguration();
}

/**
 * This makes sure that when DN closes clients socket after client had
 * successfully connected earlier, the data can still be fetched.
 */
@Test
public void testWriteTimeoutAtDataNode() throws IOException,
                                                InterruptedException {
  final int writeTimeout = 100; //milliseconds.
  // set a very short write timeout for datanode, so that tests runs fast.
  conf.setInt(HdfsClientConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
      writeTimeout);
  // set a smaller block size
  final int blockSize = 10*1024*1024;
  conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  conf.setInt(HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY, 1);
  // set a small buffer size
  final int bufferSize = 4096;
  conf.setInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, bufferSize);

  MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(3).build();

  try {
    cluster.waitActive();
    FileSystem fs = cluster.getFileSystem();

    Path filePath = new Path("/testWriteTimeoutAtDataNode");
    OutputStream out = fs.create(filePath, true, bufferSize);

    // write a 2 block file.
    writeData(out, 2*blockSize);
    out.close();

    byte[] buf = new byte[1024*1024]; // enough to empty TCP buffers.

    InputStream in = fs.open(filePath, bufferSize);

    //first read a few bytes
    IOUtils.readFully(in, buf, 0, bufferSize/2);
    //now read few more chunks of data by sleeping in between :
    for(int i=0; i<10; i++) {
      Thread.sleep(2*writeTimeout); // force write timeout at the datanode.
      // read enough to empty out socket buffers.
      IOUtils.readFully(in, buf, 0, buf.length);
    }
    // successfully read with write timeout on datanodes.
    in.close();
  } finally {
    cluster.shutdown();
  }
}

// more tests related to different failure cases can be added here.
/** * Verify that client will correctly give up after the specified number * of times trying to add a block */ @SuppressWarnings({ "serial", "unchecked" }) @Test public void testNotYetReplicatedErrors() throws IOException { final String exceptionMsg = "Nope, not replicated yet..."; final int maxRetries = 1; // Allow one retry (total of two calls) conf.setInt(HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_KEY, maxRetries); NamenodeProtocols mockNN = mock(NamenodeProtocols.class); Answer<Object> answer = new ThrowsException(new IOException()) { int retryCount = 0; @Override public Object answer(InvocationOnMock invocation) throws Throwable { retryCount++; System.out.println("addBlock has been called " + retryCount + " times"); if(retryCount > maxRetries + 1) // First call was not a retry throw new IOException("Retried too many times: " + retryCount); else throw new RemoteException(NotReplicatedYetException.class.getName(), exceptionMsg); } }; when(mockNN.addBlock(anyString(), anyString(), any(ExtendedBlock.class), any(DatanodeInfo[].class), anyLong(), any(String[].class))).thenAnswer(answer); Mockito.doReturn( new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission( (short) 777), "owner", "group", new byte[0], new byte[0], 1010, 0, null, (byte) 0)).when(mockNN).getFileInfo(anyString()); Mockito.doReturn( new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission( (short) 777), "owner", "group", new byte[0], new byte[0], 1010, 0, null, (byte) 0)) .when(mockNN) .create(anyString(), (FsPermission) anyObject(), anyString(), (EnumSetWritable<CreateFlag>) anyObject(), anyBoolean(), anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject()); final DFSClient client = new DFSClient(null, mockNN, conf, null); OutputStream os = client.create("testfile", true); os.write(20); // write one random byte try { os.close(); } catch (Exception e) { assertTrue("Retries are not being stopped correctly: " + e.getMessage(), e.getMessage().equals(exceptionMsg)); } } 
/**
 * This tests that DFSInputStream failures are counted for a given read
 * operation, and not over the lifetime of the stream. It is a regression
 * test for HDFS-127.
 */
@Test
public void testFailuresArePerOperation() throws Exception
{
  long fileSize = 4096;
  Path file = new Path("/testFile");

  // Set short retry timeouts so this test runs faster
  conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
  conf.setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, 2 * 1000);
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    FileSystem fs = cluster.getFileSystem();
    // Spy on the NN RPC so getBlockLocations can be made to fail N times.
    NamenodeProtocols preSpyNN = cluster.getNameNodeRpc();
    NamenodeProtocols spyNN = spy(preSpyNN);
    DFSClient client = new DFSClient(null, spyNN, conf, null);
    int maxBlockAcquires = client.getConf().getMaxBlockAcquireFailures();
    assertTrue(maxBlockAcquires > 0);

    DFSTestUtil.createFile(fs, file, fileSize, (short)1, 12345L /*seed*/);

    // If the client will retry maxBlockAcquires times, then if we fail
    // any more than that number of times, the operation should entirely
    // fail.
    doAnswer(new FailNTimesAnswer(preSpyNN, maxBlockAcquires + 1))
      .when(spyNN).getBlockLocations(anyString(), anyLong(), anyLong());
    try {
      IOUtils.copyBytes(client.open(file.toString()),
                        new IOUtils.NullOutputStream(), conf, true);
      fail("Didn't get exception");
    } catch (IOException ioe) {
      DFSClient.LOG.info("Got expected exception", ioe);
    }

    // If we fail exactly that many times, then it should succeed.
    doAnswer(new FailNTimesAnswer(preSpyNN, maxBlockAcquires))
      .when(spyNN).getBlockLocations(anyString(), anyLong(), anyLong());
    IOUtils.copyBytes(client.open(file.toString()),
                      new IOUtils.NullOutputStream(), conf, true);

    DFSClient.LOG.info("Starting test case for failure reset");

    // Now the tricky case - if we fail a few times on one read, then succeed,
    // then fail some more on another read, it shouldn't fail.
    doAnswer(new FailNTimesAnswer(preSpyNN, maxBlockAcquires))
      .when(spyNN).getBlockLocations(anyString(), anyLong(), anyLong());
    DFSInputStream is = client.open(file.toString());
    byte buf[] = new byte[10];
    IOUtils.readFully(is, buf, 0, buf.length);

    DFSClient.LOG.info("First read successful after some failures.");

    // Further reads at this point will succeed since it has the good block locations.
    // So, force the block locations on this stream to be refreshed from bad info.
    // When reading again, it should start from a fresh failure count, since
    // we're starting a new operation on the user level.
    doAnswer(new FailNTimesAnswer(preSpyNN, maxBlockAcquires))
      .when(spyNN).getBlockLocations(anyString(), anyLong(), anyLong());
    is.openInfo(true);
    // Seek to beginning forces a reopen of the BlockReader - otherwise it'll
    // just keep reading on the existing stream and the fact that we've poisoned
    // the block info won't do anything.
    is.seek(0);
    IOUtils.readFully(is, buf, 0, buf.length);
  } finally {
    cluster.shutdown();
  }
}

/**
 * Test DFSClient can continue to function after renewLease RPC
 * receives SocketTimeoutException.
 */
@Test
public void testLeaseRenewSocketTimeout() throws Exception
{
  String file1 = "/testFile1";
  String file2 = "/testFile2";
  // Set short retry timeouts so this test runs faster
  conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 10);
  conf.setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, 2 * 1000);
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    // Make every renewLease RPC time out so the renewer aborts the lease.
    NamenodeProtocols spyNN = spy(cluster.getNameNodeRpc());
    Mockito.doThrow(new SocketTimeoutException()).when(spyNN).renewLease(
        Mockito.anyString());
    DFSClient client = new DFSClient(null, spyNN, conf, null);
    // Get hold of the lease renewer instance used by the client
    LeaseRenewer leaseRenewer = client.getLeaseRenewer();
    leaseRenewer.setRenewalTime(100);
    OutputStream out1 = client.create(file1, false);

    Mockito.verify(spyNN, timeout(10000).times(1)).renewLease(
        Mockito.anyString());
    verifyEmptyLease(leaseRenewer);
    try {
      out1.write(new byte[256]);
      fail("existing output stream should be aborted");
    } catch (IOException e) {
      // expected: the stream was aborted after lease renewal failed
    }

    // Verify DFSClient can do read operation after renewLease aborted.
    client.exists(file2);
    // Verify DFSClient can do write operation after renewLease no longer
    // throws SocketTimeoutException.
    Mockito.doNothing().when(spyNN).renewLease(
        Mockito.anyString());
    leaseRenewer = client.getLeaseRenewer();
    leaseRenewer.setRenewalTime(100);
    OutputStream out2 = client.create(file2, false);
    Mockito.verify(spyNN, timeout(10000).times(2)).renewLease(
        Mockito.anyString());
    out2.write(new byte[256]);
    out2.close();
    verifyEmptyLease(leaseRenewer);
  } finally {
    cluster.shutdown();
  }
}

/**
 * Test that getAdditionalBlock() and close() are idempotent. This allows
 * a client to safely retry a call and still produce a correct
 * file. See HDFS-3031.
 */
@Test
public void testIdempotentAllocateBlockAndClose() throws Exception {
  final String src = "/testIdempotentAllocateBlock";
  Path file = new Path(src);

  conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 4096);
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    FileSystem fs = cluster.getFileSystem();
    NamenodeProtocols preSpyNN = cluster.getNameNodeRpc();
    NamenodeProtocols spyNN = spy(preSpyNN);
    DFSClient client = new DFSClient(null, spyNN, conf, null);

    // Make the call to addBlock() get called twice, as if it were retried
    // due to an IPC issue.
    doAnswer(new Answer<LocatedBlock>() {
      @Override
      public LocatedBlock answer(InvocationOnMock invocation) throws Throwable {
        LocatedBlock ret = (LocatedBlock) invocation.callRealMethod();
        LocatedBlocks lb = cluster.getNameNodeRpc()
            .getBlockLocations(src, 0, Long.MAX_VALUE);
        int blockCount = lb.getLocatedBlocks().size();
        assertEquals(lb.getLastLocatedBlock().getBlock(), ret.getBlock());

        // Retrying should result in a new block at the end of the file.
        // (abandoning the old one)
        LocatedBlock ret2 = (LocatedBlock) invocation.callRealMethod();
        lb = cluster.getNameNodeRpc()
            .getBlockLocations(src, 0, Long.MAX_VALUE);
        int blockCount2 = lb.getLocatedBlocks().size();
        assertEquals(lb.getLastLocatedBlock().getBlock(), ret2.getBlock());

        // We shouldn't have gained an extra block by the RPC.
        assertEquals(blockCount, blockCount2);
        return ret2;
      }
    }).when(spyNN).addBlock(Mockito.anyString(), Mockito.anyString(),
        Mockito.<ExtendedBlock> any(), Mockito.<DatanodeInfo[]> any(),
        Mockito.anyLong(), Mockito.<String[]> any());

    doAnswer(new Answer<Boolean>() {

      @Override
      public Boolean answer(InvocationOnMock invocation) throws Throwable {
        // complete() may return false a few times before it returns
        // true. We want to wait until it returns true, and then
        // make it retry one more time after that.
        LOG.info("Called complete(: " +
            Joiner.on(",").join(invocation.getArguments()) + ")");
        if (!(Boolean)invocation.callRealMethod()) {
          LOG.info("Complete call returned false, not faking a retry RPC");
          return false;
        }
        // We got a successful close. Call it again to check idempotence.
        try {
          boolean ret = (Boolean) invocation.callRealMethod();
          LOG.info("Complete call returned true, faked second RPC. " +
              "Returned: " + ret);
          return ret;
        } catch (Throwable t) {
          LOG.error("Idempotent retry threw exception", t);
          throw t;
        }
      }
    }).when(spyNN).complete(Mockito.anyString(), Mockito.anyString(),
        Mockito.<ExtendedBlock>any(), anyLong());

    OutputStream stm = client.create(file.toString(), true);
    try {
      AppendTestUtil.write(stm, 0, 10000);
      stm.close();
      stm = null;
    } finally {
      // no-op when close() succeeded (stm was nulled above)
      IOUtils.cleanup(LOG, stm);
    }

    // Make sure the mock was actually properly injected.
    Mockito.verify(spyNN, Mockito.atLeastOnce()).addBlock(
        Mockito.anyString(), Mockito.anyString(),
        Mockito.<ExtendedBlock> any(), Mockito.<DatanodeInfo[]> any(),
        Mockito.anyLong(), Mockito.<String[]> any());
    Mockito.verify(spyNN, Mockito.atLeastOnce()).complete(
        Mockito.anyString(), Mockito.anyString(),
        Mockito.<ExtendedBlock>any(), anyLong());

    AppendTestUtil.check(fs, file, 10000);
  } finally {
    cluster.shutdown();
  }
}

/**
 * Mock Answer implementation of NN.getBlockLocations that will return
 * a poisoned block list a certain number of times before returning
 * a proper one.
 */
private static class FailNTimesAnswer implements Answer<LocatedBlocks> {
  // Number of remaining calls that should return the poisoned block list.
  private int failuresLeft;
  // The real (un-spied) NameNode RPC, used to get the genuine answer.
  private final NamenodeProtocols realNN;

  public FailNTimesAnswer(NamenodeProtocols preSpyNN, int timesToFail) {
    failuresLeft = timesToFail;
    this.realNN = preSpyNN;
  }

  @Override
  public LocatedBlocks answer(InvocationOnMock invocation) throws IOException {
    Object args[] = invocation.getArguments();
    LocatedBlocks realAnswer = realNN.getBlockLocations(
        (String)args[0],
        (Long)args[1],
        (Long)args[2]);

    if (failuresLeft-- > 0) {
      NameNode.LOG.info("FailNTimesAnswer injecting failure.");
      return makeBadBlockList(realAnswer);
    }
    NameNode.LOG.info("FailNTimesAnswer no longer failing.");
    return realAnswer;
  }

  // Replace the first block's locations with a bogus datanode so the
  // client's block acquire attempt fails.
  private LocatedBlocks makeBadBlockList(LocatedBlocks goodBlockList) {
    LocatedBlock goodLocatedBlock = goodBlockList.get(0);
    LocatedBlock badLocatedBlock = new LocatedBlock(
        goodLocatedBlock.getBlock(),
        new DatanodeInfo[] {
          DFSTestUtil.getDatanodeInfo("1.2.3.4", "bogus", 1234)
        });
    badLocatedBlock.setStartOffset(goodLocatedBlock.getStartOffset());

    List<LocatedBlock> badBlocks = new ArrayList<LocatedBlock>();
    badBlocks.add(badLocatedBlock);
    return new LocatedBlocks(goodBlockList.getFileLength(), false,
                             badBlocks, null, true, null);
  }
}

/**
 * Test that a DFSClient waits for random time before retry on busy blocks.
 */
@Test
public void testDFSClientRetriesOnBusyBlocks() throws IOException {

  System.out.println("Testing DFSClient random waiting on busy blocks.");

  //
  // Test settings:
  //
  //           xcievers    fileLen   #clients  timeWindow    #retries
  //           ========    =======   ========  ==========    ========
  // Test 1:          2       6 MB         50      300 ms           3
  // Test 2:          2       6 MB         50      300 ms          50
  // Test 3:          2       6 MB         50     1000 ms           3
  // Test 4:          2       6 MB         50     1000 ms          50
  //
  //   Minimum xcievers is 2 since 1 thread is reserved for registry.
  //   Test 1 & 3 may fail since # retries is low.
  //   Test 2 & 4 should never fail since (#threads)/(xcievers-1) is the upper
  //   bound for guarantee to not throw BlockMissingException.
  //
  int xcievers = 2;
  int fileLen = 6*1024*1024;
  int threads = 50;
  int retries = 3;
  int timeWin = 300;

  //
  // Test 1: might fail
  //
  long timestamp = Time.now();
  boolean pass = busyTest(xcievers, threads, fileLen, timeWin, retries);
  long timestamp2 = Time.now();
  if ( pass ) {
    LOG.info("Test 1 succeeded! Time spent: " +
        (timestamp2-timestamp)/1000.0 + " sec.");
  } else {
    LOG.warn("Test 1 failed, but relax. Time spent: " +
        (timestamp2-timestamp)/1000.0 + " sec.");
  }

  //
  // Test 2: should never fail
  //
  retries = 50;
  timestamp = Time.now();
  pass = busyTest(xcievers, threads, fileLen, timeWin, retries);
  timestamp2 = Time.now();
  assertTrue("Something wrong! Test 2 got Exception with maxmum retries!", pass);
  LOG.info("Test 2 succeeded! Time spent: " +
      (timestamp2-timestamp)/1000.0 + " sec.");

  //
  // Test 3: might fail
  //
  retries = 3;
  timeWin = 1000;
  timestamp = Time.now();
  pass = busyTest(xcievers, threads, fileLen, timeWin, retries);
  timestamp2 = Time.now();
  if ( pass ) {
    LOG.info("Test 3 succeeded! Time spent: " +
        (timestamp2-timestamp)/1000.0 + " sec.");
  } else {
    LOG.warn("Test 3 failed, but relax. Time spent: " +
        (timestamp2-timestamp)/1000.0 + " sec.");
  }

  //
  // Test 4: should never fail
  //
  retries = 50;
  timeWin = 1000;
  timestamp = Time.now();
  pass = busyTest(xcievers, threads, fileLen, timeWin, retries);
  timestamp2 = Time.now();
  assertTrue("Something wrong! Test 4 got Exception with maxmum retries!", pass);
  LOG.info("Test 4 succeeded! Time spent: " +
      (timestamp2-timestamp)/1000.0 + " sec.");
}

/**
 * Start a cluster with a constrained number of xceiver threads, write one
 * block, then read it concurrently from 'threads' clients. Returns true
 * only when every reader thread verified the content digest.
 */
private boolean busyTest(int xcievers, int threads, int fileLen,
    int timeWin, int retries) throws IOException {

  boolean ret = true;
  short replicationFactor = 1;
  long blockSize = 128*1024*1024; // DFS block size
  int bufferSize = 4096;
  int originalXcievers = conf.getInt(
    DFSConfigKeys.DFS_DATANODE_MAX_RECEIVER_THREADS_KEY,
    DFSConfigKeys.DFS_DATANODE_MAX_RECEIVER_THREADS_DEFAULT);
  conf.setInt(DFSConfigKeys.DFS_DATANODE_MAX_RECEIVER_THREADS_KEY,
    xcievers);
  conf.setInt(HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY,
              retries);
  conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, timeWin);
  // Disable keepalive
  conf.setInt(DFSConfigKeys.DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_KEY, 0);

  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(replicationFactor).build();
  cluster.waitActive();

  FileSystem fs = cluster.getFileSystem();
  Path file1 = new Path("test_data.dat");
  file1 = file1.makeQualified(fs.getUri(),
      fs.getWorkingDirectory()); // make URI hdfs://

  try {
    FSDataOutputStream stm = fs.create(file1, true, bufferSize,
                                      replicationFactor, blockSize);

    // verify that file exists in FS namespace
    assertTrue(file1 + " should be a file",
                fs.getFileStatus(file1).isFile());
    System.out.println("Path : \"" + file1 + "\"");
    LOG.info("Path : \"" + file1 + "\"");

    // write 1 block to file
    byte[] buffer = AppendTestUtil.randomBytes(Time.now(), fileLen);
    stm.write(buffer, 0, fileLen);
    stm.close();

    // verify that file size has changed to the full size
    long len = fs.getFileStatus(file1).getLen();

    assertTrue(file1 + " should be of size " + fileLen +
               " but found to be of size " + len,
                len == fileLen);

    // read back and check data integrigy
    byte[] read_buf = new byte[fileLen];
    InputStream in = fs.open(file1, fileLen);
    IOUtils.readFully(in, read_buf, 0, fileLen);
    assert(Arrays.equals(buffer, read_buf));
    in.close();
    read_buf = null; // GC it if needed

    // compute digest of the content to reduce memory space
    MessageDigest m = MessageDigest.getInstance("SHA");
    m.update(buffer, 0, fileLen);
    byte[] hash_sha = m.digest();

    // spawn multiple threads and all trying to access the same block
    Thread[] readers = new Thread[threads];
    Counter counter = new Counter(0);
    for (int i = 0; i < threads; ++i ) {
      DFSClientReader reader = new DFSClientReader(file1, cluster, hash_sha,
          fileLen, counter);
      readers[i] = new Thread(reader);
      readers[i].start();
    }

    // wait for them to exit
    for (int i = 0; i < threads; ++i ) {
      readers[i].join();
    }
    if ( counter.get() == threads )
      ret = true;
    else
      ret = false;

  } catch (InterruptedException e) {
    System.out.println("Thread got InterruptedException.");
    e.printStackTrace();
    ret = false;
  } catch (Exception e) {
    e.printStackTrace();
    ret = false;
  } finally {
    // restore the xceiver limit for subsequent tests sharing 'conf'
    conf.setInt(DFSConfigKeys.DFS_DATANODE_MAX_RECEIVER_THREADS_KEY,
        originalXcievers);
    fs.delete(file1, false);
    cluster.shutdown();
  }
  return ret;
}

// Polls up to ~10s for the renewer's lease set to drain, then asserts it.
private void verifyEmptyLease(LeaseRenewer leaseRenewer) throws Exception {
  int sleepCount = 0;
  while (!leaseRenewer.isEmpty() && sleepCount++ < 20) {
    Thread.sleep(500);
  }
  assertTrue("Lease should be empty.", leaseRenewer.isEmpty());
}

/**
 * Reader thread for the busy-block test: reads the whole file through a
 * fresh FileSystem instance and verifies its SHA digest, incrementing the
 * shared counter on success.
 */
class DFSClientReader implements Runnable {

  DFSClient client;
  final Configuration conf;
  final byte[] expected_sha;
  FileSystem fs;
  final Path filePath;
  final MiniDFSCluster cluster;
  final int len;
  final Counter counter;

  DFSClientReader(Path file, MiniDFSCluster cluster, byte[] hash_sha,
      int fileLen, Counter cnt) {
    filePath = file;
    this.cluster = cluster;
    counter = cnt;
    len = fileLen;
    conf = new HdfsConfiguration();
    expected_sha = hash_sha;
    try {
      cluster.waitActive();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  @Override
  public void run() {
    try {
      fs = cluster.getNewFileSystemInstance(0);

      int bufferSize = len;
      byte[] buf = new byte[bufferSize];
      InputStream in = fs.open(filePath, bufferSize);

      // read the whole file
      IOUtils.readFully(in, buf, 0, bufferSize);

      // compare with the expected input
      MessageDigest m = MessageDigest.getInstance("SHA");
      m.update(buf, 0, bufferSize);
      byte[] hash_sha = m.digest();

      buf = null; // GC if needed since there may be too many threads
      in.close();
      fs.close();

      assertTrue("hashed keys are not the same size",
                 hash_sha.length == expected_sha.length);

      assertTrue("hashed keys are not equal",
                 Arrays.equals(hash_sha, expected_sha));

      counter.inc(); // count this thread as successful

      LOG.info("Thread correctly read the block.");

    } catch (BlockMissingException e) {
      LOG.info("Bad - BlockMissingException is caught.");
      e.printStackTrace();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}

// Success counter shared by reader threads. inc() is synchronized; get()
// is only called after all reader threads have been joined.
class Counter {
  int counter;
  Counter(int n) { counter = n; }
  public synchronized void inc() { ++counter; }
  public int get() { return counter; }
}

@Test
public void testGetFileChecksum() throws Exception {
  final String f = "/testGetFileChecksum";
  final Path p = new Path(f);

  final MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
  try {
    cluster.waitActive();

    //create a file
    final FileSystem fs = cluster.getFileSystem();
    DFSTestUtil.createFile(fs, p, 1L << 20, (short)3, 20100402L);

    //get checksum
    final FileChecksum cs1 = fs.getFileChecksum(p);
    assertTrue(cs1 != null);

    //stop the first datanode
    final List<LocatedBlock> locatedblocks = DFSClient.callGetBlockLocations(
        cluster.getNameNodeRpc(), f, 0, Long.MAX_VALUE)
          .getLocatedBlocks();
    final DatanodeInfo first = locatedblocks.get(0).getLocations()[0];
    cluster.stopDataNode(first.getXferAddr());

    //get checksum again: must equal the first despite the stopped replica
    final FileChecksum cs2 = fs.getFileChecksum(p);
    assertEquals(cs1, cs2);
  } finally {
    cluster.shutdown();
  }
}

/** Test that timeout occurs when DN does not respond to RPC.
 * Start up a server and ask it to sleep for n seconds.
Make an
 * RPC to the server and set rpcTimeout to less than n and ensure
 * that socketTimeoutException is obtained
 */
@Test
public void testClientDNProtocolTimeout() throws IOException {
  // TestServer(1, true) sleeps past the ping interval on every call.
  final Server server = new TestServer(1, true);
  server.start();

  final InetSocketAddress addr = NetUtils.getConnectAddress(server);
  DatanodeID fakeDnId = DFSTestUtil.getLocalDatanodeID(addr.getPort());

  ExtendedBlock b = new ExtendedBlock("fake-pool", new Block(12345L));
  LocatedBlock fakeBlock = new LocatedBlock(b, new DatanodeInfo[0]);

  ClientDatanodeProtocol proxy = null;

  try {
    // rpcTimeout of 500ms is well below the server's sleep time.
    proxy = DFSUtilClient.createClientDatanodeProtocolProxy(
        fakeDnId, conf, 500, false, fakeBlock);

    proxy.getReplicaVisibleLength(new ExtendedBlock("bpid", 1));
    fail ("Did not get expected exception: SocketTimeoutException");
  } catch (SocketTimeoutException e) {
    LOG.info("Got the expected Exception: SocketTimeoutException");
  } finally {
    if (proxy != null) {
      RPC.stopProxy(proxy);
    }
    server.stop();
  }
}

/**
 * Test that checksum failures are recovered from by the next read on the same
 * DFSInputStream. Corruption information is not persisted from read call to
 * read call, so the client should expect consecutive calls to behave the same
 * way. See HDFS-3067.
 */
@Test
public void testRetryOnChecksumFailure() throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  MiniDFSCluster cluster =
    new MiniDFSCluster.Builder(conf).numDataNodes(1).build();

  try {
    final short REPL_FACTOR = 1;
    final long FILE_LENGTH = 512L;
    cluster.waitActive();
    FileSystem fs = cluster.getFileSystem();

    Path path = new Path("/corrupted");

    DFSTestUtil.createFile(fs, path, FILE_LENGTH, REPL_FACTOR, 12345L);
    DFSTestUtil.waitReplication(fs, path, REPL_FACTOR);

    // Corrupt the single replica on disk so every read hits a checksum error.
    ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, path);
    int blockFilesCorrupted = cluster.corruptBlockOnDataNodes(block);
    assertEquals("All replicas not corrupted", REPL_FACTOR,
        blockFilesCorrupted);

    InetSocketAddress nnAddr =
      new InetSocketAddress("localhost", cluster.getNameNodePort());
    DFSClient client = new DFSClient(nnAddr, conf);
    DFSInputStream dis = client.open(path.toString());
    byte[] arr = new byte[(int)FILE_LENGTH];
    // Two iterations: the second read must fail the same way as the first,
    // proving corruption info is not carried over between read calls.
    for (int i = 0; i < 2; ++i) {
      try {
        dis.read(arr, 0, (int)FILE_LENGTH);
        fail("Expected ChecksumException not thrown");
      } catch (Exception ex) {
        GenericTestUtils.assertExceptionContains(
            "Checksum error", ex);
      }
    }
  } finally {
    cluster.shutdown();
  }
}

/** Test client retry with namenode restarting.
*/
@Test(timeout=300000)
public void testNamenodeRestart() throws Exception {
  namenodeRestartTest(new Configuration(), false);
}

/**
 * Exercises client retry behavior across a namenode restart and a safe-mode
 * round trip, for either the native RPC client or WebHDFS.
 *
 * While the namenode is down, background threads keep writing, reading, and
 * creating files; the retry policy enabled below must make those operations
 * block and then succeed once the namenode returns. Finally, verifies that a
 * non-retriable error (FileNotFoundException) is NOT retried.
 *
 * @param conf      configuration to mutate and use for the mini cluster
 * @param isWebHDFS true to run the scenario through WebHDFS instead of RPC
 * @throws Exception on any unexpected failure in the main thread
 */
public static void namenodeRestartTest(final Configuration conf,
    final boolean isWebHDFS) throws Exception {
  ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);

  // Exceptions thrown on worker threads are collected here and asserted
  // empty at each join point below.
  final List<Exception> exceptions = new ArrayList<Exception>();

  final Path dir = new Path("/testNamenodeRestart");

  // Enable the client-side retry policy appropriate to the transport.
  if (isWebHDFS) {
    conf.setBoolean(HdfsClientConfigKeys.HttpClient.RETRY_POLICY_ENABLED_KEY, true);
  } else {
    conf.setBoolean(HdfsClientConfigKeys.Retry.POLICY_ENABLED_KEY, true);
  }
  conf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_KEY, 1);
  conf.setInt(MiniDFSCluster.DFS_NAMENODE_SAFEMODE_EXTENSION_TESTING_KEY, 5000);

  final short numDatanodes = 3;
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(numDatanodes)
      .build();
  try {
    cluster.waitActive();
    final DistributedFileSystem dfs = cluster.getFileSystem();
    final FileSystem fs = isWebHDFS ? WebHdfsTestUtil.getWebHdfsFileSystem(
        conf, WebHdfsConstants.WEBHDFS_SCHEME) : dfs;
    final URI uri = dfs.getUri();
    assertTrue(HdfsUtils.isHealthy(uri));

    //create a file
    final long length = 1L << 20;
    final Path file1 = new Path(dir, "foo");
    DFSTestUtil.createFile(fs, file1, length, numDatanodes, 20120406L);

    //get file status
    final FileStatus s1 = fs.getFileStatus(file1);
    assertEquals(length, s1.getLen());

    //create file4, write some data but not close
    final Path file4 = new Path(dir, "file4");
    final FSDataOutputStream out4 = fs.create(file4, false, 4096,
        fs.getDefaultReplication(file4), 1024L, null);
    final byte[] bytes = new byte[1000];
    new Random().nextBytes(bytes);
    out4.write(bytes);
    out4.write(bytes);
    if (isWebHDFS) {
      // WebHDFS does not support hflush. To avoid DataNode communicating with
      // NN while we're shutting down NN, we call out4.close() to finish
      // writing the data
      out4.close();
    } else {
      out4.hflush();
    }

    //shutdown namenode
    assertTrue(HdfsUtils.isHealthy(uri));
    cluster.shutdownNameNode(0);
    assertFalse(HdfsUtils.isHealthy(uri));

    //namenode is down, continue writing file4 in a thread
    final Thread file4thread = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          //write some more data and then close the file
          if (!isWebHDFS) {
            out4.write(bytes);
            out4.write(bytes);
            out4.write(bytes);
            out4.close();
          }
        } catch (Exception e) {
          exceptions.add(e);
        }
      }
    });
    file4thread.start();

    //namenode is down, read the file in a thread
    final Thread reader = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          //it should retry till namenode is up.
          final FileSystem fs = createFsWithDifferentUsername(conf, isWebHDFS);
          final FSDataInputStream in = fs.open(file1);
          int count = 0;
          for(; in.read() != -1; count++);
          in.close();
          assertEquals(s1.getLen(), count);
        } catch (Exception e) {
          exceptions.add(e);
        }
      }
    });
    reader.start();

    //namenode is down, create another file in a thread
    final Path file3 = new Path(dir, "file");
    final Thread thread = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          //it should retry till namenode is up.
          final FileSystem fs = createFsWithDifferentUsername(conf, isWebHDFS);
          DFSTestUtil.createFile(fs, file3, length, numDatanodes, 20120406L);
        } catch (Exception e) {
          exceptions.add(e);
        }
      }
    });
    thread.start();

    //restart namenode in a new thread
    new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          //sleep, restart, and then wait active
          TimeUnit.SECONDS.sleep(30);
          assertFalse(HdfsUtils.isHealthy(uri));
          cluster.restartNameNode(0, false);
          cluster.waitActive();
          assertTrue(HdfsUtils.isHealthy(uri));
        } catch (Exception e) {
          exceptions.add(e);
        }
      }
    }).start();

    //namenode is down, it should retry until namenode is up again.
    final FileStatus s2 = fs.getFileStatus(file1);
    assertEquals(s1, s2);

    //check file1 and file3
    thread.join();
    assertEmpty(exceptions);
    assertEquals(s1.getLen(), fs.getFileStatus(file3).getLen());
    assertEquals(fs.getFileChecksum(file1), fs.getFileChecksum(file3));

    reader.join();
    assertEmpty(exceptions);

    //check file4
    file4thread.join();
    assertEmpty(exceptions);
    {
      final FSDataInputStream in = fs.open(file4);
      int count = 0;
      for(int r; (r = in.read()) != -1; count++) {
        Assert.assertEquals(String.format("count=%d", count),
            bytes[count % bytes.length], (byte)r);
      }
      // Native client wrote 5 buffers total (2 before + 3 after shutdown);
      // WebHDFS closed after the first 2 (see hflush note above).
      if (!isWebHDFS) {
        Assert.assertEquals(5 * bytes.length, count);
      } else {
        Assert.assertEquals(2 * bytes.length, count);
      }
      in.close();
    }

    //enter safe mode
    assertTrue(HdfsUtils.isHealthy(uri));
    dfs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
    assertFalse(HdfsUtils.isHealthy(uri));

    //leave safe mode in a new thread
    new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          //sleep and then leave safe mode
          TimeUnit.SECONDS.sleep(30);
          assertFalse(HdfsUtils.isHealthy(uri));
          dfs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
          assertTrue(HdfsUtils.isHealthy(uri));
        } catch (Exception e) {
          exceptions.add(e);
        }
      }
    }).start();

    //namenode is in safe mode, create should retry until it leaves safe mode.
    final Path file2 = new Path(dir, "bar");
    DFSTestUtil.createFile(fs, file2, length, numDatanodes, 20120406L);
    assertEquals(fs.getFileChecksum(file1), fs.getFileChecksum(file2));
    assertTrue(HdfsUtils.isHealthy(uri));

    //make sure it won't retry on exceptions like FileNotFoundException
    final Path nonExisting = new Path(dir, "nonExisting");
    LOG.info("setPermission: " + nonExisting);
    try {
      fs.setPermission(nonExisting, new FsPermission((short)0));
      fail();
    } catch(FileNotFoundException fnfe) {
      LOG.info("GOOD!", fnfe);
    }

    assertEmpty(exceptions);
  } finally {
    cluster.shutdown();
  }
}

/**
 * Fails with a readable summary if any worker thread recorded an exception.
 *
 * @param exceptions exceptions collected from background threads
 */
static void assertEmpty(final List<Exception> exceptions) {
  if (!exceptions.isEmpty()) {
    final StringBuilder b = new StringBuilder("There are ")
        .append(exceptions.size())
        .append(" exception(s):");
    for(int i = 0; i < exceptions.size(); i++) {
      b.append("\n Exception ")
          .append(i)
          .append(": ")
          .append(StringUtils.stringifyException(exceptions.get(i)));
    }
    fail(b.toString());
  }
}

/**
 * Creates a FileSystem handle bound to a throwaway test user so that the
 * worker threads do not share the main thread's cached client.
 *
 * @param conf      configuration to connect with
 * @param isWebHDFS true for a WebHDFS filesystem, false for native RPC
 * @return a filesystem owned by "&lt;currentUser&gt;_XXX"
 */
private static FileSystem createFsWithDifferentUsername(
    final Configuration conf, final boolean isWebHDFS
    ) throws IOException, InterruptedException {
  final String username = UserGroupInformation.getCurrentUser(
      ).getShortUserName() + "_XXX";
  final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
      username, new String[]{"supergroup"});

  return isWebHDFS?
      WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf, WebHdfsConstants.WEBHDFS_SCHEME)
      : DFSTestUtil.getFileSystemAs(ugi, conf);
}

/**
 * Checks parsing of the comma-separated retry-policy spec: malformed or
 * odd-length strings parse to null; valid pairs print as [NxMms, ...].
 */
@Test
public void testMultipleLinearRandomRetry() {
  //a valid case
  parseMultipleLinearRandomRetry(null, "");
  parseMultipleLinearRandomRetry(null, "11");
  parseMultipleLinearRandomRetry(null, "11,22,33");
  parseMultipleLinearRandomRetry(null, "11,22,33,44,55");
  parseMultipleLinearRandomRetry(null, "AA");
  parseMultipleLinearRandomRetry(null, "11,AA");
  parseMultipleLinearRandomRetry(null, "11,22,33,FF");
  parseMultipleLinearRandomRetry(null, "11,-22");
  parseMultipleLinearRandomRetry(null, "-11,22");

  parseMultipleLinearRandomRetry("[22x11ms]", "11,22");
  parseMultipleLinearRandomRetry("[22x11ms, 44x33ms]", "11,22,33,44");
  parseMultipleLinearRandomRetry("[22x11ms, 44x33ms, 66x55ms]", "11,22,33,44,55,66");
  parseMultipleLinearRandomRetry("[22x11ms, 44x33ms, 66x55ms]", " 11, 22, 33, 44, 55, 66 ");
}

/**
 * Parses {@code s} and asserts the result's toString matches
 * "MultipleLinearRandomRetry" + {@code expected}, or null for null.
 *
 * @param expected expected bracketed policy string, or null if unparseable
 * @param s        raw comma-separated input
 */
static void parseMultipleLinearRandomRetry(String expected, String s) {
  final MultipleLinearRandomRetry r = MultipleLinearRandomRetry.parseCommaSeparatedString(s);
  LOG.info("input=" + s + ", parsed=" + r + ", expected=" + expected);
  if (r == null) {
    assertEquals(expected, null);
  } else {
    assertEquals("MultipleLinearRandomRetry" + expected, r.toString());
  }
}

/**
 * Verifies the locate-following-block initial delay: 400ms by default, and
 * the configured value when the key is set explicitly.
 */
@Test
public void testDFSClientConfigurationLocateFollowingBlockInitialDelay()
    throws Exception {
  // test if HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_KEY
  // is not configured, verify DFSClient uses the default value 400.
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    NamenodeProtocols nn = cluster.getNameNodeRpc();
    DFSClient client = new DFSClient(null, nn, conf, null);
    assertEquals(client.getConf().
        getBlockWriteLocateFollowingInitialDelayMs(), 400);

    // change HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_KEY,
    // verify DFSClient uses the configured value 1000.
    conf.setInt(
        HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_KEY,
        1000);
    client = new DFSClient(null, nn, conf, null);
    assertEquals(client.getConf().
        getBlockWriteLocateFollowingInitialDelayMs(), 1000);
  } finally {
    cluster.shutdown();
  }
}
}
/******************************************************************************* * Copyright (c) 2019 Infostretch Corporation * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 ******************************************************************************/
package com.qmetry.qaf.automation.scenario;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;

import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import com.qmetry.qaf.automation.data.MetaDataScanner;

/**
 * Data-driven test of {@link MetaDataScanner#includeMethod} — the
 * include/exclude metadata filter applied to scenario metadata.
 */
public class MetaDataFilterTest {

	/**
	 * Deserializes the three JSON maps and asserts that the filter's
	 * include/exclude decision matches the expected outcome.
	 *
	 * @param scenarioMetadatastr JSON of the scenario's own metadata
	 * @param includeMetastr      JSON of metadata that must match to include
	 * @param excludeMetastr      JSON of metadata that must match to exclude
	 * @param expectedOutcome     true if the scenario should be included
	 */
	@SuppressWarnings("unchecked")
	@Test(dataProvider = "metaFilterDP")
	public void applyMetaFileterTest(String scenarioMetadatastr, String includeMetastr,
			String excludeMetastr, boolean expectedOutcome) {
		Gson gson = new GsonBuilder().create();
		Map<String, Object> includeMeta = gson.fromJson(includeMetastr, Map.class);
		Map<String, Object> excludeMeta = gson.fromJson(excludeMetastr, Map.class);
		Map<String, Object> scenarioMetadata = gson.fromJson(scenarioMetadatastr, Map.class);
		boolean binclude = MetaDataScanner.includeMethod(scenarioMetadata, includeMeta, excludeMeta);
		Assert.assertEquals(binclude, expectedOutcome);
	}

	/*
	 * Covers every combination of include/exclude filter content against the
	 * scenario metadata:
	 *   - nothing include / nothing exclude
	 *   - known metadata include / exclude nothing
	 *   - nothing include / known metadata exclude
	 *   - unknown metadata include / exclude nothing
	 *   - nothing include / unknown metadata exclude
	 *   - known metadata include / known metadata exclude
	 *   - same include / same exclude
	 *   - unknown metadata include / unknown metadata exclude
	 *   - known metadata include / unknown metadata exclude
	 *   - unknown metadata include / known metadata exclude
	 *   - unknown+known metadata include / nothing exclude
	 *   - nothing include / unknown+known metadata exclude
	 *   - known metadata include / unknown+known metadata exclude
	 *   - unknown metadata include / unknown+known metadata exclude
	 *   - unknown+known metadata include / known metadata exclude
	 *   - unknown+known metadata include / unknown metadata exclude
	 *   - unknown+known metadata include / unknown+known metadata exclude
	 * "known" means a key/value present in the scenario metadata; "unknown"
	 * means a key or value the scenario does not carry.
	 */
	@DataProvider(name = "metaFilterDP")
	public static Iterator<Object[]> testData() {
		ArrayList<Object[]> data = new ArrayList<Object[]>();
		// Row shape: scenarioMetadatastr, includeMetastr, excludeMetastr, expectedOutcome

		// nothing include - nothing exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}", "{}", "{}", true});

		// known metadata include - exclude nothing
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1'],'Brand':['B1'],'group':['b']}", "{}", true});

		// nothing include - known metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}", "{}",
				"{'Module':['M1'],'group':['b']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}", "{}",
				"{'Brand':['B2']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}", "{}",
				"{'group':['a']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}", "{}",
				"{'group':['a','b']}", false});

		// unknown metadata include - exclude nothing
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M'],'group':['c']}", "{}", false});

		// nothing include - unknown metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}", "{}",
				"{'Module':['M'],'group':['c']}", true});

		// known metadata include - known metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'group':['b']}", "{'Module':['M1']}", false});
		data.add(new Object[]{
				"{'Brand':['B1','B2'],'Module':['M1','M2'],'group':['a','b']}",
				"{'Brand':['B1'],'Module':['M1']}", "{'Module':['M2']}", false});
		data.add(new Object[]{
				"{'Brand':['B1','B2'],'Module':['M1','M2'],'group':['a','b']}",
				"{'Module':['M1']}", "{'Module':['M2']}", false});
		data.add(new Object[]{
				"{'Brand':['B1','B2'],'Module':['M1','M2'],'group':['a','b']}",
				"{'group':['a','b']}",
				"{'Module':['M2'],'Brand':['B1']}", false});
		data.add(new Object[]{
				"{'Brand':['B1','B2'],'Module':['M1','M2'],'group':['a','b']}",
				"{'Module':['M2']}", "{'Brand':['B1']}", false});

		// same include - same exclude
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1']}", "{'Module':['M1']}", false});

		// unknown metadata include - unknown metadata exclude
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'group':['c']}", "{'Author':['Priyesh']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M']}", "{'Module':['M2']}", false});
		// NOTE(review): duplicate of the Price/Author row two entries above;
		// kept as-is to preserve the data set.
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'group':['c']}", "{'Author':['Priyesh']}", false});

		// known metadata include - unknown metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'group':['b']}", "{'Module':['M']}", true});

		// unknown metadata include - known metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'group':['c']}", "{'Module':['M1']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M']}", "{'Module':['M1']}", false});

		// unkown+known metadata include - nothing metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1','M'],'Brand':['B1'],'group':['b']}", "{}", true});
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1'],'Brand':['B1'],'group':['a','c']}", "{}", true});
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1'],'Brand':['B1'],'group':['c']}", "{}", false});
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a']}",
				"{'Module':['M1','M'],'group':['a','b']}", "{}", true});
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'Author':['Priyesh'],'Module':['100']}", "{}", false});
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'Author':['Atul'],'Module':['100']}", "{}", false});

		// nothing metadata include - unkown+known metadata exclude
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}", "{}",
				"{'Author':['Priyesh','Atul']}", false});
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}", "{}",
				"{'Author':['Priyesh'],'Price':['100']}", false});
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}", "{}",
				"{'Author':['Priyesh'],'Module':['100']}", true});
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}", "{}",
				"{'Author':['Atul'],'Module':['100']}", false});

		// known metadata include - unkown+known metadata exclude
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1']}", "{'group':['c'],'Brand':['B1']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1']}", "{'group':['c','b']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1']}", "{'group':['c','b'],'Brand':['B1']}", false});
		data.add(new Object[]{"{'Brand':['B1','B2'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1']}", "{'Module':['M'],'group':['c'],'Brand':['B1']}", false});

		// unknown metadata include - unkown+known metadata exclude
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'group':['c']}", "{'Author':['Priyesh'],'group':['a']}", false});

		// unkown+known metadata include - known metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1','M'],'Brand':['B1'],'group':['b']}", "{'group':['b']}", false});

		// unkown+known metadata include - unkown metadata exclude
		data.add(new Object[]{"{'Brand':['B1'],'Module':'M1','group':['a','b']}",
				"{'Module':['M1','M'],'Brand':['B1'],'group':['b']}", "{'group':['c']}", true});
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'group':['a','b','c']}", "{'Author':['Priyesh']}", true});

		// unkown+known metadata include - unkown+known metadata exclude
		data.add(new Object[]{"{'Price':['100'],'Author':['Atul'],'group':['a','b']}",
				"{'group':['c','b']}", "{'Author':['Priyesh'],'Price':['100']}", false});

		return data.iterator();
	}
}
import java.awt.geom.CubicCurve2D;
import java.awt.Dimension;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.awt.Stroke;
import java.awt.geom.Rectangle2D;
import java.awt.geom.Line2D;
import java.awt.geom.Area;
import java.awt.geom.Point2D;
import java.awt.geom.PathIterator;
import java.awt.geom.Arc2D;
import java.awt.Color;
import java.awt.BasicStroke;
import java.awt.Shape;
import javax.swing.JPanel;
import javax.swing.JFrame;
import javax.swing.UIManager;
import java.awt.Graphics2D;
import java.awt.Graphics;

/**
 * A {@link Stroke} implementation that draws a repeated symbol (any
 * {@link Shape}) along a path at a fixed separation, optionally on top of the
 * normally stroked line.
 *
 * The path is flattened, then walked segment by segment; every time the
 * accumulated distance reaches {@code _symbolSeparation} the symbol is
 * rotated to follow the path direction and appended at that point.
 */
public class SymbolStroke implements Stroke {
	// Stroke used for the underlying line itself.
	protected BasicStroke _basicStroke = null;
	// Stroke used to outline each symbol (ignored when _symbolFillOnly).
	protected BasicStroke _symbolStroke = null;
	// The shape stamped along the path; null means plain stroking only.
	protected Shape _symbol = null;
	// Distance between consecutive symbols along the path; must be > 0.
	protected float _symbolSeparation = 0.0F;
	// If true, also place a symbol at the start of each subpath.
	protected boolean _startWithSymbol = false;
	// If true, the stroked base line is included in the returned shape.
	protected boolean _drawBasicLine = true;
	// If true, return the symbol outline itself (for filling) rather than
	// the symbol stroked by _symbolStroke.
	protected boolean _symbolFillOnly = false;

	/** Creates a stroke with default line/symbol strokes and no symbol. */
	public SymbolStroke() {
		_basicStroke = new BasicStroke();
		_symbolStroke = new BasicStroke();
	}

	/**
	 * Creates a stroke that only draws the base line with the given stroke.
	 * NOTE(review): this constructor leaves _symbolStroke null; that is only
	 * safe because _symbol is also null, so the symbol path is never taken.
	 *
	 * @param basicStroke stroke for the base line
	 */
	public SymbolStroke(BasicStroke basicStroke) {
		_basicStroke = basicStroke;
	}

	/**
	 * Creates a symbol stroke with default line/symbol strokes.
	 *
	 * @param symbol           shape stamped along the path
	 * @param symbolSeparation distance between symbols; must be &gt; 0
	 * @param startWithSymbol  also stamp a symbol at each subpath start
	 * @param drawBasicLine    include the stroked base line in the output
	 * @param symbolFillOnly   emit symbol outlines for filling instead of
	 *                         stroked symbols
	 * @throws IllegalArgumentException if symbolSeparation &lt;= 0
	 */
	public SymbolStroke(Shape symbol,float symbolSeparation,boolean startWithSymbol,boolean drawBasicLine,boolean symbolFillOnly) {
		this();
		_symbol = symbol;
		_symbolSeparation = symbolSeparation;
		if(_symbolSeparation <= 0.0F)
			throw new IllegalArgumentException("symbolSeparation must be greater than 0.0");
		_startWithSymbol = startWithSymbol;
		_drawBasicLine = drawBasicLine;
		_symbolFillOnly = symbolFillOnly;
	}

	/**
	 * Fully parameterized constructor.
	 *
	 * @throws IllegalArgumentException if either stroke is null or
	 *                                  symbolSeparation &lt;= 0
	 */
	public SymbolStroke(BasicStroke basicStroke,BasicStroke symbolStroke,Shape symbol,float symbolSeparation,boolean startWithSymbol,boolean drawBasicLine,boolean symbolFillOnly) {
		this(symbol,symbolSeparation,startWithSymbol,drawBasicLine,symbolFillOnly);
		_basicStroke = basicStroke;
		if(_basicStroke == null)
			throw new IllegalArgumentException("basicStroke cannot be null");
		_symbolStroke = symbolStroke;
		if(_symbolStroke == null)
			throw new IllegalArgumentException("symbolStroke cannot be null");
	}

	/**
	 * Returns the stroked outline of {@code s}: the base line (if enabled)
	 * plus one symbol every {@code _symbolSeparation} units of path length.
	 * The path is flattened (flatness 1.0), so curves become line segments.
	 */
	public Shape createStrokedShape(Shape s) {
		Shape basicShape = _basicStroke.createStrokedShape(s);
		// Without a symbol this degenerates to a plain BasicStroke.
		if(_symbol == null)
			return(basicShape);
		GeneralPath retVal = null;
		if(_drawBasicLine)
			retVal = new GeneralPath(basicShape);
		else
			retVal = new GeneralPath();
		// Flattened iteration: only MOVETO/LINETO/CLOSE segments appear.
		PathIterator pi = s.getPathIterator(null,1.0);
		float pt[] = new float[6];
		Point2D.Float prevPt = null;
		Point2D.Float currPt = null;
		// prevDist: distance already consumed on the previous segment(s);
		// currDist: running distance since the last symbol was placed.
		float prevDist = 0.0F;
		float currDist = 0.0F;
		boolean newSegment = false;
		while(!pi.isDone()) {
			int type = pi.currentSegment(pt);
			switch(type) {
				case PathIterator.SEG_MOVETO:
					prevPt = currPt;
					currPt = new Point2D.Float(pt[0],pt[1]);
					// A new subpath resets the distance accumulators.
					prevDist = 0.0F;
					currDist = 0.0F;
					newSegment = true;
					break;
				case PathIterator.SEG_LINETO:
					prevPt = currPt;
					currPt = new Point2D.Float(pt[0],pt[1]);
					if(prevPt == null)
						break;
					prevDist = currDist;
					currDist += prevPt.distance(currPt);
					// Place as many symbols as fit on this segment.
					while(currDist >= _symbolSeparation) {
						// Next symbol lies on the circle of this radius
						// around the segment start, intersected with the line.
						float radius = _symbolSeparation - prevDist;
						Point2D.Float center = prevPt;
						Line2D.Float line = new Line2D.Float(prevPt,currPt);
						Point2D.Float intersect = intersectLineAndCircle(center,radius,line);
						if((newSegment == true) && (_startWithSymbol == true)) {
							Shape symbolShape = getStrokedSymbolShape(line,prevPt);
							retVal.append(symbolShape,false);
							newSegment = false;
						}
						if(intersect == null) {
							//Not sure why intersection failed, but clean up anyway
							prevDist = 0.0F;
							currDist = 0.0F;
							break;
						}
						else {
							Shape symbolShape = getStrokedSymbolShape(line,intersect);
							retVal.append(symbolShape,false);
						}
						prevDist = 0.0F;
						prevPt = intersect;
						currDist -= _symbolSeparation;
					}
					break;
				case PathIterator.SEG_CLOSE:
					break;
			}
			pi.next();
		}
		return(retVal);
	}

	/**
	 * Rotates the symbol to match the direction of {@code line}, translates
	 * it to {@code intersect}, and returns either its outline (fill-only
	 * mode) or the outline stroked by {@code _symbolStroke}.
	 */
	protected Shape getStrokedSymbolShape(Line2D.Float line,Point2D.Float intersect) {
		//Find proper angle to draw symbol, relative to North, not East as usual
		float rotation = (float)Math.atan2(line.x1 - line.x2,line.y2 - line.y1) - ((float)Math.PI / 2.0F);
		//Transform shape to be anchored at intersection point
		AffineTransform rt = AffineTransform.getRotateInstance(rotation);
		Shape rotatedSymbol = rt.createTransformedShape(_symbol);
		AffineTransform tt = AffineTransform.getTranslateInstance(intersect.x,intersect.y);
		Shape transformedSymbol = tt.createTransformedShape(rotatedSymbol);
		Shape symbolShape = _symbolStroke.createStrokedShape(transformedSymbol);
		if(_symbolFillOnly == true)
			return(transformedSymbol);
		else
			return(symbolShape);
	}

	/**
	 * Returns the intersection of the circle (center {@code c}, radius
	 * {@code r}) with segment {@code l}, preferring the "positive" root, or
	 * null when no intersection lies within the segment's bounding box.
	 */
	//Based on Theorem 1.4 of http://www.sonoma.edu/users/w/wilsonst/Papers/Geometry/lines/default.html
	//Based on Theorem 3.2 of http://www.sonoma.edu/users/w/wilsonst/Papers/Geometry/circles/default.html
	//Based on Theorem 3.3 of http://www.sonoma.edu/users/w/wilsonst/Papers/Geometry/circles/default.html
	protected Point2D.Float intersectLineAndCircle(Point2D.Float c,float r,Line2D.Float l) {
		Point2D.Float retVal = null;
		float xPos,yPos,xNeg,yNeg;
		float m = (l.y2 - l.y1) / (l.x2 - l.x1);
		//Check for vertical line
		if(Float.isInfinite(m)) {
			float a = l.x1;
			xPos = a;
			xNeg = a;
			float aMinusXSquared = (a - c.x) * (a - c.x);
			float yRight = (float)Math.sqrt((r * r) - aMinusXSquared);
			if(Float.isNaN(yRight))
				return(null);
			yPos = c.y + yRight;
			yNeg = c.y - yRight;
		}
		else {
			// General case: substitute y = m*x + b into the circle equation.
			float b = ((l.x2 * l.y1) - (l.x1 * l.y2)) / (l.x2 - l.x1);
			float onePlusMSquared = 1.0F + (m * m);
			float xLeft = ((m * c.y) + c.x - (m * b)) / (onePlusMSquared);
			float xRightTopTop = c.y - (m * c.x) - b;
			float xRightTopBottom = (float)Math.sqrt(onePlusMSquared);
			if(Float.isNaN(xRightTopBottom))
				return(null);
			float xRightTop = (float)Math.sqrt((r * r) - ((xRightTopTop / xRightTopBottom) * (xRightTopTop / xRightTopBottom)));
			if(Float.isNaN(xRightTop))
				return(null);
			float xRight = xRightTop / (float)Math.sqrt(onePlusMSquared);
			if((Float.isNaN(xRight)) || (Float.isInfinite(xRight)))
				return(null);
			xPos = xLeft + xRight;
			xNeg = xLeft - xRight;
			float yLeft = ((m * m * c.y) + (m * c.x) + b) / (onePlusMSquared);
			float yRight = m * xRight;
			yPos = yLeft + yRight;
			yNeg = yLeft - yRight;
		}
		// Accept a root only if it falls inside the segment's extent.
		if((xPos >= Math.min(l.x1,l.x2)) && (xPos <= Math.max(l.x1,l.x2)) &&
				(yPos >= Math.min(l.y1,l.y2)) && (yPos <= Math.max(l.y1,l.y2)))
			retVal = new Point2D.Float(xPos,yPos);
		else if((xNeg >= Math.min(l.x1,l.x2)) && (xNeg <= Math.max(l.x1,l.x2)) &&
				(yNeg >= Math.min(l.y1,l.y2)) && (yNeg <= Math.max(l.y1,l.y2)))
			retVal = new Point2D.Float(xNeg,yNeg);
		return(retVal);
	}

	/** Interactive demo: draws a cubic curve stroked with square symbols. */
	public static void main(String args[]) {
		try {
			UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
		}
		catch(Exception ex) {
			// Fall back to the default look and feel.
		}
		JFrame f = new JFrame("SymbolStroke Demo");
		f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		JPanel mainPanel = new JPanel() {
			public void paintComponent(Graphics g) {
				super.paintComponent(g);
				Graphics2D g2d = (Graphics2D)g;
				Line2D.Float l = new Line2D.Float(0.0F,150.0F,300.0F,150.0F);
				Arc2D.Float a = new Arc2D.Float(10.0F,10.0F,310.0F,310.0F,0.0F,360.0F,Arc2D.OPEN);
				CubicCurve2D.Float c = new CubicCurve2D.Float(0.0F,150.0F,400.0F,-50.0F,-100.0F,350.0F,300.0F,150.0F);
				SymbolStroke stroke = new SymbolStroke(new BasicStroke(1.0F),new BasicStroke(1.5F),new Rectangle2D.Float(-4.0F,-4.0F,8.0F,8.0F),5.0F,false,false,true);
//				SymbolStroke stroke = new SymbolStroke(new BasicStroke(2.0F),new BasicStroke(2.0F),new Line2D.Float(0.0F,0.0F,0.0F,-4.0F),30.0F,false,true);
//				SymbolStroke stroke = new SymbolStroke(new BasicStroke(1.0F),new BasicStroke(2.0F),new CubicCurve2D.Float(-15.0F,0.0F,0.0F,15.0F,0.0F,-15.0F,15.0F,0.0F),30.0F,true,false);
				g2d.setStroke(stroke);
				g2d.draw(c);
			}
		};
		mainPanel.setPreferredSize(new Dimension(400,400));
		f.getContentPane().add(mainPanel);
		f.pack();
		f.setVisible(true);
	}
}
package tactician;

/**
 * This class represents which squares on the chessboard are "occupied" for a given representation.
 * For example you could have a bitboard for white bishops; the bitboard would represent that white
 * has bishops on the e5 and g7 squares, and nowhere else. You could also have a bitboard for
 * squares attacked by a knight on f6; in that case the bitboard would contain 8 occupied squares.
 *
 * <p>The data is stored in {@link #getData()} as a 64-bit long. The low bit 0x000000000000001L
 * says whether the a1 square (bottom-left from white's perspective) is occupied. The next bit
 * represents b1, the square to the right from white's perspective. It continues for c1-h1, a2-h2,
 * and so on. Finally the highest bit 0x8000000000000000L represents the h8 square.
 *
 * @author Phil Leszczynski
 */
public class Bitboard {
  /** Initializes a bitboard with no squares occupied. */
  public Bitboard() {
    this.data = 0L;
  }

  /**
   * Initializes a bitboard represented by given data.
   *
   * @param data a 64-bit long representing data as described in the class definition.
   */
  public Bitboard(long data) {
    this.data = data;
  }

  /**
   * Initializes a bitboard represented by given occupied squares.
   *
   * @param squares a list of squares to be occupied, e.g. "d4", "e7", "h2"
   */
  public Bitboard(String... squares) {
    long result = 0;
    for (String square : squares) {
      long mask = new Square(square).getMask();
      result |= mask;
    }
    this.data = result;
  }

  /**
   * Returns a bitboard for a given rank (horizontal row) of occupied squares on the chessboard.
   *
   * @param rank an integer from 0-7, 0 being the rank closest to the white player, 7 being the
   *        rank closest to the black player
   * @return a bitboard with squares in the rank occupied
   */
  public static Bitboard bitboardFromRank(int rank) {
    long zerothRankMask = 0x00000000000000ffL;
    long rankMask = zerothRankMask << (8 * rank);
    return new Bitboard(rankMask);
  }

  /**
   * Returns a bitboard for a given file (vertical row) of occupied squares on the chessboard.
   *
   * @param file an integer from 0-7, 0 being the left file from white's perspective, 7 being the
   *        right file from white's perspective
   * @return a bitboard with squares in the file occupied
   */
  public static Bitboard bitboardFromFile(int file) {
    long zerothFileMask = 0x0101010101010101L;
    long fileMask = zerothFileMask << file;
    return new Bitboard(fileMask);
  }

  /**
   * Returns a bitboard with the same occupied squares.
   *
   * @return a bitboard with the same occupied squares
   */
  public Bitboard copy() {
    return new Bitboard(this.data);
  }

  /** Clears the bitboard so that it no longer has any occupied squares. */
  public void clear() {
    this.data = 0;
  }

  /** Returns the hexadecimal representation of the bitboard's data. */
  @Override
  public String toString() {
    return Long.toHexString(this.data);
  }

  /**
   * Reflects the bitboard along the horizontal axis dividing the two players (the line between the
   * fourth and fifth ranks, 1-indexed). For example if the original bitboard has occupied squares
   * c2 and d5, the resulting bitboard will have occupied squares c7 and d4.
   *
   * @return the horizontally flipped bitboard
   */
  public Bitboard flip() {
    // Each rank occupies exactly one byte of the long, so reversing the
    // byte order reverses the rank order while preserving files. This
    // replaces the previous manual shift-and-mask loop with the intrinsic.
    return new Bitboard(Long.reverseBytes(this.data));
  }

  /**
   * Returns a modified bitboard which has squares occupied by the original AND by the mask. See
   * the class definition for how the mask's data is represented.
   *
   * @param mask a 64-bit long representing another bitboard with which to intersect the original
   * @return a bitboard containing all the original occupied squares that are also occupied by the
   *         mask
   */
  public Bitboard intersection(long mask) {
    return new Bitboard(this.data & mask);
  }

  /**
   * Returns a modified bitboard which has squares occupied by the original AND by the other
   * bitboard.
   *
   * @param other a bitboard with which to intersect the original
   * @return a bitboard containing the occupied squares of both this and the other bitboard
   */
  public Bitboard intersection(Bitboard other) {
    return new Bitboard(this.data & other.getData());
  }

  /**
   * Tests whether the bitboard has any squares in common with the mask. See the class definition
   * for how the mask's data is represented.
   *
   * @param mask a 64-bit long representing a bitboard with which to find common occupied squares
   * @return true if there are any occupied squares in common, false otherwise
   */
  public boolean intersects(long mask) {
    return this.intersection(mask).getData() != 0;
  }

  /**
   * Tests whether the bitboard has any squares in common with the other bitboard.
   *
   * @param other a bitboard with which to find common occupied squares
   * @return true if there are any occupied squares in common, false otherwise
   */
  public boolean intersects(Bitboard other) {
    return this.intersects(other.getData());
  }

  /**
   * Tests whether the bitboard contains the given square.
   *
   * @param square the square to check whether the bitboard occupies it
   * @return true if the square is occupied by this bitboard, false otherwise
   */
  public boolean intersects(Square square) {
    return this.intersects(square.getMask());
  }

  /**
   * Tests whether the bitboard has any occupied squares.
   *
   * @return true if there are no occupied squares, false if there is at least one
   */
  public boolean isEmpty() {
    return this.data == 0L;
  }

  /**
   * Returns a modified bitboard which has squares occupied by the original OR by the mask. See the
   * class definition for how the mask's data is represented.
   *
   * @param mask a 64-bit long representing another bitboard with which to union the original
   * @return a bitboard containing all the original occupied squares plus those occupied by the
   *         mask
   */
  public Bitboard union(long mask) {
    return new Bitboard(this.data | mask);
  }

  /**
   * Returns a modified bitboard which has squares occupied by the original OR by the other
   * bitboard.
   *
   * @param other a bitboard with which to union the original
   * @return a bitboard containing the occupied squares of either this or the other bitboard
   */
  public Bitboard union(Bitboard other) {
    return new Bitboard(this.data | other.getData());
  }

  /**
   * Updates the bitboard to remove all the squares occupied by the mask. For example if this has
   * occupied squares e7 and h2, and the mask has occupied squares h2 and c5, then this will end up
   * having only e7 occupied.
   *
   * @param mask a 64-bit long representing the occupied squares to remove
   */
  public void updateRemove(long mask) {
    // FIX: previously written as ~(mask ^ 0); the XOR with zero was a no-op.
    this.data &= ~mask;
  }

  /**
   * Updates the bitboard to remove all the squares occupied by the other bitboard. For example if
   * this has occupied squares e7 and h2, and other has occupied squares h2 and c5, then this will
   * end up having only e7 occupied.
   *
   * @param other a bitboard containing the occupied squares to remove
   */
  public void updateRemove(Bitboard other) {
    this.updateRemove(other.getData());
  }

  /**
   * Updates the bitboard to include all the squares occupied by the mask. For example if this has
   * occupied squares e7 and h2, and the mask has occupied squares h2 and c5, then this will end up
   * having e7, h2, and c5 occupied.
   *
   * @param mask a 64-bit long representing the occupied squares to add in
   */
  public void updateUnion(long mask) {
    this.data |= mask;
  }

  /**
   * Updates the bitboard to include all the squares occupied by the other bitboard. For example if
   * this has occupied squares e7 and h2, and the mask has occupied squares h2 and c5, then this
   * will end up having e7, h2, and c5 occupied.
   *
   * @param other a bitboard containing the occupied squares to add in
   */
  public void updateUnion(Bitboard other) {
    this.updateUnion(other.getData());
  }

  /**
   * Counts the number of occupied squares.
   *
   * @see <a href="http://en.wikipedia.org/wiki/Hamming_weight">Hamming Weight</a>
   * @return the number of occupied squares
   */
  public int numOccupied() {
    return Long.bitCount(this.data);
  }

  /**
   * Returns the number of empty squares starting from a1. For example if this has occupied squares
   * c2 and h5, then since a1-h1, a2, and b2 are empty, the result would be 10. If a1 is occupied
   * the result is 0.
   *
   * @return the number of starting empty squares
   */
  public int numEmptyStartingSquares() {
    return Long.numberOfTrailingZeros(this.data);
  }

  /**
   * Returns the 64-bit long representation of the bitboard. See the class definition for a
   * description of the long representation.
   *
   * @return the bitboard representation as a 64-bit long
   */
  public long getData() {
    return this.data;
  }

  /**
   * Sets the occupied squares based on a 64-bit long representation. See the class definition for
   * a description of the long representation.
   *
   * @param data the 64-bit long containing the occupied squares
   */
  public void setData(long data) {
    this.data = data;
  }

  /**
   * The internal 64-bit representation of the occupied squares. See the class definition for a
   * description of the long representation.
   */
  private long data = 0;
}
package org.opencb.opencga.storage.hadoop.variant.index.annotation;

import org.junit.Before;
import org.junit.Test;
import org.opencb.biodata.models.variant.avro.ConsequenceType;
import org.opencb.biodata.models.variant.avro.PopulationFrequency;
import org.opencb.biodata.models.variant.avro.SequenceOntologyTerm;
import org.opencb.biodata.models.variant.avro.VariantAnnotation;
import org.opencb.opencga.storage.hadoop.variant.index.sample.SampleIndexConfiguration;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.opencb.opencga.storage.hadoop.variant.index.annotation.AnnotationIndexConverter.*;

/**
 * Tests for {@link AnnotationIndexConverter}: verifies the summary-index bit masks produced for
 * consequence-type/biotype combinations and the per-population frequency index bytes.
 *
 * Created on 17/04/19.
 *
 * @author Jacobo Coll &lt;[email protected]&gt;
 */
public class AnnotationIndexConverterTest {

    private AnnotationIndexConverter converter;
    // Holds the last computed summary index so the (commented-out) tearDown can print it
    // when debugging a failing assertion.
    byte b;

    @Before
    public void setUp() throws Exception {
        // Six study populations so the pop-freq index spans multiple bytes.
        List<String> populations = Arrays.asList(
                "STUDY:POP_1",
                "STUDY:POP_2",
                "STUDY:POP_3",
                "STUDY:POP_4",
                "STUDY:POP_5",
                "STUDY:POP_6"
        );
        SampleIndexConfiguration configuration = new SampleIndexConfiguration().setPopulationRanges(
                populations.stream()
                        .map(SampleIndexConfiguration.PopulationFrequencyRange::new)
                        .collect(Collectors.toList()));
        converter = new AnnotationIndexConverter(configuration);
    }

//    @After
//    public void tearDown() throws Exception {
//        System.out.println(IndexUtils.byteToString(b));
//    }

    @Test
    public void testLof() {
        assertEquals(LOF_MASK | LOF_EXTENDED_MASK | POP_FREQ_ANY_001_MASK,
                b = converter.convert(annot(ct("stop_lost"))).getSummaryIndex());
    }

    @Test
    public void testLofe() {
        assertEquals(LOF_EXTENDED_MASK | MISSENSE_VARIANT_MASK | POP_FREQ_ANY_001_MASK,
                b = converter.convert(annot(ct("missense_variant"))).getSummaryIndex());
    }

    @Test
    public void testLofProtein() {
        assertEquals(LOF_MASK | LOF_EXTENDED_MASK | LOFE_PROTEIN_CODING_MASK | PROTEIN_CODING_MASK | POP_FREQ_ANY_001_MASK,
                b = converter.convert(annot(ct("stop_lost", "protein_coding"))).getSummaryIndex());
    }

    @Test
    public void testLofeProtein() {
        assertEquals(LOF_EXTENDED_MASK | MISSENSE_VARIANT_MASK | LOFE_PROTEIN_CODING_MASK | PROTEIN_CODING_MASK | POP_FREQ_ANY_001_MASK,
                b = converter.convert(annot(ct("missense_variant", "protein_coding"))).getSummaryIndex());
    }

    @Test
    public void testLofProteinDifferentTranscript() {
        // LOF and protein_coding occur on different transcripts, so the combined
        // LOFE_PROTEIN_CODING_MASK must NOT be set.
        assertEquals(LOF_MASK | LOF_EXTENDED_MASK | PROTEIN_CODING_MASK | POP_FREQ_ANY_001_MASK,
                b = converter.convert(annot(ct("stop_lost", "other"), ct("other", "protein_coding"))).getSummaryIndex());
    }

    @Test
    public void testLofeProteinDifferentTranscript() {
        assertEquals(LOF_EXTENDED_MASK | MISSENSE_VARIANT_MASK | PROTEIN_CODING_MASK | POP_FREQ_ANY_001_MASK,
                b = converter.convert(annot(ct("missense_variant", "other"), ct("other", "protein_coding"))).getSummaryIndex());
    }

    @Test
    public void testCtBtCombination() {
        // Matrix rows are consequence types (sorted), columns are biotypes; a set bit
        // means that ct+bt pair occurs on the same transcript.
        AnnotationIndexEntry entry = converter.convert(
                annot(ct("missense_variant", "pseudogene"), ct("pseudogene", "protein_coding")));
        byte[] ctBtIndex = entry.getCtBtCombination().getCtBtMatrix();
        assertEquals(1, ctBtIndex.length);
        assertEquals(1, entry.getCtBtCombination().getNumCt());
        assertEquals(1, entry.getCtBtCombination().getNumBt());
        assertEquals(0, ctBtIndex[0]); // No combination

        entry = converter.convert(
                annot(ct("missense_variant", "protein_coding"), ct("stop_lost", "protein_coding")));
        ctBtIndex = entry.getCtBtCombination().getCtBtMatrix();
        assertEquals(2, ctBtIndex.length);
        assertEquals(2, entry.getCtBtCombination().getNumCt());
        assertEquals(1, entry.getCtBtCombination().getNumBt());
        assertEquals(1, ctBtIndex[0]); // missense_variant
        assertEquals(1, ctBtIndex[1]); // stop_lost

        entry = converter.convert(
                annot(ct("missense_variant", "protein_coding"),
                        ct("stop_lost", "protein_coding"),
                        ct("stop_gained", "pseudogene")));
        ctBtIndex = entry.getCtBtCombination().getCtBtMatrix();
        assertEquals(3, ctBtIndex.length);
        assertEquals(3, entry.getCtBtCombination().getNumCt());
        assertEquals(1, entry.getCtBtCombination().getNumBt());
        assertEquals(1, ctBtIndex[0]); // missense_variant
        assertEquals(0, ctBtIndex[1]); // stop_gained
        assertEquals(1, ctBtIndex[2]); // stop_lost

        entry = converter.convert(annot(
                ct("missense_variant", "protein_coding"),
                ct("start_lost", "processed_transcript"),
                ct("start_lost", "protein_coding"),
                ct("stop_lost", "processed_transcript"),
                ct("stop_gained", "pseudogene")));
        ctBtIndex = entry.getCtBtCombination().getCtBtMatrix();
        assertEquals(4, ctBtIndex.length);
        assertEquals(4, entry.getCtBtCombination().getNumCt());
        // protein_coding + processed_transcript. biotype "other" does not count
        assertEquals(2, entry.getCtBtCombination().getNumBt());

        //protein_coding | processed_transcript
        assertEquals(0b10, ctBtIndex[0]); // missense_variant
        assertEquals(0b11, ctBtIndex[1]); // start_lost
        assertEquals(0b00, ctBtIndex[2]); // stop_gained
        assertEquals(0b01, ctBtIndex[3]); // stop_lost
    }

    @Test
    public void testIntergenic() {
        assertEquals(POP_FREQ_ANY_001_MASK | INTERGENIC_MASK,
                b = converter.convert(annot(ct("intergenic_variant"))).getSummaryIndex());
    }

    @Test
    public void testNonIntergenic() {
        // Intergenic and regulatory variants should be marked as intergenic
        assertEquals(POP_FREQ_ANY_001_MASK | INTERGENIC_MASK,
                b = converter.convert(annot(ct("intergenic_variant"), ct("regulatory_region_variant"))).getSummaryIndex());

        assertEquals(POP_FREQ_ANY_001_MASK | MISSENSE_VARIANT_MASK | LOF_EXTENDED_MASK | INTERGENIC_MASK,
                b = converter.convert(annot(ct("intergenic_variant"), ct("missense_variant"))).getSummaryIndex());
    }

    @Test
    public void testPopFreq() {
        // No population frequencies at all counts as "rare everywhere".
        assertEquals(POP_FREQ_ANY_001_MASK | INTERGENIC_MASK,
                b = converter.convert(annot()).getSummaryIndex());
    }

    @Test
    public void testPopFreqAny() {
        // Common in gnomAD but unobserved in 1kG: still rare in at least one population.
        assertEquals(POP_FREQ_ANY_001_MASK | INTERGENIC_MASK,
                b = converter.convert(annot(pf(GNOMAD_GENOMES, "ALL", 0.3))).getSummaryIndex());
    }

    @Test
    public void testPopFreqNone() {
        // Common in both reference populations: the pop-freq bit must be clear.
        assertEquals(INTERGENIC_MASK,
                b = converter.convert(annot(pf(GNOMAD_GENOMES, "ALL", 0.3), pf(K_GENOMES, "ALL", 0.3))).getSummaryIndex());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testDuplicatedPopulations() {
        List<String> populations = Arrays.asList("1kG_phase3:ALL", "GNOMAD_GENOMES:ALL", "1kG_phase3:ALL");
        SampleIndexConfiguration configuration = new SampleIndexConfiguration().setPopulationRanges(
                populations.stream()
                        .map(SampleIndexConfiguration.PopulationFrequencyRange::new)
                        .collect(Collectors.toList()));
        new AnnotationIndexConverter(configuration);
    }

    @Test
    public void testPopFreqMulti() {
        assertArrayEquals(new byte[]{0b11, 0, 0, 0, 0, 0},
                converter.convert(annot(pf("STUDY", "POP_1", 0.5))).getPopFreqIndex());
        assertArrayEquals(new byte[]{0b11, 0, 0, 0, 0, 0},
                converter.convert(annot(pf("STUDY", "POP_1", 0.5), pf(K_GENOMES, "ALL", 0.3))).getPopFreqIndex());
        assertArrayEquals(new byte[]{0b11, 0, 0, 0, 0, 0},
                converter.convert(annot(pf("STUDY", "POP_1", 0.5), pf("STUDY", "POP_2", 0))).getPopFreqIndex());
        assertArrayEquals(new byte[]{0b11, 0b10, 0, 0, 0, 0},
                converter.convert(annot(pf("STUDY", "POP_1", 0.5), pf("STUDY", "POP_2", 0.00501))).getPopFreqIndex());
        assertArrayEquals(new byte[]{0b11, 0, 0, 0b01, 0b11, 0},
                converter.convert(annot(pf("STUDY", "POP_1", 0.5), pf("STUDY", "POP_4", 0.001), pf("STUDY", "POP_5", 0.5))).getPopFreqIndex());
    }

    /** Builds an annotation with a single intergenic consequence type and no frequencies. */
    public static VariantAnnotation annot() {
        VariantAnnotation variantAnnotation = new VariantAnnotation();
        variantAnnotation.setConsequenceTypes(Arrays.asList(ct("intergenic_variant")));
        return variantAnnotation;
    }

    /** Builds an annotation from the given consequence types. */
    public static VariantAnnotation annot(ConsequenceType... value) {
        VariantAnnotation variantAnnotation = new VariantAnnotation();
        variantAnnotation.setConsequenceTypes(Arrays.asList(value));
        return variantAnnotation;
    }

    /** Builds an intergenic annotation carrying the given population frequencies. */
    public static VariantAnnotation annot(PopulationFrequency... value) {
        VariantAnnotation variantAnnotation = new VariantAnnotation();
        variantAnnotation.setPopulationFrequencies(Arrays.asList(value));
        variantAnnotation.setConsequenceTypes(Arrays.asList(ct("intergenic_variant")));
        return variantAnnotation;
    }

    /**
     * Builds a population frequency with the given alternate allele frequency (reference
     * frequency is derived as {@code 1 - af}).
     */
    public static PopulationFrequency pf(String study, String population, double af) {
        PopulationFrequency pf = new PopulationFrequency();
        pf.setStudy(study);
        pf.setPopulation(population);
        pf.setAltAlleleFreq((float) (af));
        pf.setRefAlleleFreq((float) (1 - af));
        return pf;
    }

    /** Builds a consequence type with an explicit transcript biotype. */
    public static ConsequenceType ct(String ct, String biotype) {
        ConsequenceType consequenceType = ct(ct);
        consequenceType.setBiotype(biotype);
        return consequenceType;
    }

    /** Builds a consequence type with the given sequence-ontology term and a dummy gene. */
    public static ConsequenceType ct(String ct) {
        ConsequenceType consequenceType = new ConsequenceType();
        consequenceType.setGeneName("Gene");
        consequenceType.setEnsemblGeneId("ENSEMBL_GENE");
        consequenceType.setSequenceOntologyTerms(Collections.singletonList(new SequenceOntologyTerm(ct, ct)));
        return consequenceType;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.event.Level;

/**
 * This class tests the cases of a concurrent reads/writes to a file;
 * ie, one writer and one or more readers can see unfinsihed blocks
 */
public class TestFileConcurrentReader {

  /** How the writer makes partially written data visible to readers. */
  private enum SyncType {
    SYNC,
    APPEND,
  }

  private static final Logger LOG =
      Logger.getLogger(TestFileConcurrentReader.class);

  {
    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.TRACE);
    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.TRACE);
    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
  }

  static final long seed = 0xDEADBEEFL;
  static final int blockSize = 8192;
  private static final int DEFAULT_WRITE_SIZE = 1024 + 1;
  private static final int SMALL_WRITE_SIZE = 61;

  private Configuration conf;
  private MiniDFSCluster cluster;
  private FileSystem fileSystem;

  @Before
  public void setUp() throws IOException {
    conf = new Configuration();
    init(conf);
  }

  @After
  public void tearDown() throws Exception {
    if (cluster != null) {
      cluster.shutdown();
      cluster = null;
    }
  }

  /** (Re)starts the mini cluster with the given configuration. */
  private void init(Configuration conf) throws IOException {
    if (cluster != null) {
      cluster.shutdown();
    }
    cluster = new MiniDFSCluster.Builder(conf).build();
    cluster.waitClusterUp();
    fileSystem = cluster.getFileSystem();
  }

  /** Writes {@code size} sequential bytes to the stream and hflushes them. */
  private void writeFileAndSync(FSDataOutputStream stm, int size)
      throws IOException {
    byte[] buffer = DFSTestUtil.generateSequentialBytes(0, size);
    stm.write(buffer, 0, size);
    stm.hflush();
  }

  /** Waits for the file to expose at least one block, then validates its bytes. */
  private void checkCanRead(FileSystem fileSys, Path path, int numBytes)
      throws IOException {
    waitForBlocks(fileSys, path);
    assertBytesAvailable(fileSys, path, numBytes);
  }

  // make sure bytes are available and match expected
  private void assertBytesAvailable(
      FileSystem fileSystem,
      Path path,
      int numBytes
  ) throws IOException {
    byte[] buffer = new byte[numBytes];
    // try-with-resources so the stream is closed even when readFully throws
    try (FSDataInputStream inputStream = fileSystem.open(path)) {
      IOUtils.readFully(inputStream, buffer, 0, numBytes);
    }

    assertTrue(
        "unable to validate bytes",
        validateSequentialBytes(buffer, 0, numBytes)
    );
  }

  /** Polls once a second until the file reports at least one block location. */
  private void waitForBlocks(FileSystem fileSys, Path name)
      throws IOException {
    // wait until we have at least one block in the file to read.
    boolean done = false;
    while (!done) {
      try {
        Thread.sleep(1000);
      } catch (InterruptedException ignored) {
        // deliberate: this is a best-effort polling delay in a test; an interrupt
        // just shortens the wait for the next poll
      }
      done = true;
      BlockLocation[] locations = fileSys.getFileBlockLocations(
          fileSys.getFileStatus(name), 0, blockSize);
      if (locations.length < 1) {
        done = false;
        continue;
      }
    }
  }

  /**
   * Test that that writes to an incomplete block are available to a reader
   */
  @Test (timeout = 30000)
  public void testUnfinishedBlockRead()
      throws IOException {
    // create a new file in the root, write data, do no close
    Path file1 = new Path("/unfinished-block");
    FSDataOutputStream stm = TestFileCreation.createFile(fileSystem, file1, 1);

    // write partial block and sync
    int partialBlockSize = blockSize / 2;
    writeFileAndSync(stm, partialBlockSize);

    // Make sure a client can read it before it is closed
    checkCanRead(fileSystem, file1, partialBlockSize);

    stm.close();
  }

  /**
   * test case: if the BlockSender decides there is only one packet to send,
   * the previous computation of the pktSize based on transferToAllowed
   * would result in too small a buffer to do the buffer-copy needed
   * for partial chunks.
   */
  @Test (timeout = 30000)
  public void testUnfinishedBlockPacketBufferOverrun() throws IOException {
    // check that / exists
    Path path = new Path("/");
    System.out.println("Path : \"" + path.toString() + "\"");

    // create a new file in the root, write data, do no close
    Path file1 = new Path("/unfinished-block");
    final FSDataOutputStream stm =
        TestFileCreation.createFile(fileSystem, file1, 1);

    // write partial block and sync
    final int bytesPerChecksum = conf.getInt("io.bytes.per.checksum", 512);
    final int partialBlockSize = bytesPerChecksum - 1;

    writeFileAndSync(stm, partialBlockSize);

    // Make sure a client can read it before it is closed
    checkCanRead(fileSystem, file1, partialBlockSize);

    stm.close();
  }

  // use a small block size and a large write so that DN is busy creating
  // new blocks.  This makes it almost 100% sure we can reproduce
  // case of client getting a DN that hasn't yet created the blocks
  @Test (timeout = 30000)
  public void testImmediateReadOfNewFile()
      throws IOException {
    final int blockSize = 64 * 1024;
    final int writeSize = 10 * blockSize;
    Configuration conf = new Configuration();

    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    init(conf);

    final int requiredSuccessfulOpens = 100;
    final Path file = new Path("/file1");
    final AtomicBoolean openerDone = new AtomicBoolean(false);
    final AtomicReference<String> errorMessage = new AtomicReference<String>();
    final FSDataOutputStream out = fileSystem.create(file);

    // writer keeps flushing new data until the opener has finished its opens
    final Thread writer = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          while (!openerDone.get()) {
            out.write(DFSTestUtil.generateSequentialBytes(0, writeSize));
            out.hflush();
          }
        } catch (IOException e) {
          LOG.warn("error in writer", e);
        } finally {
          try {
            out.close();
          } catch (IOException e) {
            // include the exception so close failures are diagnosable
            LOG.error("unable to close file", e);
          }
        }
      }
    });

    // opener must be able to open the under-construction file repeatedly
    Thread opener = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          for (int i = 0; i < requiredSuccessfulOpens; i++) {
            fileSystem.open(file).close();
          }
          openerDone.set(true);
        } catch (IOException e) {
          openerDone.set(true);
          errorMessage.set(String.format(
              "got exception : %s",
              StringUtils.stringifyException(e)
          ));
        } catch (Exception e) {
          openerDone.set(true);
          errorMessage.set(String.format(
              "got exception : %s",
              StringUtils.stringifyException(e)
          ));
          writer.interrupt();
          fail("here");
        }
      }
    });

    writer.start();
    opener.start();

    try {
      writer.join();
      opener.join();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }

    assertNull(errorMessage.get(), errorMessage.get());
  }

  // for some reason, using tranferTo evokes the race condition more often
  // so test separately
  @Test (timeout = 30000)
  public void testUnfinishedBlockCRCErrorTransferTo() throws IOException {
    runTestUnfinishedBlockCRCError(true, SyncType.SYNC, DEFAULT_WRITE_SIZE);
  }

  @Test (timeout = 30000)
  public void testUnfinishedBlockCRCErrorTransferToVerySmallWrite()
      throws IOException {
    runTestUnfinishedBlockCRCError(true, SyncType.SYNC, SMALL_WRITE_SIZE);
  }

  // fails due to issue w/append, disable
  @Ignore
  public void _testUnfinishedBlockCRCErrorTransferToAppend()
      throws IOException {
    runTestUnfinishedBlockCRCError(true, SyncType.APPEND, DEFAULT_WRITE_SIZE);
  }

  @Test (timeout = 30000)
  public void testUnfinishedBlockCRCErrorNormalTransfer() throws IOException {
    runTestUnfinishedBlockCRCError(false, SyncType.SYNC, DEFAULT_WRITE_SIZE);
  }

  @Test (timeout = 30000)
  public void testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite()
      throws IOException {
    runTestUnfinishedBlockCRCError(false, SyncType.SYNC, SMALL_WRITE_SIZE);
  }

  // fails due to issue w/append, disable
  @Ignore
  public void _testUnfinishedBlockCRCErrorNormalTransferAppend()
      throws IOException {
    runTestUnfinishedBlockCRCError(false, SyncType.APPEND, DEFAULT_WRITE_SIZE);
  }

  private void runTestUnfinishedBlockCRCError(
      final boolean transferToAllowed, SyncType syncType, int writeSize
  ) throws IOException {
    runTestUnfinishedBlockCRCError(
        transferToAllowed, syncType, writeSize, new Configuration()
    );
  }

  /**
   * Runs a writer thread that appends/flushes sequential bytes and a tailer thread
   * that continuously re-reads the growing file, asserting that no checksum error
   * or byte corruption is observed.
   */
  private void runTestUnfinishedBlockCRCError(
      final boolean transferToAllowed,
      final SyncType syncType,
      final int writeSize,
      Configuration conf
  ) throws IOException {
    conf.setBoolean(DFSConfigKeys.DFS_DATANODE_TRANSFERTO_ALLOWED_KEY,
        transferToAllowed);
    init(conf);

    final Path file = new Path("/block-being-written-to");
    final int numWrites = 2000;
    final AtomicBoolean writerDone = new AtomicBoolean(false);
    final AtomicBoolean writerStarted = new AtomicBoolean(false);
    final AtomicBoolean error = new AtomicBoolean(false);

    final Thread writer = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          FSDataOutputStream outputStream = fileSystem.create(file);
          if (syncType == SyncType.APPEND) {
            outputStream.close();
            outputStream = fileSystem.append(file);
          }
          try {
            for (int i = 0; !error.get() && i < numWrites; i++) {
              final byte[] writeBuf =
                  DFSTestUtil.generateSequentialBytes(i * writeSize, writeSize);
              outputStream.write(writeBuf);
              if (syncType == SyncType.SYNC) {
                outputStream.hflush();
              }
              writerStarted.set(true);
            }
          } catch (IOException e) {
            error.set(true);
            LOG.error("error writing to file", e);
          } finally {
            outputStream.close();
          }
          writerDone.set(true);
        } catch (Exception e) {
          LOG.error("error in writer", e);

          throw new RuntimeException(e);
        }
      }
    });
    Thread tailer = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          long startPos = 0;
          while (!writerDone.get() && !error.get()) {
            if (writerStarted.get()) {
              try {
                startPos = tailFile(file, startPos);
              } catch (IOException e) {
                LOG.error(String.format("error tailing file %s", file), e);

                throw new RuntimeException(e);
              }
            }
          }
        } catch (RuntimeException e) {
          if (e.getCause() instanceof ChecksumException) {
            error.set(true);
          }

          writer.interrupt();
          LOG.error("error in tailer", e);
          throw e;
        }
      }
    });

    writer.start();
    tailer.start();

    try {
      writer.join();
      tailer.join();

      assertFalse(
          "error occurred, see log above", error.get()
      );
    } catch (InterruptedException e) {
      LOG.info("interrupted waiting for writer or tailer to complete");

      Thread.currentThread().interrupt();
    }
  }

  /**
   * Checks that {@code buf[0..len)} holds the expected sequential pattern
   * (values cycle mod 127) starting at absolute position {@code startPos}.
   */
  private boolean validateSequentialBytes(byte[] buf, int startPos, int len) {
    for (int i = 0; i < len; i++) {
      int expected = (i + startPos) % 127;

      if (buf[i] % 127 != expected) {
        // report the actual failing absolute position, not just the buffer start
        LOG.error(String.format("at position [%d], got [%d] and expected [%d]",
            startPos + i, buf[i], expected));

        return false;
      }
    }

    return true;
  }

  /**
   * Reads the file from {@code startPos} to EOF, validating the sequential
   * byte pattern, and returns the position to resume from.
   */
  private long tailFile(Path file, long startPos) throws IOException {
    long numRead = 0;
    // try-with-resources: the original leaked the stream when a
    // ChecksumException was thrown mid-read
    try (FSDataInputStream inputStream = fileSystem.open(file)) {
      inputStream.seek(startPos);

      int len = 4 * 1024;
      byte[] buf = new byte[len];
      int read;
      while ((read = inputStream.read(buf)) > -1) {
        LOG.info(String.format("read %d bytes", read));

        if (!validateSequentialBytes(buf, (int) (startPos + numRead), read)) {
          LOG.error(String.format("invalid bytes: [%s]\n", Arrays.toString(buf)));
          throw new ChecksumException("unable to validate bytes", startPos);
        }

        numRead += read;
      }
    }

    // NOTE(review): the "- 1" makes the returned resume position point at the
    // last byte read rather than one past it, so consecutive tails re-read one
    // byte; preserved as-is -- confirm intended before changing.
    return numRead + startPos - 1;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.executiongraph; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.ArchivedExecutionConfig; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.JobStatus; import org.apache.flink.api.common.accumulators.Accumulator; import org.apache.flink.api.common.accumulators.AccumulatorHelper; import org.apache.flink.api.common.time.Time; import org.apache.flink.configuration.Configuration; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.SimpleCounter; import org.apache.flink.runtime.JobException; import org.apache.flink.runtime.accumulators.AccumulatorSnapshot; import org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult; import org.apache.flink.runtime.blob.BlobWriter; import org.apache.flink.runtime.blob.PermanentBlobKey; import org.apache.flink.runtime.checkpoint.CheckpointCoordinator; import org.apache.flink.runtime.checkpoint.CheckpointFailureManager; import org.apache.flink.runtime.checkpoint.CheckpointIDCounter; import org.apache.flink.runtime.checkpoint.CheckpointPlanCalculator; import 
org.apache.flink.runtime.checkpoint.CheckpointStatsSnapshot; import org.apache.flink.runtime.checkpoint.CheckpointStatsTracker; import org.apache.flink.runtime.checkpoint.CheckpointsCleaner; import org.apache.flink.runtime.checkpoint.CompletedCheckpointStore; import org.apache.flink.runtime.checkpoint.DefaultCheckpointPlanCalculator; import org.apache.flink.runtime.checkpoint.ExecutionAttemptMappingProvider; import org.apache.flink.runtime.checkpoint.MasterTriggerRestoreHook; import org.apache.flink.runtime.checkpoint.OperatorCoordinatorCheckpointContext; import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutor; import org.apache.flink.runtime.deployment.TaskDeploymentDescriptorFactory; import org.apache.flink.runtime.entrypoint.ClusterEntryPointExceptionUtils; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.failover.flip1.ResultPartitionAvailabilityChecker; import org.apache.flink.runtime.executiongraph.failover.flip1.partitionrelease.PartitionGroupReleaseStrategy; import org.apache.flink.runtime.io.network.partition.JobMasterPartitionTracker; import org.apache.flink.runtime.io.network.partition.ResultPartitionID; import org.apache.flink.runtime.jobgraph.IntermediateDataSetID; import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.tasks.CheckpointCoordinatorConfiguration; import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup; import org.apache.flink.runtime.operators.coordination.CoordinatorStore; import org.apache.flink.runtime.operators.coordination.CoordinatorStoreImpl; import org.apache.flink.runtime.query.KvStateLocationRegistry; import org.apache.flink.runtime.scheduler.InternalFailuresListener; import org.apache.flink.runtime.scheduler.SsgNetworkMemoryCalculationUtils; import 
org.apache.flink.runtime.scheduler.VertexParallelismInformation; import org.apache.flink.runtime.scheduler.VertexParallelismStore; import org.apache.flink.runtime.scheduler.adapter.DefaultExecutionTopology; import org.apache.flink.runtime.scheduler.strategy.ConsumedPartitionGroup; import org.apache.flink.runtime.scheduler.strategy.ExecutionVertexID; import org.apache.flink.runtime.scheduler.strategy.SchedulingExecutionVertex; import org.apache.flink.runtime.scheduler.strategy.SchedulingResultPartition; import org.apache.flink.runtime.scheduler.strategy.SchedulingTopology; import org.apache.flink.runtime.shuffle.ShuffleMaster; import org.apache.flink.runtime.state.CheckpointStorage; import org.apache.flink.runtime.state.StateBackend; import org.apache.flink.runtime.taskmanager.DispatcherThreadFactory; import org.apache.flink.types.Either; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FlinkException; import org.apache.flink.util.IterableUtils; import org.apache.flink.util.OptionalFailure; import org.apache.flink.util.SerializedValue; import org.apache.flink.util.concurrent.FutureUtils; import org.apache.flink.util.concurrent.FutureUtils.ConjunctFuture; import org.apache.flink.util.concurrent.ScheduledExecutorServiceAdapter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.stream.Collectors; import static 
org.apache.flink.util.Preconditions.checkNotNull; import static org.apache.flink.util.Preconditions.checkState; /** Default implementation of the {@link ExecutionGraph}. */ public class DefaultExecutionGraph implements ExecutionGraph, InternalExecutionGraphAccessor { /** The log object used for debugging. */ static final Logger LOG = LoggerFactory.getLogger(ExecutionGraph.class); // -------------------------------------------------------------------------------------------- /** Job specific information like the job id, job name, job configuration, etc. */ private final JobInformation jobInformation; /** Serialized job information or a blob key pointing to the offloaded job information. */ private final Either<SerializedValue<JobInformation>, PermanentBlobKey> jobInformationOrBlobKey; /** The executor which is used to execute futures. */ private final ScheduledExecutorService futureExecutor; /** The executor which is used to execute blocking io operations. */ private final Executor ioExecutor; /** {@link CoordinatorStore} shared across all operator coordinators within this execution. */ private final CoordinatorStore coordinatorStore = new CoordinatorStoreImpl(); /** Executor that runs tasks in the job manager's main thread. */ @Nonnull private ComponentMainThreadExecutor jobMasterMainThreadExecutor; /** {@code true} if all source tasks are stoppable. */ private boolean isStoppable = true; /** All job vertices that are part of this graph. */ private final Map<JobVertexID, ExecutionJobVertex> tasks; /** All vertices, in the order in which they were created. * */ private final List<ExecutionJobVertex> verticesInCreationOrder; /** All intermediate results that are part of this graph. */ private final Map<IntermediateDataSetID, IntermediateResult> intermediateResults; /** The currently executed tasks, for callbacks. 
*/ private final Map<ExecutionAttemptID, Execution> currentExecutions; /** * Listeners that receive messages when the entire job switches it status (such as from RUNNING * to FINISHED). */ private final List<JobStatusListener> jobStatusListeners; /** * Timestamps (in milliseconds as returned by {@code System.currentTimeMillis()} when the * execution graph transitioned into a certain state. The index into this array is the ordinal * of the enum value, i.e. the timestamp when the graph went into state "RUNNING" is at {@code * stateTimestamps[RUNNING.ordinal()]}. */ private final long[] stateTimestamps; /** The timeout for all messages that require a response/acknowledgement. */ private final Time rpcTimeout; /** The classloader for the user code. Needed for calls into user code classes. */ private final ClassLoader userClassLoader; /** Registered KvState instances reported by the TaskManagers. */ private final KvStateLocationRegistry kvStateLocationRegistry; /** Blob writer used to offload RPC messages. */ private final BlobWriter blobWriter; /** Number of total job vertices. */ private int numJobVerticesTotal; private final PartitionGroupReleaseStrategy.Factory partitionGroupReleaseStrategyFactory; private PartitionGroupReleaseStrategy partitionGroupReleaseStrategy; private DefaultExecutionTopology executionTopology; @Nullable private InternalFailuresListener internalTaskFailuresListener; /** Counts all restarts. Used by other Gauges/Meters and does not register to metric group. */ private final Counter numberOfRestartsCounter = new SimpleCounter(); // ------ Configuration of the Execution ------- private final TaskDeploymentDescriptorFactory.PartitionLocationConstraint partitionLocationConstraint; /** The maximum number of prior execution attempts kept in history. */ private final int maxPriorAttemptsHistoryLength; // ------ Execution status and progress. These values are volatile, and accessed under the lock // ------- /** Number of finished job vertices. 
*/ private int numFinishedJobVertices; /** Current status of the job execution. */ private volatile JobStatus state = JobStatus.CREATED; /** A future that completes once the job has reached a terminal state. */ private final CompletableFuture<JobStatus> terminationFuture = new CompletableFuture<>(); /** * The exception that caused the job to fail. This is set to the first root exception that was * not recoverable and triggered job failure. */ private Throwable failureCause; /** * The extended failure cause information for the job. This exists in addition to * 'failureCause', to let 'failureCause' be a strong reference to the exception, while this info * holds no strong reference to any user-defined classes. */ private ErrorInfo failureInfo; private final JobMasterPartitionTracker partitionTracker; private final ResultPartitionAvailabilityChecker resultPartitionAvailabilityChecker; /** Future for an ongoing or completed scheduling action. */ @Nullable private CompletableFuture<Void> schedulingFuture; private final VertexAttemptNumberStore initialAttemptCounts; private final VertexParallelismStore parallelismStore; // ------ Fields that are relevant to the execution and need to be cleared before archiving // ------- @Nullable private CheckpointCoordinatorConfiguration checkpointCoordinatorConfiguration; /** The coordinator for checkpoints, if snapshot checkpoints are enabled. */ @Nullable private CheckpointCoordinator checkpointCoordinator; /** TODO, replace it with main thread executor. */ @Nullable private ScheduledExecutorService checkpointCoordinatorTimer; /** * Checkpoint stats tracker separate from the coordinator in order to be available after * archiving. 
     */
    private CheckpointStatsTracker checkpointStatsTracker;

    // ------ Fields that are only relevant for archived execution graphs ------------

    @Nullable private String stateBackendName;

    @Nullable private String checkpointStorageName;

    private String jsonPlan;

    /** Shuffle master to register partitions for task deployment. */
    private final ShuffleMaster<?> shuffleMaster;

    private final ExecutionDeploymentListener executionDeploymentListener;
    private final ExecutionStateUpdateListener executionStateUpdateListener;

    private final EdgeManager edgeManager;

    private final Map<ExecutionVertexID, ExecutionVertex> executionVerticesById;
    private final Map<IntermediateResultPartitionID, IntermediateResultPartition>
            resultPartitionsById;

    private final boolean isDynamic;

    // --------------------------------------------------------------------------------------------
    //   Constructors
    // --------------------------------------------------------------------------------------------

    public DefaultExecutionGraph(
            JobInformation jobInformation,
            ScheduledExecutorService futureExecutor,
            Executor ioExecutor,
            Time rpcTimeout,
            int maxPriorAttemptsHistoryLength,
            ClassLoader userClassLoader,
            BlobWriter blobWriter,
            PartitionGroupReleaseStrategy.Factory partitionGroupReleaseStrategyFactory,
            ShuffleMaster<?> shuffleMaster,
            JobMasterPartitionTracker partitionTracker,
            TaskDeploymentDescriptorFactory.PartitionLocationConstraint partitionLocationConstraint,
            ExecutionDeploymentListener executionDeploymentListener,
            ExecutionStateUpdateListener executionStateUpdateListener,
            long initializationTimestamp,
            VertexAttemptNumberStore initialAttemptCounts,
            VertexParallelismStore vertexParallelismStore,
            boolean isDynamic)
            throws IOException {

        this.jobInformation = checkNotNull(jobInformation);

        this.blobWriter = checkNotNull(blobWriter);

        this.partitionLocationConstraint = checkNotNull(partitionLocationConstraint);

        // the job information may be offloaded to the BLOB store if it exceeds the RPC limit;
        // may throw IOException, hence the constructor's throws clause
        this.jobInformationOrBlobKey =
                BlobWriter.serializeAndTryOffload(
                        jobInformation, jobInformation.getJobId(), blobWriter);

        this.futureExecutor = checkNotNull(futureExecutor);
        this.ioExecutor = checkNotNull(ioExecutor);

        this.userClassLoader = checkNotNull(userClassLoader, "userClassLoader");

        this.tasks = new HashMap<>(16);
        this.intermediateResults = new HashMap<>(16);
        this.verticesInCreationOrder = new ArrayList<>(16);
        this.currentExecutions = new HashMap<>(16);

        this.jobStatusListeners = new ArrayList<>();

        // INITIALIZING is stamped with the externally supplied timestamp, CREATED with "now"
        this.stateTimestamps = new long[JobStatus.values().length];
        this.stateTimestamps[JobStatus.INITIALIZING.ordinal()] = initializationTimestamp;
        this.stateTimestamps[JobStatus.CREATED.ordinal()] = System.currentTimeMillis();

        this.rpcTimeout = checkNotNull(rpcTimeout);

        this.partitionGroupReleaseStrategyFactory =
                checkNotNull(partitionGroupReleaseStrategyFactory);

        this.kvStateLocationRegistry =
                new KvStateLocationRegistry(jobInformation.getJobId(), getAllVertices());

        this.maxPriorAttemptsHistoryLength = maxPriorAttemptsHistoryLength;

        this.schedulingFuture = null;
        // placeholder executor that fails fast until start(...) installs the real one
        this.jobMasterMainThreadExecutor =
                new ComponentMainThreadExecutor.DummyComponentMainThreadExecutor(
                        "ExecutionGraph is not initialized with proper main thread executor. "
                                + "Call to ExecutionGraph.start(...) required.");

        this.shuffleMaster = checkNotNull(shuffleMaster);

        this.partitionTracker = checkNotNull(partitionTracker);

        this.resultPartitionAvailabilityChecker =
                new ExecutionGraphResultPartitionAvailabilityChecker(
                        this::createResultPartitionId, partitionTracker);

        this.executionDeploymentListener = executionDeploymentListener;
        this.executionStateUpdateListener = executionStateUpdateListener;

        this.initialAttemptCounts = initialAttemptCounts;

        this.parallelismStore = vertexParallelismStore;

        this.edgeManager = new EdgeManager();
        this.executionVerticesById = new HashMap<>();
        this.resultPartitionsById = new HashMap<>();

        this.isDynamic = isDynamic;
    }

    @Override
    public void start(@Nonnull ComponentMainThreadExecutor jobMasterMainThreadExecutor) {
        this.jobMasterMainThreadExecutor = jobMasterMainThreadExecutor;
    }

    // --------------------------------------------------------------------------------------------
    //  Configuration of Data-flow wide execution settings
    // --------------------------------------------------------------------------------------------

    @Override
    public SchedulingTopology getSchedulingTopology() {
        return executionTopology;
    }

    @Override
    public TaskDeploymentDescriptorFactory.PartitionLocationConstraint
            getPartitionLocationConstraint() {
        return partitionLocationConstraint;
    }

    @Override
    @Nonnull
    public ComponentMainThreadExecutor getJobMasterMainThreadExecutor() {
        return jobMasterMainThreadExecutor;
    }

    @Override
    public Optional<String> getStateBackendName() {
        return Optional.ofNullable(stateBackendName);
    }

    @Override
    public Optional<String> getCheckpointStorageName() {
        return Optional.ofNullable(checkpointStorageName);
    }

    /**
     * Enables checkpointing for this graph. May only be called once, while the job is still in
     * CREATED state. Builds the {@link CheckpointCoordinator}, its failure manager and timer
     * thread, and registers the given master hooks.
     */
    @Override
    public void enableCheckpointing(
            CheckpointCoordinatorConfiguration chkConfig,
            List<MasterTriggerRestoreHook<?>> masterHooks,
            CheckpointIDCounter checkpointIDCounter,
            CompletedCheckpointStore checkpointStore,
            StateBackend checkpointStateBackend,
            CheckpointStorage checkpointStorage,
            CheckpointStatsTracker statsTracker,
            CheckpointsCleaner checkpointsCleaner) {

        checkState(state == JobStatus.CREATED, "Job must be in CREATED state");
        checkState(checkpointCoordinator == null, "checkpointing already enabled");

        final Collection<OperatorCoordinatorCheckpointContext> operatorCoordinators =
                buildOpCoordinatorCheckpointContexts();

        checkpointStatsTracker = checkNotNull(statsTracker, "CheckpointStatsTracker");
        checkpointCoordinatorConfiguration =
                checkNotNull(chkConfig, "CheckpointCoordinatorConfiguration");

        // failure callbacks are re-dispatched onto the JobMaster main thread
        CheckpointFailureManager failureManager =
                new CheckpointFailureManager(
                        chkConfig.getTolerableCheckpointFailureNumber(),
                        new CheckpointFailureManager.FailJobCallback() {
                            @Override
                            public void failJob(Throwable cause) {
                                getJobMasterMainThreadExecutor().execute(() -> failGlobal(cause));
                            }

                            @Override
                            public void failJobDueToTaskFailure(
                                    Throwable cause, ExecutionAttemptID failingTask) {
                                getJobMasterMainThreadExecutor()
                                        .execute(
                                                () ->
                                                        failGlobalIfExecutionIsStillRunning(
                                                                cause, failingTask));
                            }
                        });

        checkState(checkpointCoordinatorTimer == null);

        checkpointCoordinatorTimer =
                Executors.newSingleThreadScheduledExecutor(
                        new DispatcherThreadFactory(
                                Thread.currentThread().getThreadGroup(), "Checkpoint Timer"));

        // create the coordinator that triggers and commits checkpoints and holds the state
        checkpointCoordinator =
                new CheckpointCoordinator(
                        jobInformation.getJobId(),
                        chkConfig,
                        operatorCoordinators,
                        checkpointIDCounter,
                        checkpointStore,
                        checkpointStorage,
                        ioExecutor,
                        checkpointsCleaner,
                        new ScheduledExecutorServiceAdapter(checkpointCoordinatorTimer),
                        failureManager,
                        createCheckpointPlanCalculator(
                                chkConfig.isEnableCheckpointsAfterTasksFinish()),
                        new ExecutionAttemptMappingProvider(getAllExecutionVertices()),
                        checkpointStatsTracker);

        // register the master hooks on the checkpoint coordinator
        for (MasterTriggerRestoreHook<?> hook : masterHooks) {
            if (!checkpointCoordinator.addMasterHook(hook)) {
                LOG.warn(
                        "Trying to register multiple checkpoint hooks with the name: {}",
                        hook.getIdentifier());
            }
        }

        if (checkpointCoordinator.isPeriodicCheckpointingConfigured()) {
            // the periodic checkpoint scheduler is activated and deactivated as a result of
            // job status changes (running -> on, all other states -> off)
            registerJobStatusListener(checkpointCoordinator.createActivatorDeactivator());
        }

        // remembered here so they survive archiving (see getStateBackendName/..StorageName)
        this.stateBackendName = checkpointStateBackend.getClass().getSimpleName();
        this.checkpointStorageName = checkpointStorage.getClass().getSimpleName();
    }

    private CheckpointPlanCalculator createCheckpointPlanCalculator(
            boolean enableCheckpointsAfterTasksFinish) {
        return new DefaultCheckpointPlanCalculator(
                getJobID(),
                new ExecutionGraphCheckpointPlanCalculatorContext(this),
                getVerticesTopologically(),
                enableCheckpointsAfterTasksFinish);
    }

    @Override
    @Nullable
    public CheckpointCoordinator getCheckpointCoordinator() {
        return checkpointCoordinator;
    }

    @Override
    public KvStateLocationRegistry getKvStateLocationRegistry() {
        return kvStateLocationRegistry;
    }

    @Override
    public CheckpointCoordinatorConfiguration getCheckpointCoordinatorConfiguration() {
        // null until enableCheckpointing() has been called
        if (checkpointCoordinatorConfiguration != null) {
            return checkpointCoordinatorConfiguration;
        } else {
            return null;
        }
    }

    @Override
    public CheckpointStatsSnapshot getCheckpointStatsSnapshot() {
        if (checkpointStatsTracker != null) {
            return checkpointStatsTracker.createSnapshot();
        } else {
            return null;
        }
    }

    /** Collects the operator coordinators of all vertices attached so far. */
    private Collection<OperatorCoordinatorCheckpointContext>
            buildOpCoordinatorCheckpointContexts() {
        final ArrayList<OperatorCoordinatorCheckpointContext> contexts = new ArrayList<>();
        for (final ExecutionJobVertex vertex : verticesInCreationOrder) {
            contexts.addAll(vertex.getOperatorCoordinators());
        }
        contexts.trimToSize();
        return contexts;
    }

    // --------------------------------------------------------------------------------------------
    //  Properties and Status of the Execution Graph
    // --------------------------------------------------------------------------------------------

    @Override
    public void setJsonPlan(String jsonPlan) {
        this.jsonPlan = jsonPlan;
    }

    @Override
    public String getJsonPlan() {
        return jsonPlan;
    }

    @Override
    public Either<SerializedValue<JobInformation>, PermanentBlobKey> getJobInformationOrBlobKey() {
        return jobInformationOrBlobKey;
    }

    @Override
    public JobID getJobID() {
        return jobInformation.getJobId();
    }

    @Override
    public String getJobName() {
        return jobInformation.getJobName();
    }

    @Override
    public boolean isStoppable() {
        return this.isStoppable;
    }

    @Override
    public Configuration getJobConfiguration() {
        return jobInformation.getJobConfiguration();
    }

    @Override
    public ClassLoader getUserClassLoader() {
        return this.userClassLoader;
    }

    @Override
    public JobStatus getState() {
        return state;
    }

    @Override
    public Throwable getFailureCause() {
        return failureCause;
    }

    public ErrorInfo getFailureInfo() {
        return failureInfo;
    }

    @Override
    public long getNumberOfRestarts() {
        return numberOfRestartsCounter.getCount();
    }

    @Override
    public int getNumFinishedVertices() {
        return IterableUtils.toStream(getVerticesTopologically())
                .map(ExecutionJobVertex::getNumExecutionVertexFinished)
                .mapToInt(Integer::intValue)
                .sum();
    }

    @Override
    public ExecutionJobVertex getJobVertex(JobVertexID id) {
        return this.tasks.get(id);
    }

    @Override
    public Map<JobVertexID, ExecutionJobVertex> getAllVertices() {
        return Collections.unmodifiableMap(this.tasks);
    }

    @Override
    public Iterable<ExecutionJobVertex> getVerticesTopologically() {
        // we return a specific iterator that does not fail with concurrent modifications
        // the list is append only, so it is safe for that
        final int numElements = this.verticesInCreationOrder.size();

        return new Iterable<ExecutionJobVertex>() {
            @Override
            public Iterator<ExecutionJobVertex> iterator() {
                return new Iterator<ExecutionJobVertex>() {
                    private int pos = 0;

                    @Override
                    public boolean hasNext() {
                        return pos < numElements;
                    }

                    @Override
                    public ExecutionJobVertex next() {
                        if (hasNext()) {
                            return verticesInCreationOrder.get(pos++);
                        } else {
                            throw new NoSuchElementException();
                        }
                    }

                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        };
    }

    @Override
    public Map<IntermediateDataSetID, IntermediateResult> getAllIntermediateResults() {
        return Collections.unmodifiableMap(this.intermediateResults);
    }

    @Override
    public Iterable<ExecutionVertex> getAllExecutionVertices() {
        return () -> new AllVerticesIterator<>(getVerticesTopologically().iterator());
    }

    @Override
    public EdgeManager getEdgeManager() {
        return edgeManager;
    }

    @Override
    public ExecutionVertex getExecutionVertexOrThrow(ExecutionVertexID id) {
        return checkNotNull(executionVerticesById.get(id));
    }

    @Override
    public IntermediateResultPartition getResultPartitionOrThrow(
            final IntermediateResultPartitionID id) {
        return checkNotNull(resultPartitionsById.get(id));
    }

    @Override
    public long getStatusTimestamp(JobStatus status) {
        return this.stateTimestamps[status.ordinal()];
    }

    @Override
    public final BlobWriter getBlobWriter() {
        return blobWriter;
    }

    @Override
    public Executor getFutureExecutor() {
        return futureExecutor;
    }

    /** Merges the user accumulators of all current execution attempts into one map. */
    @Override
    public Map<String, OptionalFailure<Accumulator<?, ?>>> aggregateUserAccumulators() {
        Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>();

        for (ExecutionVertex vertex : getAllExecutionVertices()) {
            Map<String, Accumulator<?, ?>> next =
                    vertex.getCurrentExecutionAttempt().getUserAccumulators();
            if (next != null) {
                AccumulatorHelper.mergeInto(userAccumulators, next);
            }
        }

        return userAccumulators;
    }

    /**
     * Gets a serialized accumulator map.
     *
     * @return The accumulator map with serialized accumulator values.
     */
    @Override
    public Map<String, SerializedValue<OptionalFailure<Object>>> getAccumulatorsSerialized() {
        return aggregateUserAccumulators().entrySet().stream()
                .collect(
                        Collectors.toMap(
                                Map.Entry::getKey,
                                entry -> serializeAccumulator(entry.getKey(), entry.getValue())));
    }

    /**
     * Serializes a single accumulator value. Serialization failures are captured and returned as
     * a serialized failure rather than thrown, so one bad accumulator cannot break the whole map.
     */
    private static SerializedValue<OptionalFailure<Object>> serializeAccumulator(
            String name, OptionalFailure<Accumulator<?, ?>> accumulator) {
        try {
            if (accumulator.isFailure()) {
                return new SerializedValue<>(
                        OptionalFailure.ofFailure(accumulator.getFailureCause()));
            }
            return new SerializedValue<>(
                    OptionalFailure.of(accumulator.getUnchecked().getLocalValue()));
        } catch (IOException ioe) {
            LOG.error("Could not serialize accumulator " + name + '.', ioe);
            try {
                return new SerializedValue<>(OptionalFailure.ofFailure(ioe));
            } catch (IOException e) {
                throw new RuntimeException(
                        "It should never happen that we cannot serialize the accumulator serialization exception.",
                        e);
            }
        }
    }

    /**
     * Returns a stringified version of the user-defined accumulators.
     *
     * @return an Array containing the StringifiedAccumulatorResult objects
     */
    @Override
    public StringifiedAccumulatorResult[] getAccumulatorResultsStringified() {
        Map<String, OptionalFailure<Accumulator<?, ?>>> accumulatorMap =
                aggregateUserAccumulators();
        return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulatorMap);
    }

    /** Installs the failure listener; may be called at most once. */
    @Override
    public void setInternalTaskFailuresListener(
            final InternalFailuresListener internalTaskFailuresListener) {
        checkNotNull(internalTaskFailuresListener);
        checkState(
                this.internalTaskFailuresListener == null,
                "internalTaskFailuresListener can be only set once");
        this.internalTaskFailuresListener = internalTaskFailuresListener;
    }

    // --------------------------------------------------------------------------------------------
    //  Actions
    // --------------------------------------------------------------------------------------------

    @Override
    public void notifyNewlyInitializedJobVertices(List<ExecutionJobVertex> vertices) {
        executionTopology.notifyExecutionGraphUpdated(this, vertices);
    }

    /**
     * Attaches the given job vertices. For dynamic graphs the vertices are attached lazily and
     * initialized later; otherwise they are attached and initialized immediately.
     */
    @Override
    public void attachJobGraph(List<JobVertex> topologicallySorted) throws JobException {
        if (isDynamic) {
            attachJobGraph(topologicallySorted, Collections.emptyList());
        } else {
            attachJobGraph(topologicallySorted, topologicallySorted);
        }
    }

    private void attachJobGraph(
            List<JobVertex> verticesToAttach, List<JobVertex> verticesToInitialize)
            throws JobException {

        assertRunningInJobMasterMainThread();

        LOG.debug(
                "Attaching {} topologically sorted vertices to existing job graph with {} "
                        + "vertices and {} intermediate results.",
                verticesToAttach.size(),
                tasks.size(),
                intermediateResults.size());

        attachJobVertices(verticesToAttach);
        initializeJobVertices(verticesToInitialize);

        // the topology assigning should happen before notifying new vertices to failoverStrategy
        executionTopology = DefaultExecutionTopology.fromExecutionGraph(this);

        partitionGroupReleaseStrategy =
                partitionGroupReleaseStrategyFactory.createInstance(getSchedulingTopology());
    }
    /** Attach job vertices without initializing them. */
    private void attachJobVertices(List<JobVertex> topologicallySorted) throws JobException {
        for (JobVertex jobVertex : topologicallySorted) {

            // a single non-stoppable input vertex makes the whole job non-stoppable
            if (jobVertex.isInputVertex() && !jobVertex.isStoppable()) {
                this.isStoppable = false;
            }

            VertexParallelismInformation parallelismInfo =
                    parallelismStore.getParallelismInfo(jobVertex.getID());

            // create the execution job vertex and attach it to the graph
            ExecutionJobVertex ejv = new ExecutionJobVertex(this, jobVertex, parallelismInfo);

            ExecutionJobVertex previousTask = this.tasks.putIfAbsent(jobVertex.getID(), ejv);
            if (previousTask != null) {
                throw new JobException(
                        String.format(
                                "Encountered two job vertices with ID %s : previous=[%s] / new=[%s]",
                                jobVertex.getID(), ejv, previousTask));
            }

            this.verticesInCreationOrder.add(ejv);
            this.numJobVerticesTotal++;
        }
    }

    /** Initializes the already-attached execution job vertices for the given job vertices. */
    private void initializeJobVertices(List<JobVertex> topologicallySorted) throws JobException {
        final long createTimestamp = System.currentTimeMillis();

        for (JobVertex jobVertex : topologicallySorted) {
            final ExecutionJobVertex ejv = tasks.get(jobVertex.getID());
            initializeJobVertex(ejv, createTimestamp);
        }
    }

    /**
     * Initializes one execution job vertex: creates its execution vertices, connects it to its
     * predecessors, registers its produced data sets, and enriches network memory once all
     * vertices of its slot sharing group are initialized.
     */
    @Override
    public void initializeJobVertex(ExecutionJobVertex ejv, long createTimestamp)
            throws JobException {

        checkNotNull(ejv);

        ejv.initialize(
                maxPriorAttemptsHistoryLength,
                rpcTimeout,
                createTimestamp,
                this.initialAttemptCounts.getAttemptCounts(ejv.getJobVertexId()),
                coordinatorStore);

        ejv.connectToPredecessors(this.intermediateResults);

        for (IntermediateResult res : ejv.getProducedDataSets()) {
            IntermediateResult previousDataSet =
                    this.intermediateResults.putIfAbsent(res.getId(), res);
            if (previousDataSet != null) {
                throw new JobException(
                        String.format(
                                "Encountered two intermediate data set with ID %s : previous=[%s] / new=[%s]",
                                res.getId(), res, previousDataSet));
            }
        }

        registerExecutionVerticesAndResultPartitionsFor(ejv);

        // enrich network memory.
        SlotSharingGroup slotSharingGroup = ejv.getSlotSharingGroup();
        if (areJobVerticesAllInitialized(slotSharingGroup)) {
            SsgNetworkMemoryCalculationUtils.enrichNetworkMemory(
                    slotSharingGroup, this::getJobVertex, shuffleMaster);
        }
    }

    private boolean areJobVerticesAllInitialized(final SlotSharingGroup group) {
        for (JobVertexID jobVertexId : group.getJobVertexIds()) {
            final ExecutionJobVertex jobVertex = getJobVertex(jobVertexId);
            checkNotNull(jobVertex, "Unknown job vertex %s", jobVertexId);
            if (!jobVertex.isInitialized()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public void transitionToRunning() {
        if (!transitionState(JobStatus.CREATED, JobStatus.RUNNING)) {
            throw new IllegalStateException(
                    "Job may only be scheduled from state " + JobStatus.CREATED);
        }
    }

    /**
     * Cancels the job. Loops because a concurrent state change can make an individual transition
     * attempt fail; retries until a transition succeeds or the state needs no treatment.
     */
    @Override
    public void cancel() {

        assertRunningInJobMasterMainThread();

        while (true) {
            JobStatus current = state;

            if (current == JobStatus.RUNNING
                    || current == JobStatus.CREATED
                    || current == JobStatus.RESTARTING) {
                if (transitionState(current, JobStatus.CANCELLING)) {

                    incrementRestarts();

                    final CompletableFuture<Void> ongoingSchedulingFuture = schedulingFuture;

                    // cancel ongoing scheduling action
                    if (ongoingSchedulingFuture != null) {
                        ongoingSchedulingFuture.cancel(false);
                    }

                    final ConjunctFuture<Void> allTerminal = cancelVerticesAsync();
                    allTerminal.whenComplete(
                            (Void value, Throwable throwable) -> {
                                if (throwable != null) {
                                    transitionState(
                                            JobStatus.CANCELLING,
                                            JobStatus.FAILED,
                                            new FlinkException(
                                                    "Could not cancel job "
                                                            + getJobName()
                                                            + " because not all execution job vertices could be cancelled.",
                                                    throwable));
                                } else {
                                    // cancellations may currently be overridden by failures which
                                    // trigger
                                    // restarts, so we need to pass a proper restart global version
                                    // here
                                    allVerticesInTerminalState();
                                }
                            });

                    return;
                }
            }
            // Executions are being canceled. Go into cancelling and wait for
            // all vertices to be in their final state.
            else if (current == JobStatus.FAILING) {
                if (transitionState(current, JobStatus.CANCELLING)) {
                    return;
                }
            } else {
                // no need to treat other states
                return;
            }
        }
    }

    @VisibleForTesting
    protected ConjunctFuture<Void> cancelVerticesAsync() {
        final ArrayList<CompletableFuture<?>> futures =
                new ArrayList<>(verticesInCreationOrder.size());

        // cancel all tasks (that still need cancelling)
        for (ExecutionJobVertex ejv : verticesInCreationOrder) {
            futures.add(ejv.cancelWithFuture());
        }

        // we build a future that is complete once all vertices have reached a terminal state
        return FutureUtils.waitForAll(futures);
    }

    /**
     * Suspends the job. Suspension must complete atomically within this call (all vertex suspend
     * futures are required to be done before it returns).
     */
    @Override
    public void suspend(Throwable suspensionCause) {

        assertRunningInJobMasterMainThread();

        if (state.isTerminalState()) {
            // stay in a terminal state
            return;
        } else if (transitionState(state, JobStatus.SUSPENDED, suspensionCause)) {
            initFailureCause(suspensionCause, System.currentTimeMillis());

            incrementRestarts();

            // cancel ongoing scheduling action
            if (schedulingFuture != null) {
                schedulingFuture.cancel(false);
            }

            final ArrayList<CompletableFuture<Void>> executionJobVertexTerminationFutures =
                    new ArrayList<>(verticesInCreationOrder.size());

            for (ExecutionJobVertex ejv : verticesInCreationOrder) {
                executionJobVertexTerminationFutures.add(ejv.suspend());
            }

            final ConjunctFuture<Void> jobVerticesTerminationFuture =
                    FutureUtils.waitForAll(executionJobVertexTerminationFutures);

            checkState(
                    jobVerticesTerminationFuture.isDone(),
                    "Suspend needs to happen atomically");

            jobVerticesTerminationFuture.whenComplete(
                    (Void ignored, Throwable throwable) -> {
                        if (throwable != null) {
                            LOG.debug(
                                    "Could not properly suspend the execution graph.", throwable);
                        }

                        onTerminalState(state);
                        LOG.info("Job {} has been suspended.", getJobID());
                    });
        } else {
            throw new IllegalStateException(
                    String.format(
                            "Could not suspend because transition from %s to %s failed.",
                            state, JobStatus.SUSPENDED));
        }
    }

    /** Fails the whole job, but only if the failing attempt is still RUNNING or INITIALIZING. */
    void failGlobalIfExecutionIsStillRunning(Throwable cause, ExecutionAttemptID failingAttempt) {
        final Execution failedExecution = currentExecutions.get(failingAttempt);
        if (failedExecution != null
                && (failedExecution.getState() == ExecutionState.RUNNING
                        || failedExecution.getState() == ExecutionState.INITIALIZING)) {
            failGlobal(cause);
        } else {
            LOG.debug(
                    "The failing attempt {} belongs to an already not"
                            + " running task thus won't fail the job",
                    failingAttempt);
        }
    }

    @Override
    public void failGlobal(Throwable t) {
        checkState(internalTaskFailuresListener != null);
        internalTaskFailuresListener.notifyGlobalFailure(t);
    }

    /**
     * Returns the serializable {@link ArchivedExecutionConfig}.
     *
     * @return ArchivedExecutionConfig which may be null in case of errors
     */
    @Override
    public ArchivedExecutionConfig getArchivedExecutionConfig() {
        // create a summary of all relevant data accessed in the web interface's JobConfigHandler
        try {
            ExecutionConfig executionConfig =
                    jobInformation
                            .getSerializedExecutionConfig()
                            .deserializeValue(userClassLoader);
            if (executionConfig != null) {
                return executionConfig.archive();
            }
        } catch (IOException | ClassNotFoundException e) {
            LOG.error("Couldn't create ArchivedExecutionConfig for job {} ", getJobID(), e);
        }
        return null;
    }

    @Override
    public CompletableFuture<JobStatus> getTerminationFuture() {
        return terminationFuture;
    }

    @Override
    @VisibleForTesting
    public JobStatus waitUntilTerminal() throws InterruptedException {
        try {
            return terminationFuture.get();
        } catch (ExecutionException e) {
            // this should never happen
            // it would be a bug, so we don't expect this to be handled and throw
            // an unchecked exception here
            throw new RuntimeException(e);
        }
    }

    // ------------------------------------------------------------------------
    //  State Transitions
    // ------------------------------------------------------------------------

    @Override
    public boolean transitionState(JobStatus current, JobStatus newState) {
        return transitionState(current, newState, null);
    }

    /** Transitions from the current state to {@code newState}, recording {@code error}. */
    private void transitionState(JobStatus newState, Throwable error) {
        transitionState(state, newState, error);
    }

    /**
     * Compare-and-set style transition: only succeeds if the state still equals {@code current}.
     *
     * @return true if the transition was performed, false if the state had changed concurrently
     * @throws IllegalStateException if {@code current} is already terminal
     */
    private boolean transitionState(JobStatus current, JobStatus newState, Throwable error) {
        assertRunningInJobMasterMainThread();
        // consistency check
        if (current.isTerminalState()) {
            String message = "Job is trying to leave terminal state " + current;
            LOG.error(message);
            throw new IllegalStateException(message);
        }

        // now do the actual state transition
        if (state == current) {
            state = newState;
            // the trailing 'error' argument is logged as the throwable by SLF4J (it is not a
            // message placeholder)
            LOG.info(
                    "Job {} ({}) switched from state {} to {}.",
                    getJobName(),
                    getJobID(),
                    current,
                    newState,
                    error);

            stateTimestamps[newState.ordinal()] = System.currentTimeMillis();
            notifyJobStatusChange(newState);
            return true;
        } else {
            return false;
        }
    }

    @Override
    public void incrementRestarts() {
        numberOfRestartsCounter.inc();
    }

    @Override
    public void initFailureCause(Throwable t, long timestamp) {
        this.failureCause = t;
        this.failureInfo = new ErrorInfo(t, timestamp);
    }

    // ------------------------------------------------------------------------
    //  Job Status Progress
    // ------------------------------------------------------------------------

    /**
     * Called whenever a job vertex reaches state FINISHED (completed successfully). Once all job
     * vertices are in the FINISHED state, the program is successfully done.
*/ @Override public void jobVertexFinished() { assertRunningInJobMasterMainThread(); final int numFinished = ++numFinishedJobVertices; if (numFinished == numJobVerticesTotal) { // done :-) // check whether we are still in "RUNNING" and trigger the final cleanup if (state == JobStatus.RUNNING) { // we do the final cleanup in the I/O executor, because it may involve // some heavier work try { for (ExecutionJobVertex ejv : verticesInCreationOrder) { ejv.getJobVertex().finalizeOnMaster(getUserClassLoader()); } } catch (Throwable t) { ExceptionUtils.rethrowIfFatalError(t); ClusterEntryPointExceptionUtils.tryEnrichClusterEntryPointError(t); failGlobal(new Exception("Failed to finalize execution on master", t)); return; } // if we do not make this state transition, then a concurrent // cancellation or failure happened if (transitionState(JobStatus.RUNNING, JobStatus.FINISHED)) { onTerminalState(JobStatus.FINISHED); } } } } @Override public void jobVertexUnFinished() { assertRunningInJobMasterMainThread(); numFinishedJobVertices--; } /** * This method is a callback during cancellation/failover and called when all tasks have reached * a terminal state (cancelled/failed/finished). 
*/ private void allVerticesInTerminalState() { assertRunningInJobMasterMainThread(); // we are done, transition to the final state JobStatus current; while (true) { current = this.state; if (current == JobStatus.RUNNING) { failGlobal( new Exception( "ExecutionGraph went into allVerticesInTerminalState() from RUNNING")); } else if (current == JobStatus.CANCELLING) { if (transitionState(current, JobStatus.CANCELED)) { onTerminalState(JobStatus.CANCELED); break; } } else if (current == JobStatus.FAILING) { break; } else if (current.isGloballyTerminalState()) { LOG.warn( "Job has entered globally terminal state without waiting for all " + "job vertices to reach final state."); break; } else { failGlobal( new Exception( "ExecutionGraph went into final state from state " + current)); break; } } // done transitioning the state } @Override public void failJob(Throwable cause, long timestamp) { if (state == JobStatus.FAILING || state.isTerminalState()) { return; } transitionState(JobStatus.FAILING, cause); initFailureCause(cause, timestamp); FutureUtils.assertNoException( cancelVerticesAsync() .whenComplete( (aVoid, throwable) -> { if (transitionState( JobStatus.FAILING, JobStatus.FAILED, cause)) { onTerminalState(JobStatus.FAILED); } else if (state == JobStatus.CANCELLING) { transitionState(JobStatus.CANCELLING, JobStatus.CANCELED); onTerminalState(JobStatus.CANCELED); } else if (!state.isTerminalState()) { throw new IllegalStateException( "Cannot complete job failing from an unexpected state: " + state); } })); } private void onTerminalState(JobStatus status) { LOG.debug("ExecutionGraph {} reached terminal state {}.", getJobID(), status); try { CheckpointCoordinator coord = this.checkpointCoordinator; this.checkpointCoordinator = null; if (coord != null) { coord.shutdown(); } if (checkpointCoordinatorTimer != null) { checkpointCoordinatorTimer.shutdownNow(); checkpointCoordinatorTimer = null; } } catch (Exception e) { LOG.error("Error while cleaning up after execution", 
e); } finally { terminationFuture.complete(status); } } // -------------------------------------------------------------------------------------------- // Callbacks and Callback Utilities // -------------------------------------------------------------------------------------------- @Override public boolean updateState(TaskExecutionStateTransition state) { assertRunningInJobMasterMainThread(); final Execution attempt = currentExecutions.get(state.getID()); if (attempt != null) { try { final boolean stateUpdated = updateStateInternal(state, attempt); maybeReleasePartitionGroupsFor(attempt); return stateUpdated; } catch (Throwable t) { ExceptionUtils.rethrowIfFatalErrorOrOOM(t); // failures during updates leave the ExecutionGraph inconsistent failGlobal(t); return false; } } else { return false; } } private boolean updateStateInternal( final TaskExecutionStateTransition state, final Execution attempt) { Map<String, Accumulator<?, ?>> accumulators; switch (state.getExecutionState()) { case INITIALIZING: return attempt.switchToRecovering(); case RUNNING: return attempt.switchToRunning(); case FINISHED: // this deserialization is exception-free accumulators = deserializeAccumulators(state); attempt.markFinished(accumulators, state.getIOMetrics()); return true; case CANCELED: // this deserialization is exception-free accumulators = deserializeAccumulators(state); attempt.completeCancelling(accumulators, state.getIOMetrics(), false); return true; case FAILED: // this deserialization is exception-free accumulators = deserializeAccumulators(state); attempt.markFailed( state.getError(userClassLoader), state.getCancelTask(), accumulators, state.getIOMetrics(), state.getReleasePartitions(), true); return true; default: // we mark as failed and return false, which triggers the TaskManager // to remove the task attempt.fail( new Exception( "TaskManager sent illegal state update: " + state.getExecutionState())); return false; } } private void maybeReleasePartitionGroupsFor(final 
Execution attempt) {
    // NOTE(review): tail of a task-state-update handler whose signature begins
    // before this chunk — left byte-identical.
    final ExecutionVertexID finishedExecutionVertex = attempt.getVertex().getID();
    if (attempt.getState() == ExecutionState.FINISHED) {
        // A vertex reaching FINISHED may make whole ConsumedPartitionGroups releasable.
        final List<ConsumedPartitionGroup> releasablePartitionGroups =
                partitionGroupReleaseStrategy.vertexFinished(finishedExecutionVertex);
        releasePartitionGroups(releasablePartitionGroups);
    } else {
        // Any non-FINISHED state transitions the vertex back to "unfinished".
        partitionGroupReleaseStrategy.vertexUnfinished(finishedExecutionVertex);
    }
}

/**
 * Stops tracking and releases all partitions of the given groups, after dropping
 * the cached ShuffleDescriptor information for each group.
 *
 * @param releasablePartitionGroups the consumed partition groups to release; may be empty
 */
private void releasePartitionGroups(
        final List<ConsumedPartitionGroup> releasablePartitionGroups) {
    if (releasablePartitionGroups.size() > 0) {

        // Remove the cache of ShuffleDescriptors when ConsumedPartitionGroups are released
        for (ConsumedPartitionGroup releasablePartitionGroup : releasablePartitionGroups) {
            IntermediateResult totalResult =
                    checkNotNull(
                            intermediateResults.get(
                                    releasablePartitionGroup.getIntermediateDataSetID()));
            totalResult.clearCachedInformationForPartitionGroup(releasablePartitionGroup);
        }

        // Flatten the groups into individual partition ids and hand them to the tracker.
        final List<ResultPartitionID> releasablePartitionIds =
                releasablePartitionGroups.stream()
                        .flatMap(IterableUtils::toStream)
                        .map(this::createResultPartitionId)
                        .collect(Collectors.toList());

        partitionTracker.stopTrackingAndReleasePartitions(releasablePartitionIds);
    }
}

/**
 * Resolves an {@link IntermediateResultPartitionID} to the {@link ResultPartitionID}
 * of the producing subtask's current execution attempt.
 *
 * @param resultPartitionId id of the intermediate result partition
 * @return the partition id qualified with the current attempt id of its producer
 */
ResultPartitionID createResultPartitionId(
        final IntermediateResultPartitionID resultPartitionId) {
    final SchedulingResultPartition schedulingResultPartition =
            getSchedulingTopology().getResultPartition(resultPartitionId);
    final SchedulingExecutionVertex producer = schedulingResultPartition.getProducer();
    final ExecutionVertexID producerId = producer.getId();
    final JobVertexID jobVertexId = producerId.getJobVertexId();
    final ExecutionJobVertex jobVertex = getJobVertex(jobVertexId);
    checkNotNull(jobVertex, "Unknown job vertex %s", jobVertexId);

    final ExecutionVertex[] taskVertices = jobVertex.getTaskVertices();
    final int subtaskIndex = producerId.getSubtaskIndex();
    checkState(
            subtaskIndex < taskVertices.length,
            "Invalid subtask index %d for job vertex %s",
            subtaskIndex,
            jobVertexId);

    final ExecutionVertex taskVertex = taskVertices[subtaskIndex];
    final Execution execution = taskVertex.getCurrentExecutionAttempt();
    return new ResultPartitionID(resultPartitionId, execution.getAttemptId());
}

/**
 * Deserializes accumulators from a task state update.
 *
 * <p>This method never throws an exception!
 *
 * @param state The task execution state from which to deserialize the accumulators.
 * @return The deserialized accumulators, or null, if there are no accumulators or an error
 *     occurred.
 */
private Map<String, Accumulator<?, ?>> deserializeAccumulators(
        TaskExecutionStateTransition state) {
    AccumulatorSnapshot serializedAccumulators = state.getAccumulators();

    if (serializedAccumulators != null) {
        try {
            return serializedAccumulators.deserializeUserAccumulators(userClassLoader);
        } catch (Throwable t) {
            // we catch Throwable here to include all form of linking errors that may
            // occur if user classes are missing in the classpath
            LOG.error("Failed to deserialize final accumulator results.", t);
        }
    }
    return null;
}

@Override
public void notifyPartitionDataAvailable(ResultPartitionID partitionId) {
    // Must only be called on the job master main thread.
    assertRunningInJobMasterMainThread();

    // Resolve the producing execution attempt; fails fast if it is unknown.
    final Execution execution = currentExecutions.get(partitionId.getProducerId());
    checkState(
            execution != null,
            "Cannot find execution for execution Id " + partitionId.getPartitionId() + ".");

    execution.getVertex().notifyPartitionDataAvailable(partitionId);
}

@Override
public Map<ExecutionAttemptID, Execution> getRegisteredExecutions() {
    // Read-only view over the live map of current execution attempts.
    return Collections.unmodifiableMap(currentExecutions);
}

@Override
public void registerExecution(Execution exec) {
    assertRunningInJobMasterMainThread();
    // An attempt id collision is a fatal inconsistency -> fail the whole job.
    Execution previous = currentExecutions.putIfAbsent(exec.getAttemptId(), exec);
    if (previous != null) {
        failGlobal(
                new Exception(
                        "Trying to register execution "
                                + exec
                                + " for already used ID "
                                + exec.getAttemptId()));
    }
}

@Override
public void deregisterExecution(Execution exec) {
    assertRunningInJobMasterMainThread();
    Execution contained = currentExecutions.remove(exec.getAttemptId());

    // Removing a different execution under the same id is a fatal inconsistency.
    if (contained != null && contained != exec) {
        failGlobal(
                new Exception(
                        "De-registering execution "
                                + exec
                                + " failed. Found for same ID execution "
                                + contained));
    }
}

/**
 * Indexes all execution vertices and their produced partitions of the given job
 * vertex into the lookup maps of this graph.
 */
private void registerExecutionVerticesAndResultPartitionsFor(
        ExecutionJobVertex executionJobVertex) {
    for (ExecutionVertex executionVertex : executionJobVertex.getTaskVertices()) {
        executionVerticesById.put(executionVertex.getID(), executionVertex);
        resultPartitionsById.putAll(executionVertex.getProducedPartitions());
    }
}

@Override
public void updateAccumulators(AccumulatorSnapshot accumulatorSnapshot) {
    Map<String, Accumulator<?, ?>> userAccumulators;
    try {
        userAccumulators = accumulatorSnapshot.deserializeUserAccumulators(userClassLoader);

        ExecutionAttemptID execID = accumulatorSnapshot.getExecutionAttemptID();
        Execution execution = currentExecutions.get(execID);
        if (execution != null) {
            execution.setAccumulators(userAccumulators);
        } else {
            // Late/unknown attempts are not an error here, just logged.
            LOG.debug("Received accumulator result for unknown execution {}.", execID);
        }
    } catch (Exception e) {
        LOG.error("Cannot update accumulators for job {}.", getJobID(), e);
    }
}

// --------------------------------------------------------------------------------------------
//  Listeners & Observers
// --------------------------------------------------------------------------------------------

@Override
public void registerJobStatusListener(JobStatusListener listener) {
    if (listener != null) {
        jobStatusListeners.add(listener);
    }
}

/**
 * Notifies all registered job status listeners of the new state; a failing
 * listener is logged but does not prevent the others from being notified.
 */
private void notifyJobStatusChange(JobStatus newState) {
    if (jobStatusListeners.size() > 0) {
        final long timestamp = System.currentTimeMillis();

        for (JobStatusListener listener : jobStatusListeners) {
            try {
                listener.jobStatusChanges(getJobID(), newState, timestamp);
            } catch (Throwable t) {
                LOG.warn("Error while notifying JobStatusListener", t);
            }
        }
    }
}

@Override
public void notifyExecutionChange(
        final Execution execution, final ExecutionState newExecutionState) {
    executionStateUpdateListener.onStateUpdate(execution.getAttemptId(), newExecutionState);
}

private void assertRunningInJobMasterMainThread() {
    // The dummy executor is used in contexts without a real main thread (e.g. tests),
    // where the assertion would be meaningless.
    if (!(jobMasterMainThreadExecutor
            instanceof ComponentMainThreadExecutor.DummyComponentMainThreadExecutor)) {
        jobMasterMainThreadExecutor.assertRunningInMainThread();
    }
}

@Override
public void notifySchedulerNgAboutInternalTaskFailure(
        final ExecutionAttemptID attemptId,
        final Throwable t,
        final boolean cancelTask,
        final boolean releasePartitions) {
    checkState(internalTaskFailuresListener != null);
    internalTaskFailuresListener.notifyTaskFailure(attemptId, t, cancelTask, releasePartitions);
}

@Override
public void deleteBlobs(List<PermanentBlobKey> blobKeys) {
    // Deletion is fire-and-forget on the I/O executor to keep the caller non-blocking.
    CompletableFuture.runAsync(
            () -> {
                for (PermanentBlobKey blobKey : blobKeys) {
                    blobWriter.deletePermanent(getJobID(), blobKey);
                }
            },
            ioExecutor);
}

@Override
public ShuffleMaster<?> getShuffleMaster() {
    return shuffleMaster;
}

@Override
public JobMasterPartitionTracker getPartitionTracker() {
    return partitionTracker;
}

@Override
public ResultPartitionAvailabilityChecker getResultPartitionAvailabilityChecker() {
    return resultPartitionAvailabilityChecker;
}

@Override
public PartitionGroupReleaseStrategy getPartitionGroupReleaseStrategy() {
    return partitionGroupReleaseStrategy;
}

@Override
public ExecutionDeploymentListener getExecutionDeploymentListener() {
    return executionDeploymentListener;
}

@Override
public boolean isDynamic() {
    return isDynamic;
}
}
package cucumber.runtime;

import cucumber.api.PendingException;
import cucumber.api.Scenario;
import cucumber.api.StepDefinitionReporter;
import cucumber.runtime.formatter.CucumberJSONFormatter;
import cucumber.runtime.formatter.FormatterSpy;
import cucumber.runtime.io.ClasspathResourceLoader;
import cucumber.runtime.io.Resource;
import cucumber.runtime.io.ResourceLoader;
import cucumber.runtime.model.CucumberFeature;
import gherkin.I18n;
import gherkin.formatter.Formatter;
import gherkin.formatter.JSONFormatter;
import gherkin.formatter.Reporter;
import gherkin.formatter.model.Result;
import gherkin.formatter.model.Step;
import gherkin.formatter.model.Tag;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import org.mockito.ArgumentCaptor;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static cucumber.runtime.TestHelper.feature;
import static cucumber.runtime.TestHelper.result;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyCollectionOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link Runtime}: exit status in strict/non-strict mode, summary
 * counters, hook failure handling, scenario metadata passed to hooks, and the
 * order of formatter callbacks. Collaborators are Mockito mocks throughout.
 */
public class RuntimeTest {
    // Locale used when registering undefined steps directly on the tracker.
    private static final I18n ENGLISH = new I18n("en");

    @Ignore
    @Test
    public void runs_feature_with_json_formatter() throws Exception {
        CucumberFeature feature = feature("test.feature", "" +
                "Feature: feature name\n" +
                " Background: background name\n" +
                " Given b\n" +
                " Scenario: scenario name\n" +
                " When s\n");
        StringBuilder out = new StringBuilder();
        JSONFormatter jsonFormatter = new CucumberJSONFormatter(out);
        List<Backend> backends = asList(mock(Backend.class));
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        RuntimeOptions runtimeOptions = new RuntimeOptions("");
        Runtime runtime = new Runtime(new ClasspathResourceLoader(classLoader), classLoader, backends, runtimeOptions);
        feature.run(jsonFormatter, jsonFormatter, runtime);
        jsonFormatter.done();
        // Expected JSON for one undefined background step and one undefined scenario step.
        String expected = "" +
                "[\n" +
                " {\n" +
                " \"id\": \"feature-name\",\n" +
                " \"description\": \"\",\n" +
                " \"name\": \"feature name\",\n" +
                " \"keyword\": \"Feature\",\n" +
                " \"line\": 1,\n" +
                " \"elements\": [\n" +
                " {\n" +
                " \"description\": \"\",\n" +
                " \"name\": \"background name\",\n" +
                " \"keyword\": \"Background\",\n" +
                " \"line\": 2,\n" +
                " \"steps\": [\n" +
                " {\n" +
                " \"result\": {\n" +
                " \"status\": \"undefined\"\n" +
                " },\n" +
                " \"name\": \"b\",\n" +
                " \"keyword\": \"Given \",\n" +
                " \"line\": 3,\n" +
                " \"match\": {}\n" +
                " }\n" +
                " ],\n" +
                " \"type\": \"background\"\n" +
                " },\n" +
                " {\n" +
                " \"id\": \"feature-name;scenario-name\",\n" +
                " \"description\": \"\",\n" +
                " \"name\": \"scenario name\",\n" +
                " \"keyword\": \"Scenario\",\n" +
                " \"line\": 4,\n" +
                " \"steps\": [\n" +
                " {\n" +
                " \"result\": {\n" +
                " \"status\": \"undefined\"\n" +
                " },\n" +
                " \"name\": \"s\",\n" +
                " \"keyword\": \"When \",\n" +
                " \"line\": 5,\n" +
                " \"match\": {}\n" +
                " }\n" +
                " ],\n" +
                " \"type\": \"scenario\"\n" +
                " }\n" +
                " ],\n" +
                " \"uri\": \"test.feature\"\n" +
                " }\n" +
                "]";
        assertEquals(expected, out.toString());
    }

    @Test
    public void strict_without_pending_steps_or_errors() {
        Runtime runtime = createStrictRuntime();
        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void non_strict_without_pending_steps_or_errors() {
        Runtime runtime = createNonStrictRuntime();
        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void non_strict_with_undefined_steps() {
        Runtime runtime = createNonStrictRuntime();
        runtime.undefinedStepsTracker.addUndefinedStep(new Step(null, "Given ", "A", 1, null, null), ENGLISH);
        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void strict_with_undefined_steps() {
        // Undefined steps only affect exit status in strict mode.
        Runtime runtime = createStrictRuntime();
        runtime.undefinedStepsTracker.addUndefinedStep(new Step(null, "Given ", "A", 1, null, null), ENGLISH);
        assertEquals(0x1, runtime.exitStatus());
    }

    @Test
    public void strict_with_pending_steps_and_no_errors() {
        Runtime runtime = createStrictRuntime();
        runtime.addError(new PendingException());
        assertEquals(0x1, runtime.exitStatus());
    }

    @Test
    public void non_strict_with_pending_steps() {
        Runtime runtime = createNonStrictRuntime();
        runtime.addError(new PendingException());
        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void non_strict_with_failed_junit_assumption_prior_to_junit_412() {
        // Older JUnit exposes AssumptionViolatedException from the internal package.
        Runtime runtime = createNonStrictRuntime();
        runtime.addError(new AssumptionViolatedException("should be treated like pending"));
        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void non_strict_with_failed_junit_assumption_from_junit_412_on() {
        Runtime runtime = createNonStrictRuntime();
        runtime.addError(new org.junit.AssumptionViolatedException("should be treated like pending"));
        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void non_strict_with_errors() {
        // Real errors fail the run regardless of strictness.
        Runtime runtime = createNonStrictRuntime();
        runtime.addError(new RuntimeException());
        assertEquals(0x1, runtime.exitStatus());
    }

    @Test
    public void strict_with_errors() {
        Runtime runtime = createStrictRuntime();
        runtime.addError(new RuntimeException());
        assertEquals(0x1, runtime.exitStatus());
    }

    @Test
    public void should_pass_if_no_features_are_found() throws IOException {
        ResourceLoader resourceLoader = createResourceLoaderThatFindsNoFeatures();
        Runtime runtime = createStrictRuntime(resourceLoader);

        runtime.run();

        assertEquals(0x0, runtime.exitStatus());
    }

    @Test
    public void reports_step_definitions_to_plugin() throws IOException, NoSuchMethodException {
        Runtime runtime = createRuntime("--plugin", "cucumber.runtime.RuntimeTest$StepdefsPrinter");
        StubStepDefinition stepDefinition = new StubStepDefinition(this, getClass().getMethod("reports_step_definitions_to_plugin"), "some pattern");
        runtime.getGlue().addStepDefinition(stepDefinition);

        runtime.run();

        assertSame(stepDefinition, StepdefsPrinter.instance.stepDefinition);
    }

    /**
     * Plugin used by {@code reports_step_definitions_to_plugin}; records the last
     * reported step definition in a static field so the test can inspect it.
     */
    public static class StepdefsPrinter implements StepDefinitionReporter {
        public static StepdefsPrinter instance;
        public StepDefinition stepDefinition;

        public StepdefsPrinter() {
            instance = this;
        }

        @Override
        public void stepDefinition(StepDefinition stepDefinition) {
            this.stepDefinition = stepDefinition;
        }
    }

    @Test
    public void should_throw_cucumer_exception_if_no_backends_are_found() throws Exception {
        try {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            new Runtime(new ClasspathResourceLoader(classLoader), classLoader, Collections.<Backend>emptyList(),
                    new RuntimeOptions(""));
            fail("A CucumberException should have been thrown");
        } catch (CucumberException e) {
            assertEquals("No backends were found. Please make sure you have a backend module on your CLASSPATH.", e.getMessage());
        }
    }

    @Test
    public void should_add_passed_result_to_the_summary_counter() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);
        StepDefinitionMatch match = mock(StepDefinitionMatch.class);

        Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), startsWith(String.format(
                "1 Scenarios (1 passed)%n" +
                "1 Steps (1 passed)%n")));
    }

    @Test
    public void should_add_pending_result_to_the_summary_counter() throws Throwable {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);
        StepDefinitionMatch match = createExceptionThrowingMatch(new PendingException());

        Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format("" +
                "1 Scenarios (1 pending)%n" +
                "1 Steps (1 pending)%n")));
    }

    @Test
    public void should_add_failed_result_to_the_summary_counter() throws Throwable {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);
        StepDefinitionMatch match = createExceptionThrowingMatch(new Exception());

        Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format("" +
                "1 Scenarios (1 failed)%n" +
                "1 Steps (1 failed)%n")));
    }

    @Test
    public void should_add_ambiguous_match_as_failed_result_to_the_summary_counter() throws Throwable {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);

        Runtime runtime = createRuntimeWithMockedGlueWithAmbiguousMatch("--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format(""+
                "1 Scenarios (1 failed)%n" +
                "1 Steps (1 failed)%n")));
    }

    @Test
    public void should_add_skipped_result_to_the_summary_counter() throws Throwable {
        // The step after a failed step is skipped, not executed.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);
        StepDefinitionMatch match = createExceptionThrowingMatch(new Exception());

        Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
        runScenario(reporter, runtime, stepCount(2));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format("" +
                "1 Scenarios (1 failed)%n" +
                "2 Steps (1 failed, 1 skipped)%n")));
    }

    @Test
    public void should_add_undefined_result_to_the_summary_counter() throws Throwable {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);

        // A null match means the glue found no step definition -> undefined.
        Runtime runtime = createRuntimeWithMockedGlue(null, "--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format("" +
                "1 Scenarios (1 undefined)%n" +
                "1 Steps (1 undefined)%n")));
    }

    @Test
    public void should_fail_the_scenario_if_before_fails() throws Throwable {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);
        StepDefinitionMatch match = mock(StepDefinitionMatch.class);
        HookDefinition hook = createExceptionThrowingHook();

        Runtime runtime = createRuntimeWithMockedGlue(match, hook, true, "--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format("" +
                "1 Scenarios (1 failed)%n" +
                "1 Steps (1 skipped)%n")));
    }

    @Test
    public void should_fail_the_scenario_if_after_fails() throws Throwable {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Reporter reporter = mock(Reporter.class);
        StepDefinitionMatch match = mock(StepDefinitionMatch.class);
        HookDefinition hook = createExceptionThrowingHook();

        Runtime runtime = createRuntimeWithMockedGlue(match, hook, false, "--monochrome");
        runScenario(reporter, runtime, stepCount(1));
        runtime.printStats(new PrintStream(baos));

        assertThat(baos.toString(), containsString(String.format("" +
                "1 Scenarios (1 failed)%n" +
                "1 Steps (1 passed)%n")));
    }

    @Test
    public void should_make_scenario_name_available_to_hooks() throws Throwable {
        CucumberFeature feature = TestHelper.feature("path/test.feature",
                "Feature: feature name\n" +
                " Scenario: scenario name\n" +
                " Given first step\n" +
                " When second step\n" +
                " Then third step\n");
        HookDefinition beforeHook = mock(HookDefinition.class);
        when(beforeHook.matches(anyCollectionOf(Tag.class))).thenReturn(true);

        Runtime runtime = createRuntimeWithMockedGlue(mock(StepDefinitionMatch.class), beforeHook, true);
        feature.run(mock(Formatter.class), mock(Reporter.class), runtime);

        // Capture the Scenario object handed to the before-hook and inspect it.
        ArgumentCaptor<Scenario> capturedScenario = ArgumentCaptor.forClass(Scenario.class);
        verify(beforeHook).execute(capturedScenario.capture());
        assertEquals("scenario name", capturedScenario.getValue().getName());
    }

    @Test
    public void should_make_scenario_id_available_to_hooks() throws Throwable {
        CucumberFeature feature = TestHelper.feature("path/test.feature",
                "Feature: feature name\n" +
                " Scenario: scenario name\n" +
                " Given first step\n" +
                " When second step\n" +
                " Then third step\n");
        HookDefinition beforeHook = mock(HookDefinition.class);
        when(beforeHook.matches(anyCollectionOf(Tag.class))).thenReturn(true);

        Runtime runtime = createRuntimeWithMockedGlue(mock(StepDefinitionMatch.class), beforeHook, true);
        feature.run(mock(Formatter.class), mock(Reporter.class), runtime);

        ArgumentCaptor<Scenario> capturedScenario = ArgumentCaptor.forClass(Scenario.class);
        verify(beforeHook).execute(capturedScenario.capture());
        assertEquals("feature-name;scenario-name", capturedScenario.getValue().getId());
    }

    @Test
    public void should_call_formatter_for_two_scenarios_with_background() throws Throwable {
        CucumberFeature feature = TestHelper.feature("path/test.feature", "" +
                "Feature: feature name\n" +
                " Background: background\n" +
                " Given first step\n" +
                " Scenario: scenario_1 name\n" +
                " When second step\n" +
                " Then third step\n" +
                " Scenario: scenario_2 name\n" +
                " Then second step\n");
        Map<String, Result> stepsToResult = new HashMap<String, Result>();
        stepsToResult.put("first step", result("passed"));
        stepsToResult.put("second step", result("passed"));
        stepsToResult.put("third step", result("passed"));

        String formatterOutput = runFeatureWithFormatterSpy(feature, stepsToResult);

        assertEquals("" +
                "uri\n" +
                "feature\n" +
                " startOfScenarioLifeCycle\n" +
                " background\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " scenario\n" +
                " step\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " match\n" +
                " result\n" +
                " endOfScenarioLifeCycle\n" +
                " startOfScenarioLifeCycle\n" +
                " background\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " scenario\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " endOfScenarioLifeCycle\n" +
                "eof\n" +
                "done\n" +
                "close\n", formatterOutput);
    }

    @Test
    public void should_call_formatter_for_scenario_outline_with_two_examples_table_and_background() throws Throwable {
        CucumberFeature feature = TestHelper.feature("path/test.feature", "" +
                "Feature: feature name\n" +
                " Background: background\n" +
                " Given first step\n" +
                " Scenario Outline: scenario outline name\n" +
                " When <x> step\n" +
                " Then <y> step\n" +
                " Examples: examples 1 name\n" +
                " | x | y |\n" +
                " | second | third |\n" +
                " | second | third |\n" +
                " Examples: examples 2 name\n" +
                " | x | y |\n" +
                " | second | third |\n");
        Map<String, Result> stepsToResult = new HashMap<String, Result>();
        stepsToResult.put("first step", result("passed"));
        stepsToResult.put("second step", result("passed"));
        stepsToResult.put("third step", result("passed"));

        String formatterOutput = runFeatureWithFormatterSpy(feature, stepsToResult);

        assertEquals("" +
                "uri\n" +
                "feature\n" +
                " scenarioOutline\n" +
                " step\n" +
                " step\n" +
                " examples\n" +
                " startOfScenarioLifeCycle\n" +
                " background\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " scenario\n" +
                " step\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " match\n" +
                " result\n" +
                " endOfScenarioLifeCycle\n" +
                " startOfScenarioLifeCycle\n" +
                " background\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " scenario\n" +
                " step\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " match\n" +
                " result\n" +
                " endOfScenarioLifeCycle\n" +
                " examples\n" +
                " startOfScenarioLifeCycle\n" +
                " background\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " scenario\n" +
                " step\n" +
                " step\n" +
                " match\n" +
                " result\n" +
                " match\n" +
                " result\n" +
                " endOfScenarioLifeCycle\n" +
                "eof\n" +
                "done\n" +
                "close\n", formatterOutput);
    }

    // Runs the feature through a FormatterSpy and returns the recorded callback trace.
    private String runFeatureWithFormatterSpy(CucumberFeature feature, Map<String, Result> stepsToResult) throws Throwable {
        FormatterSpy formatterSpy = new FormatterSpy();
        TestHelper.runFeatureWithFormatter(feature, stepsToResult, Collections.<SimpleEntry<String, Result>>emptyList(), 0L, formatterSpy, formatterSpy);
        return formatterSpy.toString();
    }

    // Mocked match whose runStep always throws the given exception.
    private StepDefinitionMatch createExceptionThrowingMatch(Exception exception) throws Throwable {
        StepDefinitionMatch match = mock(StepDefinitionMatch.class);
        doThrow(exception).when(match).runStep((I18n) any());
        return match;
    }

    // Mocked hook that matches every tag set and throws when executed.
    private HookDefinition createExceptionThrowingHook() throws Throwable {
        HookDefinition hook = mock(HookDefinition.class);
        when(hook.matches(anyCollectionOf(Tag.class))).thenReturn(true);
        doThrow(new Exception()).when(hook).execute((Scenario) any());
        return hook;
    }

    public void runStep(Reporter reporter, Runtime runtime) {
        Step step = mock(Step.class);
        I18n i18n = mock(I18n.class);
        runtime.runStep("<featurePath>", step, reporter, i18n);
    }

    private ResourceLoader createResourceLoaderThatFindsNoFeatures() {
        ResourceLoader resourceLoader = mock(ResourceLoader.class);
        when(resourceLoader.resources(anyString(), eq(".feature"))).thenReturn(Collections.<Resource>emptyList());
        return resourceLoader;
    }

    private Runtime createStrictRuntime() {
        return createRuntime("-g", "anything", "--strict");
    }

    private Runtime createNonStrictRuntime() {
        return createRuntime("-g", "anything");
    }

    private Runtime createStrictRuntime(ResourceLoader resourceLoader) {
        return createRuntime(resourceLoader, Thread.currentThread().getContextClassLoader(), "-g", "anything", "--strict");
    }

    private Runtime createRuntime(String... runtimeArgs) {
        ResourceLoader resourceLoader = mock(ResourceLoader.class);
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        return createRuntime(resourceLoader, classLoader, runtimeArgs);
    }

    private Runtime createRuntime(ResourceLoader resourceLoader, ClassLoader classLoader, String... runtimeArgs) {
        RuntimeOptions runtimeOptions = new RuntimeOptions(asList(runtimeArgs));
        Backend backend = mock(Backend.class);
        Collection<Backend> backends = Arrays.asList(backend);
        return new Runtime(resourceLoader, classLoader, backends, runtimeOptions);
    }

    private Runtime createRuntimeWithMockedGlue(StepDefinitionMatch match, String... runtimeArgs) {
        return createRuntimeWithMockedGlue(match, false, mock(HookDefinition.class), false, runtimeArgs);
    }

    private Runtime createRuntimeWithMockedGlue(StepDefinitionMatch match, HookDefinition hook, boolean isBefore,
                                                String... runtimeArgs) {
        return createRuntimeWithMockedGlue(match, false, hook, isBefore, runtimeArgs);
    }

    private Runtime createRuntimeWithMockedGlueWithAmbiguousMatch(String... runtimeArgs) {
        return createRuntimeWithMockedGlue(mock(StepDefinitionMatch.class), true, mock(HookDefinition.class), false, runtimeArgs);
    }

    // Central factory: wires a mocked RuntimeGlue (match + hook) into a Runtime.
    private Runtime createRuntimeWithMockedGlue(StepDefinitionMatch match, boolean isAmbiguous, HookDefinition hook,
                                                boolean isBefore, String... runtimeArgs) {
        ResourceLoader resourceLoader = mock(ResourceLoader.class);
        ClassLoader classLoader = mock(ClassLoader.class);
        RuntimeOptions runtimeOptions = new RuntimeOptions(asList(runtimeArgs));
        Backend backend = mock(Backend.class);
        RuntimeGlue glue = mock(RuntimeGlue.class);
        mockMatch(glue, match, isAmbiguous);
        mockHook(glue, hook, isBefore);
        Collection<Backend> backends = Arrays.asList(backend);
        return new Runtime(resourceLoader, classLoader, backends, runtimeOptions, glue);
    }

    private void mockMatch(RuntimeGlue glue, StepDefinitionMatch match, boolean isAmbiguous) {
        if (isAmbiguous) {
            Exception exception = new AmbiguousStepDefinitionsException(Arrays.asList(match, match));
            doThrow(exception).when(glue).stepDefinitionMatch(anyString(), (Step) any(), (I18n) any());
        } else {
            when(glue.stepDefinitionMatch(anyString(), (Step) any(), (I18n) any())).thenReturn(match);
        }
    }

    private void mockHook(RuntimeGlue glue, HookDefinition hook, boolean isBefore) {
        if (isBefore) {
            when(glue.getBeforeHooks()).thenReturn(Arrays.asList(hook));
        } else {
            when(glue.getAfterHooks()).thenReturn(Arrays.asList(hook));
        }
    }

    // Drives one full scenario lifecycle: build worlds, before hooks, steps, after hooks, dispose.
    private void runScenario(Reporter reporter, Runtime runtime, int stepCount) {
        gherkin.formatter.model.Scenario gherkinScenario = mock(gherkin.formatter.model.Scenario.class);
        runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet(), gherkinScenario);
        runtime.runBeforeHooks(reporter, Collections.<Tag>emptySet());
        for (int i = 0; i < stepCount; ++i) {
            runStep(reporter, runtime);
        }
        runtime.runAfterHooks(reporter, Collections.<Tag>emptySet());
        runtime.disposeBackendWorlds("scenario designation");
    }

    // Named pass-through that makes call sites read as runScenario(..., stepCount(2)).
    private int stepCount(int stepCount) {
        return stepCount;
    }
}
/* * Copyright (C) 2014-2022 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.commons.locale; import java.math.BigDecimal; import java.math.BigInteger; import java.text.NumberFormat; import java.util.Locale; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; import javax.annotation.concurrent.Immutable; import com.helger.commons.ValueEnforcer; import com.helger.commons.annotation.PresentForCodeCoverage; /** * Helper class to safely print numbers in a formatted way. * * @author Philip Helger */ @Immutable public final class LocaleFormatter { @PresentForCodeCoverage private static final LocaleFormatter INSTANCE = new LocaleFormatter (); private LocaleFormatter () {} /** * Format the passed value according to the rules specified by the given * locale. All calls to {@link Double#toString(double)} that are displayed to * the user should instead use this method. * * @param dValue * The value to be formatted. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. */ @Nonnull public static String getFormatted (final double dValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); return NumberFormat.getNumberInstance (aDisplayLocale).format (dValue); } /** * Format the passed value according to the rules specified by the given * locale. 
All calls to {@link Integer#toString(int)} that are displayed to * the user should instead use this method. * * @param nValue * The value to be formatted. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. */ @Nonnull public static String getFormatted (final int nValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); return NumberFormat.getIntegerInstance (aDisplayLocale).format (nValue); } /** * Format the passed value according to the rules specified by the given * locale. All calls to {@link Long#toString(long)} that are displayed to the * user should instead use this method. * * @param nValue * The value to be formatted. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. */ @Nonnull public static String getFormatted (final long nValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); return NumberFormat.getIntegerInstance (aDisplayLocale).format (nValue); } /** * Format the passed value according to the rules specified by the given * locale. All calls to {@link BigInteger#toString()} that are displayed to * the user should instead use this method. * * @param aValue * The value to be formatted. May not be <code>null</code>. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. */ @Nonnull public static String getFormatted (@Nonnull final BigInteger aValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aValue, "Value"); ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); return NumberFormat.getIntegerInstance (aDisplayLocale).format (aValue); } /** * Format the passed value according to the rules specified by the given * locale. All calls to {@link BigDecimal#toString()} that are displayed to * the user should instead use this method. 
By default a maximum of 3 fraction * digits are shown. * * @param aValue * The value to be formatted. May not be <code>null</code>. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. */ @Nonnull public static String getFormatted (@Nonnull final BigDecimal aValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aValue, "Value"); ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); return NumberFormat.getInstance (aDisplayLocale).format (aValue); } /** * Format the passed value according to the rules specified by the given * locale. * * @param aValue * The value to be formatted. May not be <code>null</code>. * @param nFractionDigits * The number of fractional digits to use. Must be &ge; 0. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. */ @Nonnull public static String getFormatted (@Nonnull final BigDecimal aValue, @Nonnegative final int nFractionDigits, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aValue, "Value"); ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); final NumberFormat aNF = NumberFormat.getInstance (aDisplayLocale); aNF.setMinimumFractionDigits (nFractionDigits); aNF.setMaximumFractionDigits (nFractionDigits); return aNF.format (aValue); } /** * Format the passed value according to the rules specified by the given * locale. All fraction digits of the passed value are displayed. * * @param aValue * The value to be formatted. May not be <code>null</code>. * @param aDisplayLocale * The locale to be used. May not be <code>null</code>. * @return The formatted string. 
*/ @Nonnull public static String getFormattedWithAllFractionDigits (@Nonnull final BigDecimal aValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aValue, "Value"); ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); final NumberFormat aNF = NumberFormat.getInstance (aDisplayLocale); aNF.setMaximumFractionDigits (aValue.scale ()); return aNF.format (aValue); } /** * Format the given value as percentage. The "%" sign is automatically * appended according to the requested locale. The number of fractional digits * depend on the locale. * * @param dValue * The value to be used. E.g. "0.125" will result in something like * "12.5%" * @param aDisplayLocale * The locale to use. * @return The non-<code>null</code> formatted string. */ @Nonnull public static String getFormattedPercent (final double dValue, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); return NumberFormat.getPercentInstance (aDisplayLocale).format (dValue); } /** * Format the given value as percentage. The "%" sign is automatically * appended according to the requested locale. * * @param dValue * The value to be used. E.g. "0.125" will result in something like * "12.5%" * @param nFractionDigits * The number of fractional digits to use. Must be &ge; 0. * @param aDisplayLocale * The locale to use. * @return The non-<code>null</code> formatted string. */ @Nonnull public static String getFormattedPercent (final double dValue, @Nonnegative final int nFractionDigits, @Nonnull final Locale aDisplayLocale) { ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale"); final NumberFormat aNF = NumberFormat.getPercentInstance (aDisplayLocale); aNF.setMinimumFractionDigits (nFractionDigits); aNF.setMaximumFractionDigits (nFractionDigits); return aNF.format (dValue); } }
package org.eclipse.wang.datastructure;

/**
 * A simple unbalanced binary search tree of ints, following
 * http://cslibrary.stanford.edu/110/BinaryTrees.html#java
 * <p>
 * Duplicate values are allowed and are inserted into the left subtree.
 * Not thread-safe.
 *
 * @author super
 *
 */
public class BinaryTree {

	// Root of the tree; null for an empty tree.
	private Node root;

	/** A single tree node holding one int and links to its two subtrees. */
	private class Node {
		private Node left;
		private Node right;
		private int data;

		private Node(int newData) {
			this.left = null;
			this.right = null;
			this.data = newData;
		}
	}

	/** Inserts the given value into the tree, keeping the BST ordering. */
	public void insert(int data) {
		root = insert(root, data);
	}

	/**
	 * Recursive insert helper. Returns the (possibly new) subtree root so the
	 * caller can re-link it. Values equal to the node go to the left subtree.
	 */
	private Node insert(Node node, int data) {
		if (node == null) {
			node = new Node(data);
		} else {
			if (data <= node.data) {
				node.left = insert(node.left, data);
			} else {
				node.right = insert(node.right, data);
			}
		}
		return node;
	}

	/** Returns true if the given value is present in the tree. */
	public boolean lookup(int data) {
		return lookup(root, data);
	}

	/**
	 * Recursive lookup helper. Follows the BST ordering: smaller values are
	 * searched on the left, larger ones on the right.
	 */
	private boolean lookup(Node node, int data) {
		if (node == null) {
			return false;
		}
		if (data == node.data) {
			return true;
		} else if (data < node.data) {
			return lookup(node.left, data);
		} else {
			return lookup(node.right, data);
		}
	}

	/**
	 * Returns the number of nodes in the tree. Uses a recursive helper that
	 * counts the nodes of both subtrees plus the node itself.
	 * (Note: the original comment here wrongly described maxDepth.)
	 */
	public int size() {
		return size(root);
	}

	// Recursive node count: left subtree + this node + right subtree.
	private int size(Node node) {
		if (node == null) {
			return 0;
		} else {
			return size(node.left) + 1 + size(node.right);
		}
	}

	/**
	 * Returns the max root-to-leaf depth of the tree. Uses a recursive helper
	 * that recurs down to find the max depth. An empty tree has depth 0.
	 */
	public int maxDepth() {
		return maxDepth(root);
	}

	// Depth of a subtree = 1 + the deeper of its two children.
	private int maxDepth(Node node) {
		if (node == null) {
			return 0;
		} else {
			int lDepth = maxDepth(node.left);
			int rDepth = maxDepth(node.right);
			return Math.max(lDepth, rDepth) + 1;
		}
	}

	/**
	 * Returns the min value in a non-empty binary search tree. Uses a helper
	 * method that iterates to the left to find the min value.
	 * NOTE(review): throws NullPointerException when the tree is empty -
	 * callers must ensure the tree is non-empty.
	 */
	public int minValue() {
		return (minValue(root));
	}

	/**
	 * Finds the min value in a non-empty binary search tree by walking the
	 * leftmost path; the leftmost node holds the minimum.
	 */
	private int minValue(Node node) {
		Node current = node;
		while (current.left != null) {
			current = current.left;
		}
		return (current.data);
	}

	/**
	 * Prints the node values in the "inorder" order (sorted for a BST),
	 * space-separated and followed by a newline. Uses a recursive helper to
	 * do the traversal.
	 */
	public void printTreeInorder() {
		printTreeInorder(root);
		System.out.println();
	}

	private void printTreeInorder(Node node) {
		if (node == null)
			return;
		// left, node itself, right
		printTreeInorder(node.left);
		System.out.print(node.data + " ");
		printTreeInorder(node.right);
	}

	/** Prints the node values in preorder (node, left, right). */
	public void printTreePreorder() {
		printTreePreorder(root);
		System.out.println();
	}

	private void printTreePreorder(Node node) {
		if (node == null) {
			return;
		}
		// node itself, then left, then right
		System.out.print(node.data + " ");
		printTreePreorder(node.left);
		printTreePreorder(node.right);
	}

	/** Prints the node values in postorder (left, right, node). */
	public void printTreePostorder() {
		printTreePostorder(root);
		System.out.println();
	}

	private void printTreePostorder(Node node) {
		if (node == null) {
			return;
		} else {
			printTreePostorder(node.left);
			printTreePostorder(node.right);
			System.out.print(node.data + " ");
		}
	}

	/**
	 * Returns true if this tree and the other tree are structurally identical:
	 * same shape and same values at every node.
	 */
	public boolean sameTree(BinaryTree other) {
		return (sameTree(root, other.root));
	}

	// Two subtrees match iff both are empty, or both are non-empty with equal
	// data and recursively matching children.
	private boolean sameTree(Node a, Node b) {
		if (a == null && b == null) {
			return true;
		} else if (a != null && b != null) {
			return a.data == b.data && sameTree(a.left, b.left) && sameTree(a.right, b.right);
		} else {
			return false;
		}
	}
}
/*
 * Copyright (c) 2010-2016 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.web.page.admin.server.dto;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.evolveum.midpoint.xml.ns._public.common.common_3.MisfireActionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ThreadStopActionType;

import javax.xml.namespace.QName;

/**
 * Form-backing bean for the "add task" page: basic identification, scheduling,
 * and advanced execution options for a new task.
 * <p>
 * The F_* constants are property-path names consumed by reflective UI binding,
 * so their string values must match the field names below exactly - including
 * the misspelled {@code reccuring}.
 *
 * @author lazyman
 */
public class TaskAddDto implements Serializable {

	public static final String F_DRY_RUN = "dryRun";
	public static final String F_FOCUS_TYPE = "focusType";
	public static final String F_KIND = "kind";
	public static final String F_INTENT = "intent";
	public static final String F_OBJECT_CLASS = "objectClass";
	public static final String F_RESOURCE = "resource";
	public static final String F_CATEGORY = "category";
	public static final String F_NAME = "name";
	// Value intentionally matches the misspelled field name "reccuring" below;
	// renaming either side would break the reflective property binding.
	public static final String F_RECURRING = "reccuring";
	public static final String F_BOUND = "bound";
	public static final String F_INTERVAL = "interval";
	public static final String F_CRON = "cron";
	public static final String F_NOT_START_BEFORE = "notStartBefore";
	public static final String F_NOT_START_AFTER = "notStartAfter";
	// NOTE(review): constant name has a typo (NODW) but the value is correct;
	// the name is public API, so it is left as-is.
	public static final String F_RUN_UNTIL_NODW_DOWN = "runUntilNodeDown";
	public static final String F_SUSPENDED_STATE = "suspendedState";
	public static final String F_THREAD_STOP = "threadStop";
	public static final String F_MISFIRE_ACTION = "misfireAction";

	private String category;
	private TaskAddResourcesDto resource;
	private String name;

	//Scheduling
	private boolean reccuring;
	private boolean bound;
	private Integer interval;
	private String cron;
	private Date notStartBefore;
	private Date notStartAfter;
	private boolean runUntilNodeDown;

	//Advanced
	private boolean suspendedState;
	private ThreadStopActionType threadStop;
	private MisfireActionType misfireAction = MisfireActionType.EXECUTE_IMMEDIATELY;

	private boolean dryRun;
	private QName focusType;
	private ShadowKindType kind;
	private String intent;
	private String objectClass;
	// Lazily initialized by getObjectClassList(); see below.
	private List<QName> objectClassList;

	public String getCategory() {
		return category;
	}

	public void setCategory(String category) {
		this.category = category;
	}

	public TaskAddResourcesDto getResource() {
		return resource;
	}

	public void setResource(TaskAddResourcesDto resource) {
		this.resource = resource;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	// NOTE(review): the boolean fields below use boxed Boolean accessors
	// (presumably for the UI binding framework). Passing null to a setter
	// auto-unboxes and throws NullPointerException - confirm callers never do.
	public Boolean getReccuring() {
		return reccuring;
	}

	public void setReccuring(Boolean reccuring) {
		this.reccuring = reccuring;
	}

	public Boolean getBound() {
		return bound;
	}

	public void setBound(Boolean bound) {
		this.bound = bound;
	}

	public Integer getInterval() {
		return interval;
	}

	public void setInterval(Integer interval) {
		this.interval = interval;
	}

	public String getCron() {
		return cron;
	}

	public void setCron(String cron) {
		this.cron = cron;
	}

	public Date getNotStartBefore() {
		return notStartBefore;
	}

	public void setNotStartBefore(Date notStartBefore) {
		this.notStartBefore = notStartBefore;
	}

	public Date getNotStartAfter() {
		return notStartAfter;
	}

	public void setNotStartAfter(Date notStartAfter) {
		this.notStartAfter = notStartAfter;
	}

	public Boolean getRunUntilNodeDown() {
		return runUntilNodeDown;
	}

	public void setRunUntilNodeDown(Boolean runUntilNodeDown) {
		this.runUntilNodeDown = runUntilNodeDown;
	}

	public Boolean getSuspendedState() {
		return suspendedState;
	}

	public void setSuspendedState(Boolean suspendedState) {
		this.suspendedState = suspendedState;
	}

	public ThreadStopActionType getThreadStop() {
		return threadStop;
	}

	public void setThreadStop(ThreadStopActionType threadStop) {
		this.threadStop = threadStop;
	}

	public MisfireActionType getMisfireAction() {
		return misfireAction;
	}

	public void setMisfireAction(MisfireActionType misfireAction) {
		this.misfireAction = misfireAction;
	}

	public boolean isDryRun() {
		return dryRun;
	}

	public void setDryRun(boolean dryRun) {
		this.dryRun = dryRun;
	}

	public QName getFocusType() {
		return focusType;
	}

	public void setFocusType(QName focusType) {
		this.focusType = focusType;
	}

	public ShadowKindType getKind() {
		return kind;
	}

	public void setKind(ShadowKindType kind) {
		this.kind = kind;
	}

	public String getIntent() {
		return intent;
	}

	public void setIntent(String intent) {
		this.intent = intent;
	}

	public String getObjectClass() {
		return objectClass;
	}

	public void setObjectClass(String objectClass) {
		this.objectClass = objectClass;
	}

	// Never returns null: creates an empty mutable list on first access.
	public List<QName> getObjectClassList() {
		if(objectClassList == null){
			objectClassList = new ArrayList<>();
		}
		return objectClassList;
	}

	public void setObjectClassList(List<QName> objectClassList) {
		this.objectClassList = objectClassList;
	}

	// Field-by-field value equality over every property above.
	@Override
	public boolean equals(Object o) {
		if (this == o) return true;
		if (!(o instanceof TaskAddDto)) return false;

		TaskAddDto that = (TaskAddDto) o;

		if (bound != that.bound) return false;
		if (dryRun != that.dryRun) return false;
		if (reccuring != that.reccuring) return false;
		if (runUntilNodeDown != that.runUntilNodeDown) return false;
		if (suspendedState != that.suspendedState) return false;
		if (category != null ? !category.equals(that.category) : that.category != null) return false;
		if (cron != null ? !cron.equals(that.cron) : that.cron != null) return false;
		if (intent != null ? !intent.equals(that.intent) : that.intent != null) return false;
		if (interval != null ? !interval.equals(that.interval) : that.interval != null) return false;
		if (kind != that.kind) return false;
		if (misfireAction != that.misfireAction) return false;
		if (name != null ? !name.equals(that.name) : that.name != null) return false;
		if (notStartAfter != null ? !notStartAfter.equals(that.notStartAfter) : that.notStartAfter != null)
			return false;
		if (notStartBefore != null ? !notStartBefore.equals(that.notStartBefore) : that.notStartBefore != null)
			return false;
		if (focusType != null ? !focusType.equals(that.focusType) : that.focusType != null) return false;
		if (objectClass != null ? !objectClass.equals(that.objectClass) : that.objectClass != null) return false;
		if (objectClassList != null ? !objectClassList.equals(that.objectClassList) : that.objectClassList != null)
			return false;
		if (resource != null ? !resource.equals(that.resource) : that.resource != null) return false;
		if (threadStop != that.threadStop) return false;

		return true;
	}

	// Standard 31-multiplier hash over the same fields compared in equals().
	@Override
	public int hashCode() {
		int result = category != null ? category.hashCode() : 0;
		result = 31 * result + (resource != null ? resource.hashCode() : 0);
		result = 31 * result + (name != null ? name.hashCode() : 0);
		result = 31 * result + (reccuring ? 1 : 0);
		result = 31 * result + (bound ? 1 : 0);
		result = 31 * result + (interval != null ? interval.hashCode() : 0);
		result = 31 * result + (cron != null ? cron.hashCode() : 0);
		result = 31 * result + (notStartBefore != null ? notStartBefore.hashCode() : 0);
		result = 31 * result + (notStartAfter != null ? notStartAfter.hashCode() : 0);
		result = 31 * result + (runUntilNodeDown ? 1 : 0);
		result = 31 * result + (suspendedState ? 1 : 0);
		result = 31 * result + (threadStop != null ? threadStop.hashCode() : 0);
		result = 31 * result + (misfireAction != null ? misfireAction.hashCode() : 0);
		result = 31 * result + (dryRun ? 1 : 0);
		result = 31 * result + (kind != null ? kind.hashCode() : 0);
		result = 31 * result + (intent != null ? intent.hashCode() : 0);
		result = 31 * result + (focusType != null ? focusType.hashCode() : 0);
		result = 31 * result + (objectClass != null ? objectClass.hashCode() : 0);
		result = 31 * result + (objectClassList != null ? objectClassList.hashCode() : 0);
		return result;
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.aries.blueprint.plugin;

import org.apache.aries.blueprint.plugin.model.Blueprint;
import org.apache.aries.blueprint.plugin.model.ConflictDetected;
import org.apache.aries.blueprint.plugin.spi.Activation;
import org.apache.aries.blueprint.plugin.spi.Availability;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.xbean.finder.ClassFinder;
import org.codehaus.plexus.classworlds.ClassWorld;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
import org.sonatype.plexus.build.incremental.BuildContext;

import java.io.File;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Generates blueprint from CDI annotations
 */
@Mojo(name = "blueprint-generate", requiresDependencyResolution = ResolutionScope.COMPILE,
        defaultPhase = LifecyclePhase.PROCESS_CLASSES,
        inheritByDefault = false, threadSafe = true)
public class GenerateMojo extends AbstractMojo {

    @Parameter(defaultValue = "${project}", required = true)
    protected MavenProject project;

    /** Packages to scan for beans; auto-detected from sources when empty. */
    @Parameter
    protected List<String> scanPaths;

    /**
     * Which extension namespaces should the plugin support
     */
    @Parameter
    protected Set<String> namespaces;

    @Component
    private BuildContext buildContext;

    /**
     * Name of file to write
     */
    @Parameter(defaultValue = "autowire.xml")
    protected String generatedFileName;

    /**
     * Base directory to write generated hierarchy.
     */
    @Parameter(defaultValue = "${project.build.directory}/generated-sources/blueprint/")
    private String baseDir;

    /**
     * Base directory to write into
     * (relative to baseDir property).
     */
    @Parameter(defaultValue = "OSGI-INF/blueprint/")
    private String generatedDir;

    /**
     * Specifies the default activation setting that will be defined for components.
     * Default is null, which indicates eager (blueprint default).
     * If LAZY then default-activation will be set to lazy.
     * If EAGER then default-activation will be explicitly set to eager.
     */
    @Parameter
    protected Activation defaultActivation;

    /**
     * Specifies the default availability setting that will be defined for components.
     * Default is null, which indicates mandatory (blueprint default).
     * If MANDATORY then default-activation will be set to mandatory.
     * If OPTIONAL then default-activation will be explicitly set to optional.
     */
    @Parameter
    protected Availability defaultAvailability;

    /**
     * Specifies the default timeout setting that will be defined for components.
     * Default is null, which indicates 300000 (blueprint default).
     */
    @Parameter
    protected Long defaultTimeout;

    /**
     * Specifies additional parameters which could be used in extensions
     */
    @Parameter
    protected Map<String, String> customParameters;

    /**
     * Which artifacts should be included in finding beans process
     */
    @Parameter
    private Set<String> includeArtifacts = new HashSet<>();

    /**
     * Which artifacts should be excluded from finding beans process
     */
    @Parameter
    private Set<String> excludeArtifacts = new HashSet<>();

    /**
     * Entry point: resolves the packages to scan, skips generation when no
     * sources changed (incremental build), otherwise builds the blueprint
     * model and writes it out.
     */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        List<String> toScan = getPackagesToScan();

        if (!sourcesChanged()) {
            getLog().info("Skipping blueprint generation because source files were not changed");
            return;
        }

        try {
            BlueprintConfigurationImpl blueprintConfiguration =
                    new BlueprintConfigurationImpl(namespaces, defaultActivation, customParameters,
                            defaultAvailability, defaultTimeout);
            generateBlueprint(toScan, blueprintConfiguration);
        } catch (ConflictDetected e) {
            // Conflicts carry a user-readable message; surface it directly
            throw new MojoExecutionException(e.getMessage(), e);
        } catch (Exception e) {
            throw new MojoExecutionException("Error during blueprint generation", e);
        }
    }

    // Pipeline: class finder -> bean classes -> blueprint model -> file.
    // Each stage is timed at debug level.
    private void generateBlueprint(List<String> toScan, BlueprintConfigurationImpl blueprintConfiguration) throws Exception {
        long startTime = System.currentTimeMillis();
        ClassFinder classFinder = createProjectScopeFinder();
        getLog().debug("Creating package scope class finder: " + (System.currentTimeMillis() - startTime) + "ms");

        startTime = System.currentTimeMillis();
        Set<Class<?>> classes = FilteredClassFinder.findClasses(classFinder, toScan);
        getLog().debug("Finding bean classes: " + (System.currentTimeMillis() - startTime) + "ms");

        startTime = System.currentTimeMillis();
        Blueprint blueprint = new Blueprint(blueprintConfiguration, classes);
        getLog().debug("Creating blueprint model: " + (System.currentTimeMillis() - startTime) + "ms");

        startTime = System.currentTimeMillis();
        writeBlueprintIfNeeded(blueprint);
        getLog().debug("Writing blueprint: " + (System.currentTimeMillis() - startTime) + "ms");
    }

    // Only write a file when at least one bean was found.
    private void writeBlueprintIfNeeded(Blueprint blueprint) throws Exception {
        if (blueprint.shouldBeGenerated()) {
            writeBlueprint(blueprint);
        } else {
            getLog().warn("Skipping blueprint generation because no beans were found");
        }
    }

    // Incremental-build check on the first compile source root.
    private boolean sourcesChanged() {
        return buildContext.hasDelta(new File(project.getCompileSourceRoots().iterator().next()));
    }

    /**
     * Writes the blueprint XML below baseDir/generatedDir/generatedFileName.
     * FIX: the output stream is now opened in try-with-resources so it is
     * closed even when BlueprintFileWriter.write() throws (the original
     * leaked the stream on failure).
     */
    private void writeBlueprint(Blueprint blueprint) throws Exception {
        ResourceInitializer.prepareBaseDir(project, baseDir);
        File dir = new File(baseDir, generatedDir);
        File file = new File(dir, generatedFileName);
        file.getParentFile().mkdirs();
        getLog().info("Generating blueprint to " + file);

        try (OutputStream fos = buildContext.newFileOutputStream(file)) {
            new BlueprintFileWriter(fos).write(blueprint);
        }
    }

    /**
     * Builds a ClassFinder over the project's output directory plus all
     * resolved dependency artifacts. All artifacts are added to the class
     * realm (so classes load), but only non-excluded ones are scanned for
     * beans.
     */
    private ClassFinder createProjectScopeFinder() throws Exception {
        List<URL> urls = new ArrayList<>();
        long startTime = System.currentTimeMillis();
        ClassRealm classRealm = new ClassRealm(new ClassWorld(), "maven-blueprint-plugin-classloader", getClass().getClassLoader());
        classRealm.addURL(new File(project.getBuild().getOutputDirectory()).toURI().toURL());
        urls.add(new File(project.getBuild().getOutputDirectory()).toURI().toURL());
        ArtifactFilter artifactFilter = new ArtifactFilter(includeArtifacts, excludeArtifacts);
        for (Object artifactO : project.getArtifacts()) {
            Artifact artifact = (Artifact) artifactO;
            File file = artifact.getFile();
            if (file == null) {
                // Unresolved artifact - nothing on disk to scan or load
                continue;
            }
            URL artifactUrl = file.toURI().toURL();
            classRealm.addURL(artifactUrl);
            if (artifactFilter.shouldExclude(artifact)) {
                getLog().debug("Excluded artifact: " + artifact);
                continue;
            }
            getLog().debug("Taken artifact: " + artifact);
            urls.add(artifactUrl);
        }
        getLog().debug("  Create class loader: " + (System.currentTimeMillis() - startTime) + "ms");
        startTime = System.currentTimeMillis();
        ClassFinder classFinder = new ClassFinder(classRealm, urls);
        getLog().debug("  Building class finder: " + (System.currentTimeMillis() - startTime) + "ms");
        return classFinder;
    }

    /**
     * Returns the configured scanPaths, or - when none are configured -
     * discovers all packages under the compile source roots (sorted).
     *
     * @throws MojoExecutionException when a source file has no package
     */
    private List<String> getPackagesToScan() throws MojoExecutionException {
        List<String> toScan = scanPaths;
        if (scanPaths == null || scanPaths.size() == 0 || scanPaths.iterator().next() == null) {
            getLog().info("Scan paths not specified - searching for packages");
            Set<String> packages = PackageFinder.findPackagesInSources(project.getCompileSourceRoots());
            if (packages.contains(null)) {
                throw new MojoExecutionException("Found file without package");
            }
            toScan = new ArrayList<>(packages);
            Collections.sort(toScan);
        }
        for (String aPackage : toScan) {
            getLog().info("Package " + aPackage + " will be scanned");
        }
        return toScan;
    }
}
/*
 *
 *  Copyright 2015 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.genie.core.jobmanager.impl;

import com.netflix.genie.common.exceptions.GenieException;
import com.netflix.genie.common.exceptions.GeniePreconditionException;
import com.netflix.genie.common.model.Job;
import com.netflix.genie.common.model.JobStatus;
import com.netflix.genie.core.jobmanager.JobManager;
import com.netflix.genie.core.jobmanager.JobMonitor;
import com.netflix.genie.core.services.ExecutionService;
import com.netflix.genie.core.services.JobService;
import com.netflix.genie.core.metrics.GenieNodeStatistics;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import java.io.File;

/**
 * The monitor thread that gets launched for each job.
 * <p>
 * Polls the launched process until it exits, periodically refreshing the job's
 * update time in the DB, and kills the job when its stdout/stderr files grow
 * past the configured limits. Prototype-scoped: one instance per job.
 *
 * @author skrishnan
 * @author amsharma
 * @author tgianos
 */
@Component
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
public class JobMonitorImpl implements JobMonitor {

    private static final Logger LOG = LoggerFactory.getLogger(JobMonitorImpl.class);

    // interval to check status, and update in database if needed
    private static final int JOB_UPDATE_TIME_MS = 60000;

    // stdout filename
    // NOTE(review): these constants are used only in log messages; the actual
    // files created in setWorkingDir() are named "stdout.log"/"stderr.log" -
    // confirm whether this mismatch is intentional.
    private static final String STDOUT_FILENAME = "stdout";

    // stderr filename
    private static final String STDERR_FILENAME = "stderr";

    private final GenieNodeStatistics genieNodeStatistics;
    private final ExecutionService xs;
    private final JobService jobService;

    // max specified stdout size
    // NOTE(review): the default literal "null" may not convert to Long when
    // the property is unset - confirm property configuration.
    @Value("${com.netflix.genie.job.max.stdout.size:null}")
    private Long maxStdoutSize;

    // max specified stderr size
    @Value("${com.netflix.genie.job.max.stderr.size:null}")
    private Long maxStderrSize;

    // id of the job this monitor watches; set via setJob()
    private String jobId;
    private JobManager jobManager;

    // last updated time in DB
    private long lastUpdatedTimeMS;

    // the handle to the process for the running job
    // NOTE(review): null until setProcess() is called; run()/isRunning()
    // assume it has been set.
    private Process process;

    // the working directory for this job
    private String workingDir;

    // the stdout for this job
    private File stdOutFile;

    // the stderr for this job
    private File stdErrFile;

    // whether this job has been terminated by the monitor thread
    private boolean terminated;

    // poll interval between liveness checks; overridable via setThreadSleepTime()
    private int threadSleepTime = 5000;

    /**
     * Constructor.
     *
     * @param xs                 The job execution service.
     * @param jobService         The job service API's to use.
     * @param genieNodeStatistics The statistics object to use
     */
    @Autowired
    public JobMonitorImpl(
            final ExecutionService xs,
            final JobService jobService,
            final GenieNodeStatistics genieNodeStatistics
    ) {
        this.xs = xs;
        this.jobService = jobService;
        this.genieNodeStatistics = genieNodeStatistics;
        this.workingDir = null;
        this.process = null;
        this.stdOutFile = null;
        this.stdErrFile = null;
    }

    /**
     * {@inheritDoc}
     *
     * Stores only the job id; the full job is re-fetched from the service
     * when needed.
     */
    @Override
    public void setJob(final Job job) throws GenieException {
        if (job == null || StringUtils.isBlank(job.getId())) {
            throw new GeniePreconditionException("No job entered.");
        }
        this.jobId = job.getId();
    }

    /**
     * {@inheritDoc}
     *
     * Also derives the stdout.log/stderr.log file handles used for the
     * size-limit checks, when a non-null directory is given.
     */
    @Override
    public void setWorkingDir(final String workingDir) {
        this.workingDir = workingDir;
        if (this.workingDir != null) {
            this.stdOutFile = new File(this.workingDir + File.separator + "stdout.log");
            this.stdErrFile = new File(this.workingDir + File.separator + "stderr.log");
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setProcess(final Process process) throws GenieException {
        if (process == null) {
            throw new GeniePreconditionException("No process entered.");
        }
        this.process = process;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setJobManager(final JobManager jobManager) throws GenieException {
        if (jobManager == null) {
            throw new GeniePreconditionException("No job manager entered.");
        }
        this.jobManager = jobManager;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setThreadSleepTime(final int threadSleepTime) throws GenieException {
        if (threadSleepTime < 1) {
            throw new GeniePreconditionException("Sleep time was less than 1. Unable to sleep that little.");
        }
        this.threadSleepTime = threadSleepTime;
    }

    /**
     * The main run method for this thread - wait till it finishes, and manage
     * job state in DB. Afterwards, optionally notifies the job owner by email.
     */
    @Override
    public void run() {
        try {
            // wait for process to complete; finalizeJob records the exit code
            // and returns the final status (KILLED if the job was killed)
            final boolean killed = this.xs.finalizeJob(this.jobId, waitForExit()) == JobStatus.KILLED;

            // Check if user email address is specified. If so
            // send an email to user about job completion.
            final String emailTo = this.jobService.getJob(this.jobId).getEmail();
            if (emailTo != null) {
                LOG.info("User email address: " + emailTo);
                if (sendEmail(emailTo, killed)) {
                    // Email sent successfully. Update success email counter
                    // NOTE(review): sendEmail is currently a stub that always
                    // returns true, so this counter increments even though no
                    // mail is actually sent.
                    this.genieNodeStatistics.incrSuccessfulEmailCount();
                } else {
                    // Failed to send email. Update email failed counter
                    LOG.warn("Failed to send email.");
                    this.genieNodeStatistics.incrFailedEmailCount();
                }
            }
        } catch (final GenieException ge) {
            //TODO: Some sort of better handling.
            LOG.error(ge.getMessage(), ge);
        }
    }

    /**
     * Is the job running?
     *
     * @return true if job is running, false otherwise
     */
    private boolean isRunning() {
        try {
            // exitValue() throws while the process is still alive
            this.process.exitValue();
        } catch (final IllegalThreadStateException e) {
            return true;
        }
        return false;
    }

    /**
     * Check if it is time to update the job status.
     *
     * @return true if job hasn't been updated for configured time, false
     *         otherwise
     */
    private boolean shouldUpdateJob() {
        final long curTimeMS = System.currentTimeMillis();
        final long timeSinceStartMS = curTimeMS - this.lastUpdatedTimeMS;
        return timeSinceStartMS >= JOB_UPDATE_TIME_MS;
    }

    /**
     * Wait until the job finishes, and then return exit code. Also ensure that
     * stdout/stderr are within the limit (if specified), and update DB status
     * periodically (as RUNNING).
     *
     * @return exit code for the job after it finishes
     * @throws GenieException on issue
     */
    private int waitForExit() throws GenieException {
        this.lastUpdatedTimeMS = System.currentTimeMillis();
        while (this.isRunning()) {
            try {
                Thread.sleep(this.threadSleepTime);
            } catch (final InterruptedException e) {
                // NOTE(review): interrupt is swallowed without re-interrupting
                // the thread (Thread.currentThread().interrupt()) - confirm
                // this is intentional for the monitor's lifecycle.
                LOG.error("Exception while waiting for job " + this.jobId + " to finish", e);
                // move on
            }

            // update status only in JOB_UPDATE_TIME_MS intervals
            if (shouldUpdateJob()) {
                this.lastUpdatedTimeMS = this.jobService.setUpdateTime(this.jobId);

                // kill the job if it is writing out more than the max stdout/stderr limit
                // if it has been terminated already, move on and wait for it to clean up after itself
                String issueFile = null;
                if (!this.terminated) {
                    if (this.stdOutFile != null
                            && this.stdOutFile.exists()
                            && this.maxStdoutSize != null
                            && this.stdOutFile.length() > this.maxStdoutSize
                            ) {
                        issueFile = STDOUT_FILENAME;
                    } else if (
                            this.stdErrFile != null
                                    && this.stdErrFile.exists()
                                    && this.maxStderrSize != null
                                    && this.stdErrFile.length() > this.maxStderrSize
                            ) {
                        issueFile = STDERR_FILENAME;
                    }
                }
                if (issueFile != null) {
                    LOG.warn("Killing job " + this.jobId + " as its " + issueFile + " is greater than limit");
                    // kill the job - no need to update status, as it will be updated during next iteration
                    try {
                        this.jobManager.kill();
                        this.terminated = true;
                    } catch (final GenieException e) {
                        LOG.error("Can't kill job " + this.jobId + " after exceeding " + issueFile + " limit", e);
                        // continue - hoping that it can get cleaned up during next iteration
                    }
                }
            }
        }
        return this.process.exitValue();
    }

    /**
     * Check the properties file to figure out if an email needs to be sent at
     * the end of the job. If yes, get mail properties and try and send email
     * about Job Status.
     * <p>
     * Currently a stub: the legacy javax.mail implementation (SMTP host/auth
     * configuration, MimeMessage construction with job id/name/status/output
     * URI, and an SMTPAuthenticator inner class) was commented out and has
     * been removed pending the Spring Mail rewrite. The stub unconditionally
     * reports success.
     *
     * @return true if the email was sent (always true while stubbed)
     * @throws GenieException on issue
     */
    //TODO: Re-write this using Spring Mail
    private boolean sendEmail(final String emailTo, final boolean killed) throws GenieException {
        return true;
    }
}
/** */
package etlMetaModel.impl;

import etlMetaModel.EolOperation;
import etlMetaModel.ErlNamedRule;
import etlMetaModel.EtlMetaModelPackage;
import etlMetaModel.EtlModule;
import etlMetaModel.EtlTransformationRule;
import etlMetaModel.MofAssociation;
import etlMetaModel.MofClass;

import java.util.Collection;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Etl Module</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getMofClassesSource <em>Mof Classes Source</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getMofAssociationsSource <em>Mof Associations Source</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getMofClassesTarget <em>Mof Classes Target</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getMofAssociationsTarget <em>Mof Associations Target</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getPre <em>Pre</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getTransformationRules <em>Transformation Rules</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getPost <em>Post</em>}</li>
 *   <li>{@link etlMetaModel.impl.EtlModuleImpl#getOperations <em>Operations</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
// NOTE(review): EMF-generated implementation class. Hand edits to method bodies
// will be discarded when the model is regenerated unless the corresponding
// "@generated" tag is changed to "@generated NOT" — keep code token-identical.
public class EtlModuleImpl extends EolLibraryModuleImpl implements EtlModule {
	/**
	 * The cached value of the '{@link #getMofClassesSource() <em>Mof Classes Source</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMofClassesSource()
	 * @generated
	 * @ordered
	 */
	// Lazily created in getMofClassesSource(); null until first access.
	protected EList<MofClass> mofClassesSource;

	/**
	 * The cached value of the '{@link #getMofAssociationsSource() <em>Mof Associations Source</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMofAssociationsSource()
	 * @generated
	 * @ordered
	 */
	protected EList<MofAssociation> mofAssociationsSource;

	/**
	 * The cached value of the '{@link #getMofClassesTarget() <em>Mof Classes Target</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMofClassesTarget()
	 * @generated
	 * @ordered
	 */
	protected EList<MofClass> mofClassesTarget;

	/**
	 * The cached value of the '{@link #getMofAssociationsTarget() <em>Mof Associations Target</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMofAssociationsTarget()
	 * @generated
	 * @ordered
	 */
	protected EList<MofAssociation> mofAssociationsTarget;

	/**
	 * The cached value of the '{@link #getPre() <em>Pre</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPre()
	 * @generated
	 * @ordered
	 */
	protected ErlNamedRule pre;

	/**
	 * The cached value of the '{@link #getTransformationRules() <em>Transformation Rules</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTransformationRules()
	 * @generated
	 * @ordered
	 */
	protected EList<EtlTransformationRule> transformationRules;

	/**
	 * The cached value of the '{@link #getPost() <em>Post</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPost()
	 * @generated
	 * @ordered
	 */
	protected ErlNamedRule post;

	/**
	 * The cached value of the '{@link #getOperations() <em>Operations</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getOperations()
	 * @generated
	 * @ordered
	 */
	protected EList<EolOperation> operations;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EtlModuleImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Identifies this instance's EClass within the generated metamodel package.
	@Override
	protected EClass eStaticClass() {
		return EtlMetaModelPackage.Literals.ETL_MODULE;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Lazy-init accessor: the containment list is created on first call and
	// wired to this object via the feature ID so EMF can manage containment.
	public EList<MofClass> getMofClassesSource() {
		if (mofClassesSource == null) {
			mofClassesSource = new EObjectContainmentEList<MofClass>(MofClass.class, this, EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_SOURCE);
		}
		return mofClassesSource;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<MofAssociation> getMofAssociationsSource() {
		if (mofAssociationsSource == null) {
			mofAssociationsSource = new EObjectContainmentEList<MofAssociation>(MofAssociation.class, this, EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_SOURCE);
		}
		return mofAssociationsSource;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<MofClass> getMofClassesTarget() {
		if (mofClassesTarget == null) {
			mofClassesTarget = new EObjectContainmentEList<MofClass>(MofClass.class, this, EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_TARGET);
		}
		return mofClassesTarget;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<MofAssociation> getMofAssociationsTarget() {
		if (mofAssociationsTarget == null) {
			mofAssociationsTarget = new EObjectContainmentEList<MofAssociation>(MofAssociation.class, this, EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_TARGET);
		}
		return mofAssociationsTarget;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ErlNamedRule getPre() {
		return pre;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Sets the 'pre' containment reference without touching the inverse side;
	// queues a SET notification on msgs (creating the chain if necessary).
	// Used by setPre(...) and eInverseRemove(...).
	public NotificationChain basicSetPre(ErlNamedRule newPre, NotificationChain msgs) {
		ErlNamedRule oldPre = pre;
		pre = newPre;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.ETL_MODULE__PRE, oldPre, newPre);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Public setter: detaches the old contained rule, attaches the new one,
	// then dispatches all accumulated notifications in one batch. The final
	// "touch" notification (newPre, newPre) is standard EMF behavior when the
	// value is unchanged but notification is still required.
	public void setPre(ErlNamedRule newPre) {
		if (newPre != pre) {
			NotificationChain msgs = null;
			if (pre != null)
				msgs = ((InternalEObject)pre).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.ETL_MODULE__PRE, null, msgs);
			if (newPre != null)
				msgs = ((InternalEObject)newPre).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.ETL_MODULE__PRE, null, msgs);
			msgs = basicSetPre(newPre, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.ETL_MODULE__PRE, newPre, newPre));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<EtlTransformationRule> getTransformationRules() {
		if (transformationRules == null) {
			transformationRules = new EObjectContainmentEList<EtlTransformationRule>(EtlTransformationRule.class, this, EtlMetaModelPackage.ETL_MODULE__TRANSFORMATION_RULES);
		}
		return transformationRules;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ErlNamedRule getPost() {
		return post;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Mirror of basicSetPre for the 'post' containment reference.
	public NotificationChain basicSetPost(ErlNamedRule newPost, NotificationChain msgs) {
		ErlNamedRule oldPost = post;
		post = newPost;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.ETL_MODULE__POST, oldPost, newPost);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Mirror of setPre for the 'post' containment reference.
	public void setPost(ErlNamedRule newPost) {
		if (newPost != post) {
			NotificationChain msgs = null;
			if (post != null)
				msgs = ((InternalEObject)post).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.ETL_MODULE__POST, null, msgs);
			if (newPost != null)
				msgs = ((InternalEObject)newPost).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EtlMetaModelPackage.ETL_MODULE__POST, null, msgs);
			msgs = basicSetPost(newPost, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, EtlMetaModelPackage.ETL_MODULE__POST, newPost, newPost));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<EolOperation> getOperations() {
		if (operations == null) {
			operations = new EObjectContainmentEList<EolOperation>(EolOperation.class, this, EtlMetaModelPackage.ETL_MODULE__OPERATIONS);
		}
		return operations;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective removal of a contained object: dispatches on the feature ID
	// and delegates to the matching containment list or basicSet* method.
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_SOURCE:
				return ((InternalEList<?>)getMofClassesSource()).basicRemove(otherEnd, msgs);
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_SOURCE:
				return ((InternalEList<?>)getMofAssociationsSource()).basicRemove(otherEnd, msgs);
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_TARGET:
				return ((InternalEList<?>)getMofClassesTarget()).basicRemove(otherEnd, msgs);
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_TARGET:
				return ((InternalEList<?>)getMofAssociationsTarget()).basicRemove(otherEnd, msgs);
			case EtlMetaModelPackage.ETL_MODULE__PRE:
				return basicSetPre(null, msgs);
			case EtlMetaModelPackage.ETL_MODULE__TRANSFORMATION_RULES:
				return ((InternalEList<?>)getTransformationRules()).basicRemove(otherEnd, msgs);
			case EtlMetaModelPackage.ETL_MODULE__POST:
				return basicSetPost(null, msgs);
			case EtlMetaModelPackage.ETL_MODULE__OPERATIONS:
				return ((InternalEList<?>)getOperations()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective getter used by the EMF runtime (eGet on a feature ID).
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_SOURCE:
				return getMofClassesSource();
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_SOURCE:
				return getMofAssociationsSource();
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_TARGET:
				return getMofClassesTarget();
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_TARGET:
				return getMofAssociationsTarget();
			case EtlMetaModelPackage.ETL_MODULE__PRE:
				return getPre();
			case EtlMetaModelPackage.ETL_MODULE__TRANSFORMATION_RULES:
				return getTransformationRules();
			case EtlMetaModelPackage.ETL_MODULE__POST:
				return getPost();
			case EtlMetaModelPackage.ETL_MODULE__OPERATIONS:
				return getOperations();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective setter: list features are cleared then bulk-added; single
	// references delegate to the typed setters. Casts are unchecked because
	// the reflective API passes Object.
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_SOURCE:
				getMofClassesSource().clear();
				getMofClassesSource().addAll((Collection<? extends MofClass>)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_SOURCE:
				getMofAssociationsSource().clear();
				getMofAssociationsSource().addAll((Collection<? extends MofAssociation>)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_TARGET:
				getMofClassesTarget().clear();
				getMofClassesTarget().addAll((Collection<? extends MofClass>)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_TARGET:
				getMofAssociationsTarget().clear();
				getMofAssociationsTarget().addAll((Collection<? extends MofAssociation>)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__PRE:
				setPre((ErlNamedRule)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__TRANSFORMATION_RULES:
				getTransformationRules().clear();
				getTransformationRules().addAll((Collection<? extends EtlTransformationRule>)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__POST:
				setPost((ErlNamedRule)newValue);
				return;
			case EtlMetaModelPackage.ETL_MODULE__OPERATIONS:
				getOperations().clear();
				getOperations().addAll((Collection<? extends EolOperation>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective unset: lists are cleared, single references set to null.
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_SOURCE:
				getMofClassesSource().clear();
				return;
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_SOURCE:
				getMofAssociationsSource().clear();
				return;
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_TARGET:
				getMofClassesTarget().clear();
				return;
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_TARGET:
				getMofAssociationsTarget().clear();
				return;
			case EtlMetaModelPackage.ETL_MODULE__PRE:
				setPre((ErlNamedRule)null);
				return;
			case EtlMetaModelPackage.ETL_MODULE__TRANSFORMATION_RULES:
				getTransformationRules().clear();
				return;
			case EtlMetaModelPackage.ETL_MODULE__POST:
				setPost((ErlNamedRule)null);
				return;
			case EtlMetaModelPackage.ETL_MODULE__OPERATIONS:
				getOperations().clear();
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Reflective "is set" check: a feature counts as set when its cached
	// field is non-null (and, for lists, non-empty).
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_SOURCE:
				return mofClassesSource != null && !mofClassesSource.isEmpty();
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_SOURCE:
				return mofAssociationsSource != null && !mofAssociationsSource.isEmpty();
			case EtlMetaModelPackage.ETL_MODULE__MOF_CLASSES_TARGET:
				return mofClassesTarget != null && !mofClassesTarget.isEmpty();
			case EtlMetaModelPackage.ETL_MODULE__MOF_ASSOCIATIONS_TARGET:
				return mofAssociationsTarget != null && !mofAssociationsTarget.isEmpty();
			case EtlMetaModelPackage.ETL_MODULE__PRE:
				return pre != null;
			case EtlMetaModelPackage.ETL_MODULE__TRANSFORMATION_RULES:
				return transformationRules != null && !transformationRules.isEmpty();
			case EtlMetaModelPackage.ETL_MODULE__POST:
				return post != null;
			case EtlMetaModelPackage.ETL_MODULE__OPERATIONS:
				return operations != null && !operations.isEmpty();
		}
		return super.eIsSet(featureID);
	}

} //EtlModuleImpl
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.axis2.util; import org.apache.axiom.soap.SOAPBody; import org.apache.axiom.soap.SOAPEnvelope; import org.apache.axiom.soap.SOAPFault; import org.apache.axiom.util.UIDGenerator; import org.apache.axis2.AxisFault; import org.apache.axis2.Constants; import org.apache.axis2.ServiceObjectSupplier; import org.apache.axis2.transport.TransportListener; import org.apache.axis2.transport.http.HTTPConstants; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.ConfigurationContextFactory; import org.apache.axis2.context.MessageContext; import org.apache.axis2.context.ServiceContext; import org.apache.axis2.context.ServiceGroupContext; import org.apache.axis2.description.AxisModule; import org.apache.axis2.description.AxisOperation; import org.apache.axis2.description.AxisService; import org.apache.axis2.description.Flow; import org.apache.axis2.description.HandlerDescription; import org.apache.axis2.description.InOnlyAxisOperation; import org.apache.axis2.description.InOutAxisOperation; import org.apache.axis2.description.OutInAxisOperation; import org.apache.axis2.description.Parameter; import org.apache.axis2.description.PhaseRule; import 
org.apache.axis2.description.Version; import org.apache.axis2.description.WSDL2Constants; import org.apache.axis2.engine.AxisConfiguration; import org.apache.axis2.engine.AxisError; import org.apache.axis2.engine.Handler; import org.apache.axis2.engine.MessageReceiver; import org.apache.axis2.i18n.Messages; import org.apache.axis2.receivers.RawXMLINOutMessageReceiver; import org.apache.axis2.wsdl.WSDLConstants; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.xml.namespace.QName; import java.io.File; import java.lang.reflect.Modifier; import java.security.AccessController; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.text.ParseException; import java.util.HashMap; import java.util.Iterator; import java.util.Enumeration; import java.util.Map; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.SocketException; import java.net.NetworkInterface; import java.net.InetAddress; public class Utils { private static final Log log = LogFactory.getLog(Utils.class); private static final String LOCAL_TRANSPORT_PREFIX = "local:/"; private static final String HTTP_URL_PREFIX = "http://"; private static final String AXIS2_SERVICE_TEST_PREFIX = "/axis2/services/"; public static void addHandler(Flow flow, Handler handler, String phaseName) { HandlerDescription handlerDesc = new HandlerDescription(handler.getName()); PhaseRule rule = new PhaseRule(phaseName); handlerDesc.setRules(rule); handler.init(handlerDesc); handlerDesc.setHandler(handler); flow.addHandler(handlerDesc); } /** * @see org.apache.axis2.util.MessageContextBuilder:createOutMessageContext() * @deprecated (post1.1branch) */ public static MessageContext createOutMessageContext(MessageContext inMessageContext) throws AxisFault { return MessageContextBuilder.createOutMessageContext(inMessageContext); } public static AxisService createSimpleService(QName serviceName, String 
className, QName opName) throws AxisFault { return createSimpleService(serviceName, new RawXMLINOutMessageReceiver(), className, opName); } public static AxisService createSimpleServiceforClient(QName serviceName, String className, QName opName) throws AxisFault { return createSimpleServiceforClient(serviceName, new RawXMLINOutMessageReceiver(), className, opName); } public static AxisService createSimpleInOnlyService(QName serviceName, MessageReceiver messageReceiver, QName opName) throws AxisFault { AxisService service = new AxisService(serviceName.getLocalPart()); service.setClassLoader(getContextClassLoader_DoPriv()); AxisOperation axisOp = new InOnlyAxisOperation(opName); axisOp.setMessageReceiver(messageReceiver); axisOp.setStyle(WSDLConstants.STYLE_RPC); service.addOperation(axisOp); service.mapActionToOperation(Constants.AXIS2_NAMESPACE_URI + "/" + opName.getLocalPart(), axisOp); return service; } private static ClassLoader getContextClassLoader_DoPriv() { return (ClassLoader) org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedAction<ClassLoader>() { public ClassLoader run() { return Thread.currentThread().getContextClassLoader(); } } ); } public static AxisService createSimpleService(QName serviceName, MessageReceiver messageReceiver, String className, QName opName) throws AxisFault { AxisService service = new AxisService(serviceName.getLocalPart()); service.setClassLoader(getContextClassLoader_DoPriv()); service.addParameter(new Parameter(Constants.SERVICE_CLASS, className)); AxisOperation axisOp = new InOutAxisOperation(opName); axisOp.setMessageReceiver(messageReceiver); axisOp.setStyle(WSDLConstants.STYLE_RPC); service.addOperation(axisOp); service.mapActionToOperation(Constants.AXIS2_NAMESPACE_URI + "/" + opName.getLocalPart(), axisOp); return service; } public static AxisService createSimpleServiceforClient(QName serviceName, MessageReceiver messageReceiver, String className, QName opName) throws AxisFault { AxisService 
service = new AxisService(serviceName.getLocalPart()); service.setClassLoader(getContextClassLoader_DoPriv()); service.addParameter(new Parameter(Constants.SERVICE_CLASS, className)); AxisOperation axisOp = new OutInAxisOperation(opName); axisOp.setMessageReceiver(messageReceiver); axisOp.setStyle(WSDLConstants.STYLE_RPC); service.addOperation(axisOp); return service; } public static ServiceContext fillContextInformation(AxisService axisService, ConfigurationContext configurationContext) throws AxisFault { // 2. if null, create new opCtxt // fill the service group context and service context info return fillServiceContextAndServiceGroupContext(axisService, configurationContext); } private static ServiceContext fillServiceContextAndServiceGroupContext(AxisService axisService, ConfigurationContext configurationContext) throws AxisFault { String serviceGroupContextId = UIDGenerator.generateURNString(); ServiceGroupContext serviceGroupContext = configurationContext.createServiceGroupContext(axisService.getAxisServiceGroup()); serviceGroupContext.setId(serviceGroupContextId); configurationContext.addServiceGroupContextIntoSoapSessionTable(serviceGroupContext); return serviceGroupContext.getServiceContext(axisService); } /** * Break a full path into pieces * * @return an array where element [0] always contains the service, and element 1, if not null, contains * the path after the first element. all ? parameters are discarded. */ public static String[] parseRequestURLForServiceAndOperation(String path, String servicePath) { if (log.isDebugEnabled()) { log.debug("parseRequestURLForServiceAndOperation : [" + path + "][" + servicePath + "]"); } if (path == null) { return null; } String[] values = new String[2]; // TODO. This is kind of brittle. Any service with the name /services would cause fun. 
int index = path.lastIndexOf(servicePath); String service; if (-1 != index) { int serviceStart = index + servicePath.length(); if (path.length() > serviceStart + 1) { service = path.substring(serviceStart + 1); int queryIndex = service.indexOf('?'); if (queryIndex > 0) { service = service.substring(0, queryIndex); } int operationIndex = service.indexOf('/'); if (operationIndex > 0) { values[0] = service.substring(0, operationIndex); values[1] = service.substring(operationIndex + 1); operationIndex = values[1].lastIndexOf('/'); if (operationIndex > 0) { values[1] = values[1].substring(operationIndex + 1); } } else { values[0] = service; } } } else { if (log.isDebugEnabled()) { log.debug("Unable to parse request URL [" + path + "][" + servicePath + "]"); } } return values; } /** * Gives the service/operation part from the incoming EPR * Ex: ..services/foo/bar/Version/getVersion -> foo/bar/Version/getVersion * @param path - incoming EPR * @param servicePath - Ex: 'services' * @return - service/operation part */ public static String getServiceAndOperationPart(String path, String servicePath) { if (path == null) { return null; } //with this chances that substring matching a different place in the URL is reduced if(!servicePath.endsWith("/")){ servicePath = servicePath+"/"; } //Adding "/" at the beginning of the "servicePath" to check the "paths" which are starting with "servicePath" if(!servicePath.startsWith("/")){ servicePath = "/"+servicePath; } String serviceOpPart = null; /* Here we had to check multiple cases to prevent the tests from failing with the fix given to https://wso2.org/jira/browse/ESBJAVA-4014. 1. Accessing admin services through local transport (local:/AuthenticationAdmin) 2. Dispatching requests coming to "http://127.0.0.1" kind of paths 3. 
Dispatching requests coming to "10.100.0.37/axis2/services" kind of paths */ if (path.startsWith(servicePath) || path.startsWith(LOCAL_TRANSPORT_PREFIX) || path.startsWith(HTTP_URL_PREFIX) || path.contains(AXIS2_SERVICE_TEST_PREFIX)) { int index = path.lastIndexOf(servicePath); int serviceStart = index + servicePath.length(); //get the string after services path if (path.length() > serviceStart) { serviceOpPart = path.substring(serviceStart); //remove everything after ? int queryIndex = serviceOpPart.indexOf('?'); if (queryIndex > 0) { serviceOpPart = serviceOpPart.substring(0, queryIndex); } } } return serviceOpPart; } /** * Compute the operation path from request URI using the servince name. Service name can be a * normal one or a hierarchical one. * Ex: ../services/Echo/echoString -> echoString * ../services/foo/1.0.0/Echo/echoString -> echoString * ../services/Echo/ -> null * @param path - request URI * @param serviceName - service name * @return - operation name if any, else null */ public static String getOperationName(String path, String serviceName) { if (path == null || serviceName == null) { return null; } String[] temp = path.split(serviceName + "/"); String operationName = null; if (temp.length > 1) { operationName = temp[temp.length - 1]; } else { //this scenario occurs if the endpoint name is there in the URL after service name temp = path.split(serviceName + "\\."); if (temp.length > 1) { operationName = temp[temp.length - 1]; operationName = operationName.substring(operationName.indexOf('/') + 1); } } if (operationName != null) { //remove everyting after '?' 
int queryIndex = operationName.indexOf('?'); if (queryIndex > 0) { operationName = operationName.substring(0, queryIndex); } //take the part upto / as the operation name if (operationName.indexOf("/") != -1) { operationName = operationName.substring(0, operationName.indexOf("/")); } } return operationName; } public static ConfigurationContext getNewConfigurationContext(String repositry) throws Exception { final File file = new File(repositry); boolean exists = exists(file); if (!exists) { throw new Exception("repository directory " + file.getAbsolutePath() + " does not exists"); } File axis2xml = new File(file, "axis.xml"); String axis2xmlString = null; if (exists(axis2xml)) { axis2xmlString = axis2xml.getName(); } String path = (String) org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedAction<String>() { public String run() { return file.getAbsolutePath(); } } ); return ConfigurationContextFactory .createConfigurationContextFromFileSystem(path, axis2xmlString); } private static boolean exists(final File file) { Boolean exists = (Boolean) org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedAction<Boolean>() { public Boolean run() { return new Boolean(file.exists()); } } ); return exists.booleanValue(); } public static String getParameterValue(Parameter param) { if (param == null) { return null; } else { return (String) param.getValue(); } } public static String getModuleName(String moduleName, String moduleVersion) { if (moduleVersion != null && !moduleVersion.isEmpty()) { moduleName = moduleName + "-" + moduleVersion; } return moduleName; } private static final String ILLEGAL_CHARACTERS = "/\n\r\t\0\f`?*\\<>|\":"; public static boolean isValidModuleName(String moduleName) { for (int i = 0; i < moduleName.length(); i++) { char c = moduleName.charAt(i); if ((c > 127) || (ILLEGAL_CHARACTERS.indexOf(c) >= 0)) { return false; } } return true; } /** * - if he trying to engage the same module then method will returen 
false * - else it will return true * */ public static boolean checkVersion(Version module1version, Version module2version) throws AxisFault { if ((module1version !=null && !module1version.equals(module2version)) || module2version !=null && !module2version.equals(module1version)) { throw new AxisFault("trying to engage two different module versions " + module1version + " : " + module2version); } return true; } public static void calculateDefaultModuleVersion(HashMap modules, AxisConfiguration axisConfig) { Iterator allModules = modules.values().iterator(); Map<String,Version> defaultModules = new HashMap<String,Version>(); while (allModules.hasNext()) { AxisModule axisModule = (AxisModule) allModules.next(); String name = axisModule.getName(); Version currentDefaultVersion = defaultModules.get(name); Version version = axisModule.getVersion(); if (currentDefaultVersion == null || (version != null && version.compareTo(currentDefaultVersion) > 0)) { defaultModules.put(name, version); } } Iterator def_mod_itr = defaultModules.keySet().iterator(); while (def_mod_itr.hasNext()) { String moduleName = (String) def_mod_itr.next(); Version version = defaultModules.get(moduleName); axisConfig.addDefaultModuleVersion(moduleName, version == null ? null : version.toString()); } } /** * Check if a MessageContext property is true. * * @param messageContext the MessageContext * @param propertyName the property name * @return true if the property is Boolean.TRUE, "true", 1, etc. or false otherwise * @deprecated please use MessageContext.isTrue(propertyName) instead */ public static boolean isExplicitlyTrue(MessageContext messageContext, String propertyName) { Object flag = messageContext.getProperty(propertyName); return JavaUtils.isTrueExplicitly(flag); } /** * Maps the String URI of the Message exchange pattern to a integer. * Further, in the first lookup, it will cache the looked * up value so that the subsequent method calls are extremely efficient. 
*/ @SuppressWarnings("deprecation") public static int getAxisSpecifMEPConstant(String messageExchangePattern) { int mepConstant = WSDLConstants.MEP_CONSTANT_INVALID; if (WSDL2Constants.MEP_URI_IN_OUT.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_IN_OUT.equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_IN_OUT.equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_IN_OUT; } else if ( WSDL2Constants.MEP_URI_IN_ONLY.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_IN_ONLY.equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_IN_ONLY .equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_IN_ONLY; } else if (WSDL2Constants.MEP_URI_IN_OPTIONAL_OUT .equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_IN_OPTIONAL_OUT .equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_IN_OPTIONAL_OUT .equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_IN_OPTIONAL_OUT; } else if (WSDL2Constants.MEP_URI_OUT_IN.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_OUT_IN.equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_OUT_IN .equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_OUT_IN; } else if (WSDL2Constants.MEP_URI_OUT_ONLY.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_OUT_ONLY .equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants .MEP_URI_OUT_ONLY.equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_OUT_ONLY; } else if (WSDL2Constants.MEP_URI_OUT_OPTIONAL_IN.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_OUT_OPTIONAL_IN .equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_OUT_OPTIONAL_IN .equals(messageExchangePattern)) { mepConstant = 
WSDLConstants.MEP_CONSTANT_OUT_OPTIONAL_IN; } else if (WSDL2Constants.MEP_URI_ROBUST_IN_ONLY.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_ROBUST_IN_ONLY .equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_ROBUST_IN_ONLY .equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_ROBUST_IN_ONLY; } else if (WSDL2Constants.MEP_URI_ROBUST_OUT_ONLY.equals(messageExchangePattern) || WSDLConstants.WSDL20_2006Constants.MEP_URI_ROBUST_OUT_ONLY .equals(messageExchangePattern) || WSDLConstants.WSDL20_2004_Constants.MEP_URI_ROBUST_OUT_ONLY .equals(messageExchangePattern)) { mepConstant = WSDLConstants.MEP_CONSTANT_ROBUST_OUT_ONLY; } if (mepConstant == WSDLConstants.MEP_CONSTANT_INVALID) { throw new AxisError(Messages.getMessage("mepmappingerror")); } return mepConstant; } /** * Get an AxisFault object to represent the SOAPFault in the SOAPEnvelope attached * to the provided MessageContext. This first check for an already extracted AxisFault * and otherwise does a simple extract. 
* <p/> * MUST NOT be passed a MessageContext which does not contain a SOAPFault * * @param messageContext * @return */ public static AxisFault getInboundFaultFromMessageContext(MessageContext messageContext) { // Get the fault if it's already been extracted by a handler AxisFault result = (AxisFault) messageContext.getProperty(Constants.INBOUND_FAULT_OVERRIDE); // Else, extract it from the SOAPBody if (result == null) { SOAPEnvelope envelope = messageContext.getEnvelope(); SOAPFault soapFault; SOAPBody soapBody; if (envelope != null && (soapBody = envelope.getBody()) != null) { if ((soapFault = soapBody.getFault()) != null) { return new AxisFault(soapFault, messageContext); } // If its a REST response the content is not a SOAP envelop and hence we will // Have use the soap body as the exception if (messageContext.isDoingREST() && soapBody.getFirstElement() != null) { AxisFault fault = new AxisFault(soapBody.getFirstElement().toString()); fault.setDetail(soapBody.getFirstElement()); return fault; } // if axis2 receives an rest type fault for an soap message then message context // has not been set to isDoingREST() but in this case we can detect it by using // the message type. so if the message type is application/xml we assum it as an rest call if ((messageContext.getProperty(Constants.Configuration.MESSAGE_TYPE) != null) && messageContext.getProperty(Constants.Configuration.MESSAGE_TYPE).equals(HTTPConstants.MEDIA_TYPE_APPLICATION_XML)){ if (soapBody.getFirstElement() != null){ AxisFault fault = new AxisFault(soapBody.getFirstElement().toString()); fault.setDetail(soapBody.getFirstElement()); return fault; } else { return new AxisFault("application/xml type error received."); } } } // Not going to be able to throw new IllegalArgumentException( "The MessageContext does not have an associated SOAPFault."); } return result; } /** * This method will provide the logic needed to retrieve an Object's classloader * in a Java 2 Security compliant manner. 
*/ public static ClassLoader getObjectClassLoader(final Object object) { if(object == null) { return null; } else { return (ClassLoader) AccessController.doPrivileged(new PrivilegedAction() { public Object run() { return object.getClass().getClassLoader(); } }); } } public static int getMtomThreshold(MessageContext msgCtxt){ Integer value = null; if(!msgCtxt.isServerSide()){ value = (Integer)msgCtxt.getProperty(Constants.Configuration.MTOM_THRESHOLD); }else{ Parameter param = msgCtxt.getParameter(Constants.Configuration.MTOM_THRESHOLD); if(param!=null){ value = (Integer)param.getValue(); } } int threshold = (value!=null)?value.intValue():0; if(log.isDebugEnabled()){ log.debug("MTOM optimized Threshold value ="+threshold); } return threshold; } /** * Returns the ip address to be used for the replyto epr * CAUTION: * This will go through all the available network interfaces and will try to return an ip address. * First this will try to get the first IP which is not loopback address (127.0.0.1). If none is found * then this will return this will return 127.0.0.1. * This will <b>not<b> consider IPv6 addresses. * <p/> * TODO: * - Improve this logic to genaralize it a bit more * - Obtain the ip to be used here from the Call API * * @return Returns String. * @throws java.net.SocketException */ public static String getIpAddress() throws SocketException { Enumeration e = NetworkInterface.getNetworkInterfaces(); String address = "127.0.0.1"; while (e.hasMoreElements()) { NetworkInterface netface = (NetworkInterface) e.nextElement(); Enumeration addresses = netface.getInetAddresses(); while (addresses.hasMoreElements()) { InetAddress ip = (InetAddress) addresses.nextElement(); if (!ip.isLoopbackAddress() && isIP(ip.getHostAddress())) { return ip.getHostAddress(); } } } return address; } /** * First check whether the hostname parameter is there in AxisConfiguration (axis2.xml) , * if it is there then this will retun that as the host name , o.w will return the IP address. 
*/ public static String getIpAddress(AxisConfiguration axisConfiguration) throws SocketException { if(axisConfiguration!=null){ Parameter param = axisConfiguration.getParameter(TransportListener.HOST_ADDRESS); if (param != null) { String hostAddress = ((String) param.getValue()).trim(); if(hostAddress!=null){ return hostAddress; } } } return getIpAddress(); } /** * First check whether the hostname parameter is there in AxisConfiguration (axis2.xml) , * if it is there then this will return that as the host name , o.w will return the IP address. * @param axisConfiguration * @return hostname */ public static String getHostname(AxisConfiguration axisConfiguration) { if(axisConfiguration!=null){ Parameter param = axisConfiguration.getParameter(TransportListener.HOST_ADDRESS); if (param != null) { String hostAddress = ((String) param.getValue()).trim(); if(hostAddress!=null){ return hostAddress; } } } return null; } private static boolean isIP(String hostAddress) { return hostAddress.split("[.]").length == 4; } /** * Get the scheme part from a URI (or URL). * * @param uri the URI * @return the scheme of the URI */ public static String getURIScheme(String uri) { int index = uri.indexOf(':'); return index > 0 ? uri.substring(0, index) : null; } public static String sanitizeWebOutput(String text) { text = text.replaceAll("<", "&lt;"); return text; } /** * Create a service object for a given service. The method first looks for * the {@link Constants#SERVICE_OBJECT_SUPPLIER} service parameter and if * this parameter is present, it will use the specified class to create the * service object. If the parameter is not present, it will create an * instance of the class specified by the {@link Constants#SERVICE_CLASS} * parameter. * * @param service * the service * @return The service object or <code>null</code> if neither the * {@link Constants#SERVICE_OBJECT_SUPPLIER} nor the * {@link Constants#SERVICE_CLASS} parameter was found on the * service, i.e. 
if the service doesn't specify how to create a * service object. If the return value is non null, it will always * be a newly created instance. * @throws AxisFault * if an error occurred while attempting to instantiate the * service object */ public static Object createServiceObject(final AxisService service) throws AxisFault { try { ClassLoader classLoader = service.getClassLoader(); // allow alternative definition of makeNewServiceObject Parameter serviceObjectSupplierParam = service.getParameter(Constants.SERVICE_OBJECT_SUPPLIER); if (serviceObjectSupplierParam != null) { final Class<?> serviceObjectSupplierClass = Loader.loadClass(classLoader, ((String) serviceObjectSupplierParam.getValue()).trim()); if (ServiceObjectSupplier.class.isAssignableFrom(serviceObjectSupplierClass)) { ServiceObjectSupplier serviceObjectSupplier = org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedExceptionAction<ServiceObjectSupplier>() { public ServiceObjectSupplier run() throws InstantiationException, IllegalAccessException { return (ServiceObjectSupplier)serviceObjectSupplierClass.newInstance(); } } ); return serviceObjectSupplier.getServiceObject(service); } else { // Prior to r439555 service object suppliers were actually defined by a static method // with a given signature defined on an arbitrary class. The ServiceObjectSupplier // interface was only introduced by r439555. We still support the old way, but // issue a warning inviting the user to provide a proper ServiceObjectSupplier // implementation. 
// Find static getServiceObject() method, call it if there final Method method = org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedExceptionAction<Method>() { public Method run() throws NoSuchMethodException { return serviceObjectSupplierClass.getMethod("getServiceObject", AxisService.class); } } ); log.warn("The class specified by the " + Constants.SERVICE_OBJECT_SUPPLIER + " property on service " + service.getName() + " does not implement the " + ServiceObjectSupplier.class.getName() + " interface. This is deprecated."); return org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedExceptionAction<Object>() { public Object run() throws InvocationTargetException, IllegalAccessException, InstantiationException { return method.invoke(serviceObjectSupplierClass.newInstance(), new Object[]{service}); } } ); } } else { Parameter serviceClassParam = service.getParameter(Constants.SERVICE_CLASS); if (serviceClassParam != null) { final Class<?> serviceClass = Loader.loadClass( classLoader, ((String) serviceClassParam.getValue()).trim()); String className = ((String) serviceClassParam.getValue()).trim(); Class serviceObjectMaker = Loader.loadClass(classLoader, className); if (serviceObjectMaker.getModifiers() != Modifier.PUBLIC) { throw new AxisFault("Service class " + className + " must have public as access Modifier"); } return org.apache.axis2.java.security.AccessController.doPrivileged( new PrivilegedExceptionAction<Object>() { public Object run() throws InstantiationException, IllegalAccessException { return serviceClass.newInstance(); } } ); } else { return null; } } } catch (Exception e) { throw AxisFault.makeFault(e); } } /** * Get the service class for a given service. This method will first check * the {@link Constants#SERVICE_CLASS} service parameter and if that * parameter is not present, inspect the instance returned by the service * object supplier specified by {@link Constants#SERVICE_OBJECT_SUPPLIER}. 
     *
     * @param service
     *            the service
     * @return The service class or <code>null</code> if neither the
     *         {@link Constants#SERVICE_CLASS} nor the
     *         {@link Constants#SERVICE_OBJECT_SUPPLIER} parameter was found on
     *         the service, i.e. if the service doesn't specify a service class.
     * @throws AxisFault
     *             if an error occurred while attempting to load the service
     *             class or to instantiate the service object
     */
    public static Class<?> getServiceClass(AxisService service) throws AxisFault {
        Parameter serviceClassParam = service.getParameter(Constants.SERVICE_CLASS);
        if (serviceClassParam != null) {
            // Fast path: the service declares its implementation class directly.
            try {
                return Loader.loadClass(service.getClassLoader(),
                        ((String) serviceClassParam.getValue()).trim());
            } catch (Exception e) {
                throw AxisFault.makeFault(e);
            }
        } else {
            // Fall back to instantiating via the service object supplier (if any)
            // and reporting the runtime class of the created instance.
            Object serviceObject = createServiceObject(service);
            return serviceObject == null ? null : serviceObject.getClass();
        }
    }

    /**
     * Checks whether the client-side non-blocking flag is set on the message
     * context, consulting both the official CLIENT_API_NON_BLOCKING property and
     * the legacy "transportNonBlocking" property. This exists only for backward
     * compatibility. Get rid of this at the next major release.
     * @param messageContext the message context whose properties are consulted
     * @return <code>true</code> if either property is present and true
     */
    public static boolean isClientThreadNonBlockingPropertySet(MessageContext messageContext){
        Object val = messageContext.getProperty(
                MessageContext.CLIENT_API_NON_BLOCKING);
        if(val != null && ((Boolean)val).booleanValue()){
            return true;
        }else{
            //put the string inline as this is to be removed
            val = messageContext.getProperty("transportNonBlocking");
            return val != null && ((Boolean)val).booleanValue();
        }
    }
}
package imagescience.feature;

import imagescience.ImageScience;
import imagescience.image.Aspects;
import imagescience.image.Axes;
import imagescience.image.Coordinates;
import imagescience.image.Dimensions;
import imagescience.image.FloatImage;
import imagescience.image.Image;
import imagescience.utility.Messenger;
import imagescience.utility.Progressor;
import imagescience.utility.Timer;

import java.util.Vector;

/** Computes Hessian eigenimages. For every image element the eigenvalues of the
	Hessian matrix of Gaussian-derivative images are computed and returned as
	separate images, sorted by signed value or by magnitude (see the
	{@code absolute} parameter of the {@code run} method). */
public class Hessian {
	
	/** Default constructor. */
	public Hessian() { }
	
	/** Computes Hessian eigenimages of images.
		
		@param image The input image for which Hessian eigenimages need to be computed. If it is of type {@link FloatImage}, it will be used to store intermediate results. Otherwise it will be left unaltered. If the size of the image in the z-dimension equals {@code 1}, this method will compute, for every image element, the two-dimensional (2D) Hessian and its two eigenvalues. Otherwise it will compute for every image element the full three-dimensional (3D) Hessian and its three eigenvalues. These computations are performed on every x-y(-z) subimage in a 5D image.
		
		@param scale The smoothing scale at which the required image derivatives are computed. The scale is equal to the standard deviation of the Gaussian kernel used for differentiation and must be larger than {@code 0}. In order to enforce physical isotropy, for each dimension, the scale is divided by the size of the image elements (aspect ratio) in that dimension.
		
		@param absolute Determines whether eigenvalues are compared in absolute sense.
		
		@return An array containing the eigenimages. The images are always of type {@link FloatImage}.<br>
		If only the two-dimensional (2D) Hessian and its two eigenvalues were computed for every image element, the returned array contains two eigenimages:<br>
		Element {@code 0} = the image with, for every element, the largest (absolute) eigenvalue,<br>
		Element {@code 1} = the image with, for every element, the smallest (absolute) eigenvalue.<br>
		If the full three-dimensional (3D) Hessian and its three eigenvalues were computed for every image element, the returned array contains three eigenimages:<br>
		Element {@code 0} = the image with, for every element, the largest (absolute) eigenvalue,<br>
		Element {@code 1} = the image with, for every element, the middle (absolute) eigenvalue,<br>
		Element {@code 2} = the image with, for every element, the smallest (absolute) eigenvalue.
		
		@throws IllegalArgumentException If {@code scale} is less than or equal to {@code 0}.
		
		@throws IllegalStateException If the size of the image elements (aspect ratio) is less than or equal to {@code 0} in the x-, y-, or z-dimension.
		
		@throws NullPointerException If {@code image} is {@code null}.
	*/
	public Vector<Image> run(final Image image, final double scale, final boolean absolute) {
		
		messenger.log(ImageScience.prelude()+"Hessian");
		
		// Time the whole operation:
		final Timer timer = new Timer();
		timer.messenger.log(messenger.log());
		timer.start();
		
		// Initialize:
		messenger.log("Checking arguments");
		if (scale <= 0) throw new IllegalArgumentException("Smoothing scale less than or equal to 0");
		
		final Dimensions dims = image.dimensions();
		messenger.log("Input image dimensions: (x,y,z,t,c) = ("+dims.x+","+dims.y+","+dims.z+","+dims.t+","+dims.c+")");
		
		final Aspects asps = image.aspects();
		messenger.log("Element aspect ratios: ("+asps.x+","+asps.y+","+asps.z+","+asps.t+","+asps.c+")");
		if (asps.x <= 0) throw new IllegalStateException("Aspect ratio in x-dimension less than or equal to 0");
		if (asps.y <= 0) throw new IllegalStateException("Aspect ratio in y-dimension less than or equal to 0");
		if (asps.z <= 0) throw new IllegalStateException("Aspect ratio in z-dimension less than or equal to 0");
		
		// Reuse a FloatImage input for intermediate results; otherwise work on a copy:
		final Image smoothImage = (image instanceof FloatImage) ? image : new FloatImage(image);
		
		Vector<Image> eigenimages = null;
		final String name = image.name();
		
		differentiator.messenger.log(messenger.log());
		differentiator.progressor.parent(progressor);
		
		// Compute Hessian matrix and eigenimages:
		if (dims.z == 1) { // 2D case
			
			// Progress fractions for the four stages (three derivatives + eigenvalues):
			final double[] pls = {0, 0.32, 0.64, 0.96, 1}; int pl = 0;
			
			// Compute Hessian components:
			logus("Computing Hxx");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hxx = differentiator.run(smoothImage.duplicate(),scale,2,0,0);
			logus("Computing Hxy");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hxy = differentiator.run(smoothImage.duplicate(),scale,1,1,0);
			logus("Computing Hyy");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hyy = differentiator.run(smoothImage,scale,0,2,0);
			
			// Compute eigenimages (Hxx and Hyy are reused to save memory):
			logus("Computing eigenimages");
			progressor.steps(dims.c*dims.t*dims.y);
			progressor.range(pls[pl],pls[++pl]);
			Hxx.axes(Axes.X); Hxy.axes(Axes.X); Hyy.axes(Axes.X);
			final double[] ahxx = new double[dims.x];
			final double[] ahxy = new double[dims.x];
			final double[] ahyy = new double[dims.x];
			final Coordinates coords = new Coordinates();
			progressor.start();
			
			if (absolute) {
				messenger.log("Comparing and storing absolute eigenvalues");
				for (coords.c=0; coords.c<dims.c; ++coords.c)
					for (coords.t=0; coords.t<dims.t; ++coords.t)
						for (coords.y=0; coords.y<dims.y; ++coords.y) {
							Hxx.get(coords,ahxx);
							Hxy.get(coords,ahxy);
							Hyy.get(coords,ahyy);
							for (int x=0; x<dims.x; ++x) {
								// Eigenvalues of the symmetric 2x2 Hessian are the roots of
								// lambda^2 + b*lambda + c, with b the negated trace and c the
								// determinant. q is the cancellation-free root; the other is c/q.
								final double b = -(ahxx[x] + ahyy[x]);
								final double c = ahxx[x]*ahyy[x] - ahxy[x]*ahxy[x];
								final double q = -0.5*(b + (b < 0 ? -1 : 1)*Math.sqrt(b*b - 4*c));
								double absh1, absh2;
								if (q == 0) { absh1 = 0; absh2 = 0; }
								else { absh1 = Math.abs(q); absh2 = Math.abs(c/q); }
								// Store the largest magnitude in ahxx, the smallest in ahyy:
								if (absh1 > absh2) { ahxx[x] = absh1; ahyy[x] = absh2; }
								else { ahxx[x] = absh2; ahyy[x] = absh1; }
							}
							Hxx.set(coords,ahxx);
							Hyy.set(coords,ahyy);
							progressor.step();
						}
			} else {
				messenger.log("Comparing and storing actual eigenvalues");
				for (coords.c=0; coords.c<dims.c; ++coords.c)
					for (coords.t=0; coords.t<dims.t; ++coords.t)
						for (coords.y=0; coords.y<dims.y; ++coords.y) {
							Hxx.get(coords,ahxx);
							Hxy.get(coords,ahxy);
							Hyy.get(coords,ahyy);
							for (int x=0; x<dims.x; ++x) {
								final double b = -(ahxx[x] + ahyy[x]);
								final double c = ahxx[x]*ahyy[x] - ahxy[x]*ahxy[x];
								final double q = -0.5*(b + (b < 0 ? -1 : 1)*Math.sqrt(b*b - 4*c));
								double h1, h2;
								if (q == 0) { h1 = 0; h2 = 0; }
								else { h1 = q; h2 = c/q; }
								// Store the (signed) largest in ahxx, the smallest in ahyy:
								if (h1 > h2) { ahxx[x] = h1; ahyy[x] = h2; }
								else { ahxx[x] = h2; ahyy[x] = h1; }
							}
							Hxx.set(coords,ahxx);
							Hyy.set(coords,ahyy);
							progressor.step();
						}
			}
			progressor.stop();
			
			Hxx.name(name+" largest Hessian eigenvalues");
			Hyy.name(name+" smallest Hessian eigenvalues");
			
			Hxx.aspects(asps.duplicate());
			Hyy.aspects(asps.duplicate());
			
			eigenimages = new Vector<Image>(2);
			eigenimages.add(Hxx);
			eigenimages.add(Hyy);
			
		} else { // 3D case
			
			// Progress fractions for the seven stages (six derivatives + eigenvalues):
			final double[] pls = {0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 1}; int pl = 0;
			
			// Compute Hessian components:
			logus("Computing Hxx");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hxx = differentiator.run(smoothImage.duplicate(),scale,2,0,0);
			logus("Computing Hxy");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hxy = differentiator.run(smoothImage.duplicate(),scale,1,1,0);
			logus("Computing Hxz");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hxz = differentiator.run(smoothImage.duplicate(),scale,1,0,1);
			logus("Computing Hyy");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hyy = differentiator.run(smoothImage.duplicate(),scale,0,2,0);
			logus("Computing Hyz");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hyz = differentiator.run(smoothImage.duplicate(),scale,0,1,1);
			logus("Computing Hzz");
			progressor.range(pls[pl],pls[++pl]);
			final Image Hzz = differentiator.run(smoothImage,scale,0,0,2);
			
			// Compute eigenimages (Hxx, Hyy, Hzz are reused to save memory):
			logus("Computing eigenimages");
			progressor.steps(dims.c*dims.t*dims.z*dims.y);
			progressor.range(pls[pl],pls[++pl]);
			Hxx.axes(Axes.X); Hxy.axes(Axes.X); Hxz.axes(Axes.X);
			Hyy.axes(Axes.X); Hyz.axes(Axes.X); Hzz.axes(Axes.X);
			final double[] ahxx = new double[dims.x];
			final double[] ahxy = new double[dims.x];
			final double[] ahxz = new double[dims.x];
			final double[] ahyy = new double[dims.x];
			final double[] ahyz = new double[dims.x];
			final double[] ahzz = new double[dims.x];
			final Coordinates coords = new Coordinates();
			progressor.start();
			
			if (absolute) {
				messenger.log("Comparing and storing absolute eigenvalues");
				for (coords.c=0; coords.c<dims.c; ++coords.c)
					for (coords.t=0; coords.t<dims.t; ++coords.t)
						for (coords.z=0; coords.z<dims.z; ++coords.z)
							for (coords.y=0; coords.y<dims.y; ++coords.y) {
								Hxx.get(coords,ahxx);
								Hxy.get(coords,ahxy);
								Hxz.get(coords,ahxz);
								Hyy.get(coords,ahyy);
								Hyz.get(coords,ahyz);
								Hzz.get(coords,ahzz);
								for (int x=0; x<dims.x; ++x) {
									final double fhxx = ahxx[x];
									final double fhxy = ahxy[x];
									final double fhxz = ahxz[x];
									final double fhyy = ahyy[x];
									final double fhyz = ahyz[x];
									final double fhzz = ahzz[x];
									// Coefficients of the characteristic cubic
									// lambda^3 + a*lambda^2 + b*lambda + c of the symmetric 3x3
									// Hessian, solved with the trigonometric method (all roots
									// are real for a symmetric matrix):
									final double a = -(fhxx + fhyy + fhzz);
									final double b = fhxx*fhyy + fhxx*fhzz + fhyy*fhzz - fhxy*fhxy - fhxz*fhxz - fhyz*fhyz;
									final double c = fhxx*(fhyz*fhyz - fhyy*fhzz) + fhyy*fhxz*fhxz + fhzz*fhxy*fhxy - 2*fhxy*fhxz*fhyz;
									final double q = (a*a - 3*b)/9;
									final double r = (a*a*a - 4.5*a*b + 13.5*c)/27;
									final double sqrtq = (q > 0) ? Math.sqrt(q) : 0;
									final double sqrtq3 = sqrtq*sqrtq*sqrtq;
									double absh1, absh2, absh3;
									if (sqrtq3 == 0) {
										absh1 = 0;
										absh2 = 0;
										absh3 = 0;
									} else {
										final double rsqq3 = r/sqrtq3;
										// Clamp the acos argument to [-1,1] against round-off:
										final double angle = (rsqq3*rsqq3 <= 1) ? Math.acos(rsqq3) : Math.acos(rsqq3 < 0 ? -1 : 1);
										absh1 = Math.abs(-2*sqrtq*Math.cos(angle/3) - a/3);
										absh2 = Math.abs(-2*sqrtq*Math.cos((angle + TWOPI)/3) - a/3);
										absh3 = Math.abs(-2*sqrtq*Math.cos((angle - TWOPI)/3) - a/3);
									}
									// Sort the three magnitudes in descending order:
									if (absh2 < absh3) { final double tmp = absh2; absh2 = absh3; absh3 = tmp; }
									if (absh1 < absh2) { final double tmp1 = absh1; absh1 = absh2; absh2 = tmp1;
										if (absh2 < absh3) { final double tmp2 = absh2; absh2 = absh3; absh3 = tmp2; }}
									ahxx[x] = absh1;
									ahyy[x] = absh2;
									ahzz[x] = absh3;
								}
								Hxx.set(coords,ahxx);
								Hyy.set(coords,ahyy);
								Hzz.set(coords,ahzz);
								progressor.step();
							}
			} else {
				messenger.log("Comparing and storing actual eigenvalues");
				for (coords.c=0; coords.c<dims.c; ++coords.c)
					for (coords.t=0; coords.t<dims.t; ++coords.t)
						for (coords.z=0; coords.z<dims.z; ++coords.z)
							for (coords.y=0; coords.y<dims.y; ++coords.y) {
								Hxx.get(coords,ahxx);
								Hxy.get(coords,ahxy);
								Hxz.get(coords,ahxz);
								Hyy.get(coords,ahyy);
								Hyz.get(coords,ahyz);
								Hzz.get(coords,ahzz);
								for (int x=0; x<dims.x; ++x) {
									final double fhxx = ahxx[x];
									final double fhxy = ahxy[x];
									final double fhxz = ahxz[x];
									final double fhyy = ahyy[x];
									final double fhyz = ahyz[x];
									final double fhzz = ahzz[x];
									final double a = -(fhxx + fhyy + fhzz);
									final double b = fhxx*fhyy + fhxx*fhzz + fhyy*fhzz - fhxy*fhxy - fhxz*fhxz - fhyz*fhyz;
									final double c = fhxx*(fhyz*fhyz - fhyy*fhzz) + fhyy*fhxz*fhxz + fhzz*fhxy*fhxy - 2*fhxy*fhxz*fhyz;
									final double q = (a*a - 3*b)/9;
									final double r = (a*a*a - 4.5*a*b + 13.5*c)/27;
									final double sqrtq = (q > 0) ? Math.sqrt(q) : 0;
									final double sqrtq3 = sqrtq*sqrtq*sqrtq;
									double h1, h2, h3;
									if (sqrtq3 == 0) {
										h1 = 0;
										h2 = 0;
										h3 = 0;
									} else {
										final double rsqq3 = r/sqrtq3;
										final double angle = (rsqq3*rsqq3 <= 1) ? Math.acos(rsqq3) : Math.acos(rsqq3 < 0 ? -1 : 1);
										h1 = -2*sqrtq*Math.cos(angle/3) - a/3;
										h2 = -2*sqrtq*Math.cos((angle + TWOPI)/3) - a/3;
										h3 = -2*sqrtq*Math.cos((angle - TWOPI)/3) - a/3;
									}
									// Sort the three (signed) eigenvalues in descending order:
									if (h2 < h3) { final double tmp = h2; h2 = h3; h3 = tmp; }
									if (h1 < h2) { final double tmp1 = h1; h1 = h2; h2 = tmp1;
										if (h2 < h3) { final double tmp2 = h2; h2 = h3; h3 = tmp2; }}
									ahxx[x] = h1;
									ahyy[x] = h2;
									ahzz[x] = h3;
								}
								Hxx.set(coords,ahxx);
								Hyy.set(coords,ahyy);
								Hzz.set(coords,ahzz);
								progressor.step();
							}
			}
			progressor.stop();
			
			Hxx.name(name+" largest Hessian eigenvalues");
			Hyy.name(name+" middle Hessian eigenvalues");
			Hzz.name(name+" smallest Hessian eigenvalues");
			
			Hxx.aspects(asps.duplicate());
			Hyy.aspects(asps.duplicate());
			Hzz.aspects(asps.duplicate());
			
			eigenimages = new Vector<Image>(3);
			eigenimages.add(Hxx);
			eigenimages.add(Hyy);
			eigenimages.add(Hzz);
		}
		
		timer.stop();
		
		return eigenimages;
	}
	
	// Logs the given message and also shows it as the current progress status.
	private void logus(final String s) {
		
		messenger.log(s);
		progressor.status(s+"...");
	}
	
	/** The object used for message displaying. */
	public final Messenger messenger = new Messenger();
	
	/** The object used for progress displaying. */
	public final Progressor progressor = new Progressor();
	
	/** The object used for image differentiation. */
	public final Differentiator differentiator = new Differentiator();
	
	// 2*pi, used in the trigonometric solution of the characteristic cubic:
	private static final double TWOPI = 2*Math.PI;
	
}
package org.motechproject.event.listener.impl;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.domain.BuggyListener;
import org.motechproject.event.exception.CallbackServiceNotFoundException;
import org.motechproject.event.listener.EventCallbackService;
import org.motechproject.event.listener.EventListener;
import org.motechproject.event.messaging.MotechEventConfig;
import org.motechproject.event.messaging.OutboundEventGateway;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link ServerEventRelay}: queue/topic relaying, callback-service
 * notification, retry behaviour on listener failure and OSGi event proxying.
 */
@RunWith(MockitoJUnitRunner.class)
public class ServerEventRelayTest {
    public static final String LISTENER_IDENTIFIER = "test-identifier";
    public static final String SECONDARY_LISTENER_IDENTIFIER = "secondary-test-identifier";
    public static final String SUBJECT = "org.motechproject.server.someevent";

    private static final String TEST_SERVICE_CALLBACK = "TestServiceCallback";

    @Mock
    private OutboundEventGateway outboundEventGateway;

    @Mock
    private MotechEventConfig motechEventConfig;

    @Mock
    private EventListener eventListener;

    @Mock
    private EventListener secondaryEventListener;

    @Mock
    private EventAdmin eventAdmin;

    @Mock
    private EventListenerRegistry registry;

    @Mock
    private BundleContext bundleContext;

    @Mock
    private ServiceReference<EventCallbackService> serviceReference;

    @Mock
    private EventCallbackService callbackService;

    private ServerEventRelay eventRelay;

    @Before
    public void setUp() throws Exception {
        eventRelay = new ServerEventRelay(outboundEventGateway, registry, motechEventConfig, eventAdmin, bundleContext);
        when(eventListener.getIdentifier()).thenReturn(LISTENER_IDENTIFIER);
        when(secondaryEventListener.getIdentifier()).thenReturn(SECONDARY_LISTENER_IDENTIFIER);
    }

    @Test
    public void testRelayToSingleListenerWithMessageDestination() throws Exception {
        MotechEvent motechEvent = createEvent(LISTENER_IDENTIFIER);
        setUpListeners(SUBJECT, eventListener);

        eventRelay.relayQueueEvent(motechEvent);

        verify(eventListener).handle(motechEvent);
    }

    @Test
    public void shouldNotifyCallbackServiceOnSuccessfulEventHandling() throws Exception {
        MotechEvent motechEvent = createEvent(LISTENER_IDENTIFIER);
        motechEvent.setCallbackName(TEST_SERVICE_CALLBACK);
        setUpListeners(SUBJECT, eventListener);
        // Register a callback service matching the event's callback name.
        when(bundleContext.getServiceReferences(EventCallbackService.class, null)).thenReturn(Arrays.asList(serviceReference));
        when(bundleContext.getService(serviceReference)).thenReturn(callbackService);
        when(callbackService.getName()).thenReturn(TEST_SERVICE_CALLBACK);

        eventRelay.relayQueueEvent(motechEvent);

        verify(eventListener).handle(motechEvent);
        verify(callbackService).successCallback(motechEvent);
    }

    @Test(expected = CallbackServiceNotFoundException.class)
    public void shouldThrowExceptionWhenCallbackServiceOfTheGivenNameIsNotFound() throws Exception {
        MotechEvent motechEvent = createEvent(LISTENER_IDENTIFIER);
        motechEvent.setCallbackName(TEST_SERVICE_CALLBACK);
        setUpListeners(SUBJECT, eventListener);

        // No callback service is registered, so this call is expected to throw.
        // (The original test had verify(...) statements after this line; they were
        // unreachable dead code because the expected exception aborts the method.)
        eventRelay.relayQueueEvent(motechEvent);
    }

    @Test
    public void shouldNotifyCallbackServiceOnFailedEventHandling() throws Exception {
        MotechEvent motechEvent = createEvent(LISTENER_IDENTIFIER);
        motechEvent.setCallbackName(TEST_SERVICE_CALLBACK);
        setUpListeners(SUBJECT, eventListener);
        when(bundleContext.getServiceReferences(EventCallbackService.class, null)).thenReturn(Arrays.asList(serviceReference));
        when(bundleContext.getService(serviceReference)).thenReturn(callbackService);
        when(callbackService.getName()).thenReturn(TEST_SERVICE_CALLBACK);

        // The failure callback should receive the root cause of the listener failure.
        RuntimeException initCause = new RuntimeException();
        doThrow(new RuntimeException("Failed", initCause)).when(eventListener).handle(any(MotechEvent.class));

        eventRelay.relayQueueEvent(motechEvent);

        verify(eventListener).handle(motechEvent);
        verify(callbackService).failureCallback(motechEvent, initCause);
    }

    @Test
    public void testRelayToSingleListenerWithoutMessageDestination() throws Exception {
        // Without a message destination the queue relay must not invoke any listener.
        MotechEvent motechEvent = createEvent();
        setUpListeners(SUBJECT, eventListener);

        eventRelay.relayQueueEvent(motechEvent);

        verify(eventListener, never()).handle(motechEvent);
    }

    @Test
    public void testSplitEvents() throws Exception {
        // One incoming event must be split into one outbound message per listener,
        // each addressed to that listener's identifier.
        MotechEvent motechEvent = createEvent();
        setUpListeners(SUBJECT, eventListener, secondaryEventListener);

        eventRelay.sendEventMessage(motechEvent);

        ArgumentCaptor<MotechEvent> argumentCaptor = ArgumentCaptor.forClass(MotechEvent.class);
        verify(outboundEventGateway, times(2)).sendEventMessage(argumentCaptor.capture());

        MotechEvent capturedEvent;
        capturedEvent = argumentCaptor.getAllValues().get(0);
        assertEquals(capturedEvent.getMessageDestination(), LISTENER_IDENTIFIER);

        capturedEvent = argumentCaptor.getAllValues().get(1);
        assertEquals(capturedEvent.getMessageDestination(), SECONDARY_LISTENER_IDENTIFIER);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testRelayNullQueueEvent() throws Exception {
        eventRelay.relayQueueEvent(null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testRelayNullTopicEvent() throws Exception {
        eventRelay.relayTopicEvent(null);
    }

    @Test
    public void shouldPreserveEventDestinationIfListenerFails() {
        when(motechEventConfig.getMessageMaxRedeliveryCount()).thenReturn(2);

        BuggyListener buggyListener = new BuggyListener(1);
        setUpListeners(SUBJECT, buggyListener);

        MotechEvent event = createEvent(buggyListener.getIdentifier());

        eventRelay.relayQueueEvent(event);

        assertThat(event.getMessageDestination().toString(), is(buggyListener.getIdentifier()));
    }

    @Test
    public void testThatOnlyListenerIdentifiedByMessageDestinationHandlesEvent() throws Exception {
        setUpListeners(SUBJECT, eventListener, secondaryEventListener);

        MotechEvent motechEvent = createEvent(LISTENER_IDENTIFIER);

        eventRelay.relayQueueEvent(motechEvent);

        verify(eventListener).handle(motechEvent);
        verify(secondaryEventListener, never()).handle(any(MotechEvent.class));
    }

    @Test
    public void shouldRetryEventHandlingWhenRelyingTopicEvent() {
        final BooleanValue handled = new BooleanValue(false);

        when(motechEventConfig.getMessageMaxRedeliveryCount()).thenReturn(2);
        when(eventListener.getIdentifier()).thenReturn("retrying");

        // Fail twice, then succeed on the third (last allowed) attempt.
        doThrow(new RuntimeException())
                .doThrow(new RuntimeException())
                .doAnswer(new Answer<Void>() {
                    @Override
                    public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
                        handled.setValue(true);
                        return null;
                    }
                })
                .when(eventListener).handle(any(MotechEvent.class));

        setUpListeners(SUBJECT, eventListener);

        eventRelay.relayTopicEvent(new MotechEvent(SUBJECT));

        verify(eventListener, times(3)).handle(any(MotechEvent.class));
        assertTrue(handled.getValue());
        // No OSGi proxying expected for a plain topic event.
        verify(eventAdmin, never()).postEvent(any(Event.class));
        verify(eventAdmin, never()).sendEvent(any(Event.class));
    }

    @Test
    public void shouldStopRetryingEventHandlingAfterMaxRedeliveryCountIsHitWhenRelyingTopicEvent() {
        final BooleanValue handled = new BooleanValue(false);

        when(motechEventConfig.getMessageMaxRedeliveryCount()).thenReturn(2);
        when(eventListener.getIdentifier()).thenReturn("retrying");

        // Fail three times; the fourth (successful) answer must never be reached
        // because only 1 initial + 2 redeliveries are allowed.
        doThrow(new RuntimeException())
                .doThrow(new RuntimeException())
                .doThrow(new RuntimeException())
                .doAnswer(new Answer<Void>() {
                    @Override
                    public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
                        handled.setValue(true);
                        return null;
                    }
                })
                .when(eventListener).handle(any(MotechEvent.class));

        setUpListeners(SUBJECT, eventListener);

        eventRelay.relayTopicEvent(new MotechEvent(SUBJECT));

        verify(eventListener, times(3)).handle(any(MotechEvent.class));
        assertFalse(handled.getValue());
    }

    @Test
    public void shouldProxyBroadcastEventsInOSGi() {
        Map<String, Object> params = new HashMap<>();
        params.put("proxy-in-osgi", true);
        MotechEvent event = new MotechEvent("subject", params);

        eventRelay.relayTopicEvent(event);

        ArgumentCaptor<Event> captor = ArgumentCaptor.forClass(Event.class);
        verify(eventAdmin).postEvent(captor.capture());

        assertEquals("subject", captor.getValue().getTopic());
    }

    // Creates a test event addressed to the given listener identifier.
    private MotechEvent createEvent(String messageDestination) {
        MotechEvent event = createEvent();
        event.setMessageDestination(messageDestination);
        return event;
    }

    // Creates a test event with a single dummy parameter and no destination.
    private MotechEvent createEvent() {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("test", "value");
        return new MotechEvent(SUBJECT, parameters);
    }

    // Registers the given listeners for the subject, preserving their order.
    private void setUpListeners(String subject, EventListener... listeners) {
        when(registry.getListeners(eq(subject))).thenReturn(new LinkedHashSet<>(Arrays.asList(listeners)));
    }

    // Simple mutable boolean holder for use inside anonymous Answer classes.
    private class BooleanValue {
        private Boolean value;

        public BooleanValue(Boolean value) {
            this.value = value;
        }

        public Boolean getValue() {
            return value;
        }

        public void setValue(Boolean value) {
            this.value = value;
        }
    }
}
package org.ethereum.net.eth; import org.ethereum.core.Block; import org.ethereum.core.Genesis; import org.ethereum.core.Transaction; import org.ethereum.facade.Blockchain; import org.ethereum.manager.WorldManager; import org.ethereum.net.BlockQueue; import org.ethereum.net.MessageQueue; import org.ethereum.net.message.ReasonCode; import org.ethereum.net.p2p.DisconnectMessage; import org.ethereum.util.ByteUtil; import org.ethereum.util.FastByteComparisons; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import java.math.BigInteger; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Timer; import java.util.TimerTask; import java.util.Vector; import static org.ethereum.config.SystemProperties.CONFIG; import static org.ethereum.net.message.StaticMessages.GET_TRANSACTIONS_MESSAGE; /** * Process the messages between peers with 'eth' capability on the network. 
* <p> * Peers with 'eth' capability can send/receive: * <ul> * <li>STATUS : Announce their status to the peer</li> * <li>GET_TRANSACTIONS : Request a list of pending transactions</li> * <li>TRANSACTIONS : Send a list of pending transactions</li> * <li>GET_BLOCK_HASHES : Request a list of known block hashes</li> * <li>BLOCK_HASHES : Send a list of known block hashes</li> * <li>GET_BLOCKS : Request a list of blocks</li> * <li>BLOCKS : Send a list of blocks</li> * </ul> */ @Component @Scope("prototype") public class EthHandler extends SimpleChannelInboundHandler<EthMessage> { public final static byte VERSION = 53; public final static byte NETWORK_ID = 0x0; private final static Logger logger = LoggerFactory.getLogger("net"); private String peerId; private static String hashRetrievalLock; private MessageQueue msgQueue = null; private SyncStatus syncStatus = SyncStatus.INIT; private boolean active = false; private StatusMessage handshakeStatusMessage = null; private BigInteger totalDifficulty = Genesis.getInstance().getCumulativeDifficulty(); private boolean peerDiscoveryMode = false; private Timer getBlocksTimer = new Timer("GetBlocksTimer"); private Timer getTxTimer = new Timer("GetTransactionsTimer"); @Autowired private Blockchain blockchain; @Autowired private WorldManager worldManager; private List<byte[]> sentHashes; private Block lastBlock = Genesis.getInstance(); public EthHandler() { this.peerDiscoveryMode = false; } public EthHandler(MessageQueue msgQueue, boolean peerDiscoveryMode) { this.peerDiscoveryMode = peerDiscoveryMode; this.msgQueue = msgQueue; } public void activate() { logger.info("ETH protocol activated"); worldManager.getListener().trace("ETH protocol activated"); active = true; sendStatus(); } public void setBlockchain(Blockchain blockchain) { this.blockchain = blockchain; } public boolean isActive() { return active; } @Override public void channelRead0(final ChannelHandlerContext ctx, EthMessage msg) throws InterruptedException { if (!isActive()) 
return; if (EthMessageCodes.inRange(msg.getCommand().asByte())) logger.info("EthHandler invoke: [{}]", msg.getCommand()); worldManager.getListener().trace(String.format("EthHandler invoke: [%s]", msg.getCommand())); switch (msg.getCommand()) { case STATUS: msgQueue.receivedMessage(msg); processStatus((StatusMessage) msg, ctx); break; case GET_TRANSACTIONS: // todo: eventually get_transaction is going deprecated // msgQueue.receivedMessage(msg); // sendPendingTransactions(); break; case TRANSACTIONS: msgQueue.receivedMessage(msg); processTransactions((TransactionsMessage) msg); // List<Transaction> txList = transactionsMessage.getTransactions(); // for(Transaction tx : txList) // WorldManager.getInstance().getBlockchain().applyTransaction(null, // tx); // WorldManager.getInstance().getWallet().addTransaction(tx); break; case GET_BLOCK_HASHES: msgQueue.receivedMessage(msg); processGetBlockHashes((GetBlockHashesMessage) msg); break; case BLOCK_HASHES: msgQueue.receivedMessage(msg); processBlockHashes((BlockHashesMessage) msg); break; case GET_BLOCKS: msgQueue.receivedMessage(msg); processGetBlocks((GetBlocksMessage) msg); break; case BLOCKS: msgQueue.receivedMessage(msg); processBlocks((BlocksMessage) msg); break; case NEW_BLOCK: msgQueue.receivedMessage(msg); processNewBlock((NewBlockMessage) msg); case PACKET_COUNT: break; default: break; } } private void processTransactions(TransactionsMessage msg) { Set<Transaction> txSet = msg.getTransactions(); worldManager.getBlockchain(). 
addPendingTransactions(txSet); for (Transaction tx : txSet) { worldManager.getWallet().addTransaction(tx); } } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { logger.error(cause.getCause().toString()); super.exceptionCaught(ctx, cause); ctx.close(); } @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { logger.debug("handlerRemoved: kill timers in EthHandler"); active = false; this.killTimers(); } /** * Processing: * <ul> * <li>checking if peer is using the same genesis, protocol and network</li> * <li>seeing if total difficulty is higher than total difficulty from all other peers</li> * <li>send GET_BLOCK_HASHES to this peer based on bestHash</li> * </ul> * * @param msg is the StatusMessage * @param ctx the ChannelHandlerContext */ public void processStatus(StatusMessage msg, ChannelHandlerContext ctx) throws InterruptedException { this.handshakeStatusMessage = msg; if (peerDiscoveryMode) { msgQueue.sendMessage(new DisconnectMessage(ReasonCode.REQUESTED)); killTimers(); ctx.close().sync(); ctx.disconnect().sync(); return; } if (!Arrays.equals(msg.getGenesisHash(), Blockchain.GENESIS_HASH) || msg.getProtocolVersion() != VERSION) { logger.info("Removing EthHandler for {} due to protocol incompatibility", ctx.channel().remoteAddress()); // msgQueue.sendMessage(new DisconnectMessage(ReasonCode.INCOMPATIBLE_NETWORK)); ctx.pipeline().remove(this); // Peer is not compatible for the 'eth' sub-protocol } else if (msg.getNetworkId() != NETWORK_ID) msgQueue.sendMessage(new DisconnectMessage(ReasonCode.INCOMPATIBLE_NETWORK)); else { BlockQueue chainQueue = blockchain.getQueue(); BigInteger peerTotalDifficulty = new BigInteger(1, msg.getTotalDifficulty()); BigInteger highestKnownTotalDifficulty = blockchain.getTotalDifficulty(); boolean synced = FastByteComparisons.compareTo(msg.getBestHash(), 0, 32, blockchain.getBestBlockHash(), 0, 32) == 0; if (!synced && (highestKnownTotalDifficulty == null 
|| peerTotalDifficulty.compareTo(highestKnownTotalDifficulty) > 0)) { logger.info(" Their chain is better: total difficulty : {} vs {}", peerTotalDifficulty.toString(), highestKnownTotalDifficulty == null ? "0" : highestKnownTotalDifficulty.toString()); hashRetrievalLock = this.peerId; chainQueue.setHighestTotalDifficulty(peerTotalDifficulty); chainQueue.setBestHash(msg.getBestHash()); syncStatus = SyncStatus.HASH_RETRIEVING; sendGetBlockHashes(); } else { logger.info("The peer sync process fully complete"); syncStatus = SyncStatus.SYNC_DONE; } } } private void processBlockHashes(BlockHashesMessage blockHashesMessage) { List<byte[]> receivedHashes = blockHashesMessage.getBlockHashes(); BlockQueue chainQueue = blockchain.getQueue(); // result is empty, peer has no more hashes // or peer doesn't have the best hash anymore if (receivedHashes.isEmpty() || !this.peerId.equals(hashRetrievalLock)) { sendGetBlocks(); // start getting blocks from hash queue return; } Iterator<byte[]> hashIterator = receivedHashes.iterator(); byte[] foundHash, latestHash = blockchain.getBestBlockHash(); while (hashIterator.hasNext()) { foundHash = hashIterator.next(); if (FastByteComparisons.compareTo(foundHash, 0, 32, latestHash, 0, 32) != 0) { chainQueue.addHash(foundHash); // store unknown hashes in queue until known hash is found } else { logger.trace("Catch up with the hashes until: {[]}", foundHash); // if known hash is found, ignore the rest sendGetBlocks(); // start getting blocks from hash queue return; } } // no known hash has been reached chainQueue.logHashQueueSize(); sendGetBlockHashes(); // another getBlockHashes with last received hash. 
} private void processBlocks(BlocksMessage blocksMessage) { List<Block> blockList = blocksMessage.getBlocks(); if (!blockList.isEmpty()) { Block block = blockList.get(blockList.size() - 1); if (block.getNumber() > lastBlock.getNumber()) lastBlock = blockList.get(blockList.size() - 1); } // check if you got less blocks than you asked if (blockList.size() < sentHashes.size()) { for (int i = 0; i < blockList.size(); ++i) sentHashes.remove(0); logger.info("Got less blocks: [{}], return [{}] hashes to the queue", blockList.size(), sentHashes.size()); blockchain.getQueue().returnHashes(sentHashes); } if (blockchain.getQueue().isHashesEmpty()) { logger.info(" The peer sync process fully complete"); syncStatus = SyncStatus.SYNC_DONE; blockchain.getQueue().addBlocks(blockList); blockchain.getQueue().logHashQueueSize(); } else { if (blockList.isEmpty()) return; blockchain.getQueue().addBlocks(blockList); blockchain.getQueue().logHashQueueSize(); sendGetBlocks(); } for (Block block : blockList) { totalDifficulty.add(block.getCumulativeDifficulty()); } } /** * Processing NEW_BLOCK announce message * * @param newBlockMessage - new block message */ public void processNewBlock(NewBlockMessage newBlockMessage) { Block newBlock = newBlockMessage.getBlock(); if (newBlock.getNumber() > this.lastBlock.getNumber()) this.lastBlock = newBlock; // If the hashes still being downloaded ignore the NEW_BLOCKs // that block hash will be retrieved by the others and letter the block itself if (syncStatus == SyncStatus.INIT || syncStatus == SyncStatus.HASH_RETRIEVING) { logger.debug("Sync status INIT or HASH_RETREIVING adding to hashes new block.index: [{}]", newBlock.getNumber()); blockchain.getQueue().addNewBlockHash(newBlock.getHash()); return; } // If the GET_BLOCKs stage started add hash to the end of the hash list // then the block will be retrieved in it's turn; if (syncStatus == SyncStatus.BLOCK_RETRIEVING) { logger.debug("Sync status BLOCK_RETREIVING add to the end of hash list: 
block.index: [{}]", newBlock.getNumber()); blockchain.getQueue().addNewBlockHash(newBlock.getHash()); return; } // here is post sync process logger.info("New block received: block.index [{}]", newBlock.getNumber()); // adding block to the queue // there will be decided how to // connect it to the chain blockchain.getQueue().addBlock(newBlockMessage.getBlock()); blockchain.getQueue().logHashQueueSize(); totalDifficulty = new BigInteger(1, newBlockMessage.getDifficulty()); } private void sendStatus() { byte protocolVersion = EthHandler.VERSION, networkId = EthHandler.NETWORK_ID; BigInteger totalDifficulty = blockchain.getTotalDifficulty(); byte[] bestHash = blockchain.getBestBlockHash(); StatusMessage msg = new StatusMessage(protocolVersion, networkId, ByteUtil.bigIntegerToBytes(totalDifficulty), bestHash, Blockchain.GENESIS_HASH); msgQueue.sendMessage(msg); } /* * The wire gets data for signed transactions and * sends it to the net. */ public void sendTransaction(Transaction transaction) { Set<Transaction> txs = new HashSet<>(Arrays.asList(transaction)); TransactionsMessage msg = new TransactionsMessage(txs); msgQueue.sendMessage(msg); } public void sendNewBlock(Block block) { NewBlockMessage msg = new NewBlockMessage(block, block.getDifficulty()); msgQueue.sendMessage(msg); } private void sendGetTransactions() { msgQueue.sendMessage(GET_TRANSACTIONS_MESSAGE); } private void sendGetBlockHashes() { byte[] bestHash = blockchain.getQueue().getBestHash(); GetBlockHashesMessage msg = new GetBlockHashesMessage(bestHash, CONFIG.maxHashesAsk()); msgQueue.sendMessage(msg); } // Parallel download blocks based on hashQueue private void sendGetBlocks() { BlockQueue queue = blockchain.getQueue(); if (queue.size() > CONFIG.maxBlocksQueued()) { logger.info("postpone asking for blocks: queue: {}", queue.size()); getBlocksTimer.schedule(new TimerTask() { @Override public void run() { sendGetBlocks(); } }, 100); return; } // retrieve list of block hashes from queue // save them 
locally in case the remote peer // will return less blocks than requested. List<byte[]> hashes = queue.getHashes(); this.sentHashes = hashes; if (hashes.isEmpty()) { return; } GetBlocksMessage msg = new GetBlocksMessage(hashes); msgQueue.sendMessage(msg); } private void sendPendingTransactions() { Set<Transaction> pendingTxs = worldManager.getBlockchain() .getPendingTransactions(); TransactionsMessage msg = new TransactionsMessage(pendingTxs); msgQueue.sendMessage(msg); } private void processGetBlockHashes(GetBlockHashesMessage msg) { List<byte[]> hashes = blockchain.getListOfHashesStartFrom(msg.getBestHash(), msg.getMaxBlocks()); BlockHashesMessage msgHashes = new BlockHashesMessage(hashes); msgQueue.sendMessage(msgHashes); } private void processGetBlocks(GetBlocksMessage msg) { List<byte[]> hashes = msg.getBlockHashes(); Vector<Block> blocks = new Vector<>(); for (byte[] hash : hashes) { Block block = blockchain.getBlockByHash(hash); blocks.add(block); } BlocksMessage bm = new BlocksMessage(blocks); msgQueue.sendMessage(bm); } private void startTxTimer() { getTxTimer.scheduleAtFixedRate(new TimerTask() { public void run() { sendGetTransactions(); } }, 2000, 10000); } // public void startGetBlockTimer() { // syncStatus = SyncSatus.BLOCK_RETRIEVING; // getBlocksTimer = new Timer("GetBlocksTimer"); // getBlocksTimer.scheduleAtFixedRate(new TimerTask() { // public void run() { // BlockQueue blockQueue = blockchain.getQueue(); // if (blockQueue.size() > CONFIG.maxBlocksQueued()) { // logger.trace("Blocks queue too big temporary postpone blocks request"); // return; // } // sendGetBlocks(); // } // }, 300, 10); // } private void stopGetBlocksTimer() { getBlocksTimer.cancel(); getBlocksTimer.purge(); } private void stopGetTxTimer() { getTxTimer.cancel(); getTxTimer.purge(); } public void killTimers() { stopGetBlocksTimer(); stopGetTxTimer(); } public void setSyncStatus(SyncStatus syncStatus) { this.syncStatus = syncStatus; } public SyncStatus getSyncStatus() { return 
syncStatus; } public void setPeerId(String peerId) { this.peerId = peerId; } public enum SyncStatus { INIT, HASH_RETRIEVING, BLOCK_RETRIEVING, SYNC_DONE; } public void setBestHash(byte[] hash) { blockchain.getQueue().addHash(hash); } public void doSync() { logger.info("Sync force activated, block: {}", lastBlock); syncStatus = SyncStatus.HASH_RETRIEVING; setBestHash(lastBlock.getHash()); sendGetBlockHashes(); } public StatusMessage getHandshakeStatusMessage() { return handshakeStatusMessage; } public void setMsgQueue(MessageQueue msgQueue) { this.msgQueue = msgQueue; } public void setPeerDiscoveryMode(boolean peerDiscoveryMode) { this.peerDiscoveryMode = peerDiscoveryMode; } public BigInteger getTotalDifficulty() { return totalDifficulty; } }
/*
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kra.institutionalproposal.service.impl;

import org.apache.commons.lang3.StringUtils;
import org.kuali.coeus.common.framework.version.VersionStatus;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.institutionalproposal.contacts.InstitutionalProposalPerson;
import org.kuali.kra.institutionalproposal.document.InstitutionalProposalDocument;
import org.kuali.kra.institutionalproposal.document.authorization.InstitutionalProposalDocumentAuthorizer;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposal;
import org.kuali.kra.institutionalproposal.proposaladmindetails.ProposalAdminDetails;
import org.kuali.kra.institutionalproposal.service.InstitutionalProposalService;
import org.kuali.kra.lookup.KraLookupableHelperServiceImpl;
import org.kuali.coeus.propdev.impl.core.DevelopmentProposal;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.kim.api.identity.Person;
import org.kuali.rice.kns.lookup.HtmlData;
import org.kuali.rice.kns.lookup.HtmlData.AnchorHtmlData;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.service.DocumentService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.UrlFactory;

import java.util.*;

/**
 * This class is used to control behavior of Institutional Proposal lookups. Depending
 * on where the lookup is coming from, we may need to add custom action links and/or
 * filter the lookup results.
 */
public class InstitutionalProposalLookupableHelperServiceImpl extends KraLookupableHelperServiceImpl {

    private static final long serialVersionUID = 1L;

    // Return-location markers used to detect where the lookup was launched from.
    private static final String MERGE_PROPOSAL_LOG_ACTION = "mergeProposalLog.do";
    private static final String AWARD_HOME_ACTION = "awardHome.do";
    private static final String OPEN = "open";

    // Flags set per-search by configureCustomActions(); control which action
    // links getCustomActionUrls() renders.
    private boolean includeMainSearchCustomActionUrls;
    private boolean includeMergeCustomActionUrls;

    private DocumentService documentService;
    private InstitutionalProposalService institutionalProposalService;

    public void setDocumentService(DocumentService documentService) {
        this.documentService = documentService;
    }

    /*
     * Overriding this to only return the currently Active version of a proposal
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<? extends BusinessObject> getSearchResults(Map<String, String> fieldValues) {
        super.setBackLocationDocFormKey(fieldValues);
        configureCustomActions(fieldValues);
        // Force the search to the ACTIVE version regardless of what was entered.
        fieldValues.remove(InstitutionalProposal.PROPOSAL_SEQUENCE_STATUS_PROPERTY_STRING);
        fieldValues.put(InstitutionalProposal.PROPOSAL_SEQUENCE_STATUS_PROPERTY_STRING, VersionStatus.ACTIVE.toString());

        // Translate the pseudo "lookupUnit.*" criteria into a secondary search
        // against InstitutionalProposalPerson, then constrain the main search
        // by the matching contact ids ('|'-separated OR list).
        Map<String, String> formProps = new HashMap<String, String>();
        if (!StringUtils.isEmpty(fieldValues.get("lookupUnit.unitName"))) {
            formProps.put("units.unit.unitName", fieldValues.get("lookupUnit.unitName"));
        }
        if (!StringUtils.isEmpty(fieldValues.get("lookupUnit.unitNumber"))) {
            formProps.put("units.unitNumber", fieldValues.get("lookupUnit.unitNumber"));
        }
        fieldValues.remove("lookupUnit.unitNumber");
        fieldValues.remove("lookupUnit.unitName");
        if (!formProps.isEmpty()) {
            List<Long> ids = new ArrayList<Long>();
            Collection<InstitutionalProposalPerson> persons =
                    getLookupService().findCollectionBySearch(InstitutionalProposalPerson.class, formProps);
            if (persons.isEmpty()) {
                // No person matched the unit criteria: short-circuit to no results.
                return new ArrayList<InstitutionalProposal>();
            }
            for (InstitutionalProposalPerson person : persons) {
                ids.add(person.getInstitutionalProposalContactId());
            }
            fieldValues.put("projectPersons.institutionalProposalContactId", StringUtils.join(ids, '|'));
        }
        List<InstitutionalProposal> searchResults = (List<InstitutionalProposal>) super.getSearchResults(fieldValues);

        // Award-launched lookups must hide proposals that can't be linked.
        if (lookupIsFromAward(fieldValues)) {
            filterAlreadyLinkedProposals(searchResults, fieldValues);
            filterApprovedPendingSubmitProposals(searchResults);
            filterInvalidProposalStatus(searchResults);
        }

        List<InstitutionalProposal> filteredResults = filterForPermissions(searchResults);

        return filteredResults;
    }

    /**
     * This method filters results so that the person doing the lookup only gets back the documents he can view.
     * @param results the unfiltered search results
     * @return only the proposals whose document the current user may open
     */
    protected List<InstitutionalProposal> filterForPermissions(List<InstitutionalProposal> results) {
        Person user = GlobalVariables.getUserSession().getPerson();
        InstitutionalProposalDocumentAuthorizer authorizer = new InstitutionalProposalDocumentAuthorizer();
        List<InstitutionalProposal> filteredResults = new ArrayList<InstitutionalProposal>();
        for (InstitutionalProposal institutionalProposal : results) {
            String documentNumber = institutionalProposal.getInstitutionalProposalDocument().getDocumentNumber();
            try {
                InstitutionalProposalDocument document =
                        (InstitutionalProposalDocument) documentService.getByDocumentHeaderId(documentNumber);
                if (authorizer.canOpen(document, user)) {
                    filteredResults.add(institutionalProposal);
                }
            } catch (WorkflowException e) {
                // Deliberately best-effort: an unloadable document is simply
                // excluded from the results rather than failing the lookup.
                LOG.warn("Cannot find Document with header id " + documentNumber);
            }
        }
        return filteredResults;
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<HtmlData> getCustomActionUrls(BusinessObject businessObject, List pkNames) {
        List<HtmlData> htmlDataList = new ArrayList<HtmlData>();
        if (includeMainSearchCustomActionUrls) {
            htmlDataList.add(getOpenLink(((InstitutionalProposal) businessObject).getInstitutionalProposalDocument()));
        }
        if (includeMergeCustomActionUrls) {
            htmlDataList.add(getSelectLink((InstitutionalProposal) businessObject));
        }
        htmlDataList.add(getMedusaLink(((InstitutionalProposal) businessObject).getInstitutionalProposalDocument(), false));
        return htmlDataList;
    }

    /** Builds the "open" link that routes through the document handler. */
    protected AnchorHtmlData getOpenLink(Document document) {
        AnchorHtmlData htmlData = new AnchorHtmlData();
        htmlData.setDisplayText(OPEN);
        Properties parameters = new Properties();
        parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, KRADConstants.DOC_HANDLER_METHOD);
        parameters.put(KRADConstants.PARAMETER_COMMAND, KewApiConstants.DOCSEARCH_COMMAND);
        parameters.put(KRADConstants.DOCUMENT_TYPE_NAME, getDocumentTypeName());
        parameters.put("viewDocument", "true");
        parameters.put("docOpenedFromIPSearch", "true");
        parameters.put("docId", document.getDocumentNumber());
        String href = UrlFactory.parameterizeUrl("../"+getHtmlAction(), parameters);
        htmlData.setHref(href);
        return htmlData;
    }

    @Override
    protected String getHtmlAction() {
        return "institutionalProposalHome.do";
    }

    @Override
    protected String getDocumentTypeName() {
        return "InstitutionalProposalDocument";
    }

    @Override
    protected String getKeyFieldName() {
        return InstitutionalProposal.PROPOSAL_NUMBER_PROPERTY_STRING;
    }

    /** True when the lookup's back location points at the Award home page. */
    protected boolean lookupIsFromAward(Map<String, String> fieldValues) {
        String returnLocation = fieldValues.get(KRADConstants.BACK_LOCATION);
        return returnLocation != null && returnLocation.contains(AWARD_HOME_ACTION);
    }

    /*
     * Filters will set flag in IP indicating that they should not be selectable
     * in an Award-based search if they are already linked, if they are Approval Pending Submitted, or if
     * they are not of status 1, 2, or 4. We do this here (instead of in the IP object itself) because
     * this object knows the origin of the lookup, and can easily determine if the IP is already linked.
     */

    /*
     * This method filters out IP's which are already linked to proposals
     */
    @SuppressWarnings("unchecked")
    protected void filterAlreadyLinkedProposals(List<? extends BusinessObject> searchResults, Map<String, String> fieldValues) {
        // Linked proposal ids are stashed in the user session by the Award screens.
        List<Long> linkedProposals =
                (List<Long>) GlobalVariables.getUserSession().retrieveObject(Constants.LINKED_FUNDING_PROPOSALS_KEY);
        if (linkedProposals == null) {
            return;
        }
        for (Long linkedProposalId : linkedProposals) {
            for (int j = 0; j < searchResults.size(); j++) {
                InstitutionalProposal institutionalProposal = (InstitutionalProposal) searchResults.get(j);
                if (linkedProposalId.equals(institutionalProposal.getProposalId())) {
                    // "Hide" means suppress the return link, not remove the row.
                    institutionalProposal.setShowReturnLink(false);
                    break;
                }
            }
        }
    }

    /*
     * This method filters out IP's which were generated from PD whose ProposeStateType is "Approval Pending Submitted"
     */
    protected void filterApprovedPendingSubmitProposals(List<? extends BusinessObject> searchResults) {
        for (int j = 0; j < searchResults.size(); j++) {
            if (isDevelopmentProposalAppPendingSubmitted((InstitutionalProposal) searchResults.get(j))) {
                ((InstitutionalProposal)searchResults.get(j)).setShowReturnLink(false);
            }
        }
    }

    /**
     * This method is to filter out IP's not having codes in the valid funding proposal status codes parameter.
     **/
    protected void filterInvalidProposalStatus(List<? extends BusinessObject> searchResults) {
        Collection<String> validCodes = getInstitutionalProposalService().getValidFundingProposalStatusCodes();
        for (int j = 0; j < searchResults.size(); j++) {
            InstitutionalProposal result = (InstitutionalProposal) searchResults.get(j);
            if (!validCodes.contains(result.getStatusCode().toString())) {
                result.setShowReturnLink(false);
            }
        }
    }

    /**
     * Find if any proposal associate with this INSP has 'Approval Pending Submitted' proposal state type
     **/
    protected boolean isDevelopmentProposalAppPendingSubmitted(InstitutionalProposal ip) {
        boolean isApprovePending = false;
        Collection<DevelopmentProposal> devProposals = getDevelopmentProposals(ip);
        for (DevelopmentProposal developmentProposal : devProposals) {
            // "5" is the 'Approval Pending Submitted' proposal state type code
            // — presumably; confirm against the proposal state type table.
            if ("5".equals(developmentProposal.getProposalStateTypeCode())) {
                isApprovePending = true;
                break;
            }
        }
        return isApprovePending;
    }

    /*
     * find any version of IP that has PD with approve pending
     */
    @SuppressWarnings("unchecked")
    protected Collection<DevelopmentProposal> getDevelopmentProposals(InstitutionalProposal instProposal) {
        //find any dev prop linked to any version of this inst prop
        Collection<DevelopmentProposal> devProposals = new ArrayList<DevelopmentProposal>();
        List<ProposalAdminDetails> proposalAdminDetails = (List<ProposalAdminDetails>) businessObjectService
                .findMatchingOrderBy(ProposalAdminDetails.class,
                        getFieldValues("instProposalId", instProposal.getProposalId()),
                        "devProposalNumber", true);
        if(proposalAdminDetails.size() > 0) {
            // Only the latest linked development proposal is considered.
            String latestDevelopmentProposalDocNumber =
                    proposalAdminDetails.get(proposalAdminDetails.size() - 1).getDevProposalNumber();
            DevelopmentProposal devProp = (DevelopmentProposal)businessObjectService
                    .findBySinglePrimaryKey(DevelopmentProposal.class, latestDevelopmentProposalDocNumber);
            devProposals.add(devProp);
        }
        return devProposals;
    }

    /** Convenience: single-entry criteria map for BO service queries. */
    protected Map<String, Object> getFieldValues(String key, Object value){
        Map<String, Object> fieldValues = new HashMap<String, Object>();
        fieldValues.put(key, value);
        return fieldValues;
    }

    /*
     * Determine whether lookup is being called from a location that shouldn't include the custom action links
     */
    protected void configureCustomActions(Map<String, String> fieldValues) {
        String returnLocation = fieldValues.get(KRADConstants.BACK_LOCATION);
        if (returnLocation != null) {
            if (returnLocation.contains(AWARD_HOME_ACTION)) {
                includeMainSearchCustomActionUrls = false;
                includeMergeCustomActionUrls = false;
            } else if (returnLocation.contains(MERGE_PROPOSAL_LOG_ACTION)) {
                includeMainSearchCustomActionUrls = false;
                includeMergeCustomActionUrls = true;
            } else {
                includeMainSearchCustomActionUrls = true;
                includeMergeCustomActionUrls = false;
            }
        } else {
            includeMainSearchCustomActionUrls = false;
            includeMergeCustomActionUrls = false;
        }
    }

    /** Builds the "select" link used by the merge-proposal-log flow. */
    protected AnchorHtmlData getSelectLink(InstitutionalProposal institutionalProposal) {
        AnchorHtmlData htmlData = new AnchorHtmlData();
        htmlData.setDisplayText("select");
        Properties parameters = new Properties();
        parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, "mergeToInstitutionalProposal");
        parameters.put("institutionalProposalNumber", institutionalProposal.getProposalNumber());
        String href = UrlFactory.parameterizeUrl("../" + MERGE_PROPOSAL_LOG_ACTION, parameters);
        htmlData.setHref(href);
        return htmlData;
    }

    @Override
    public boolean isResultReturnable(BusinessObject object) {
        InstitutionalProposal institutionalProposal = (InstitutionalProposal)object;
        return institutionalProposal.getShowReturnLink();
    }

    protected InstitutionalProposalService getInstitutionalProposalService() {
        return institutionalProposalService;
    }

    public void setInstitutionalProposalService(InstitutionalProposalService institutionalProposalService) {
        this.institutionalProposalService = institutionalProposalService;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs;

import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.anyShort;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;

import java.io.DataOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.client.impl.LeaseRenewer;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;

// Tests for HDFS lease behavior: lease abort on renewal failure and lease
// tracking across file/directory renames.
public class TestLease {
  // True when the NameNode holds a lease for the given path.
  static boolean hasLease(MiniDFSCluster cluster, Path src) {
    return NameNodeAdapter.getLeaseForPath(cluster.getNameNode(),
        src.toString()) != null;
  }

  // Total number of leases currently tracked by the NameNode's lease manager.
  static int leaseCount(MiniDFSCluster cluster) {
    return NameNodeAdapter.getLeaseManager(cluster.getNamesystem()).countLease();
  }

  static final String dirString = "/test/lease";
  final Path dir = new Path(dirString);
  static final Logger LOG = LoggerFactory.getLogger(TestLease.class);
  final Configuration conf = new HdfsConfiguration();

  // Scenario: lease renewal starts failing (InvalidToken from the NameNode).
  // Writes must keep working past the soft limit, fail after the hard limit
  // (lease aborted), and recover once renewal works again.
  @Test
  public void testLeaseAbort() throws Exception {
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    try {
      cluster.waitActive();
      NamenodeProtocols preSpyNN = cluster.getNameNodeRpc();
      NamenodeProtocols spyNN = spy(preSpyNN);

      DFSClient dfs = new DFSClient(null, spyNN, conf, null);
      byte[] buf = new byte[1024];

      FSDataOutputStream c_out = createFsOut(dfs, dirString + "c");
      c_out.write(buf, 0, 1024);
      c_out.close();

      DFSInputStream c_in = dfs.open(dirString + "c");
      FSDataOutputStream d_out = createFsOut(dfs, dirString + "d");

      // stub the renew method.
      doThrow(new RemoteException(InvalidToken.class.getName(),
          "Your token is worthless")).when(spyNN).renewLease(anyString());

      // We don't need to wait the lease renewer thread to act.
      // call renewLease() manually.
      // make it look like the soft limit has been exceeded.
      LeaseRenewer originalRenewer = dfs.getLeaseRenewer();
      dfs.lastLeaseRenewal = Time.monotonicNow()
          - HdfsConstants.LEASE_SOFTLIMIT_PERIOD - 1000;
      try {
        dfs.renewLease();
      } catch (IOException e) {}

      // Things should continue to work it passes hard limit without
      // renewing.
      try {
        d_out.write(buf, 0, 1024);
        LOG.info("Write worked beyond the soft limit as expected.");
      } catch (IOException e) {
        Assert.fail("Write failed.");
      }

      long hardlimit = conf.getLong(DFSConfigKeys.DFS_LEASE_HARDLIMIT_KEY,
          DFSConfigKeys.DFS_LEASE_HARDLIMIT_DEFAULT) * 1000;
      // make it look like the hard limit has been exceeded.
      dfs.lastLeaseRenewal = Time.monotonicNow() - hardlimit - 1000;
      dfs.renewLease();

      // this should not work.
      try {
        d_out.write(buf, 0, 1024);
        d_out.close();
        Assert.fail("Write did not fail even after the fatal lease renewal failure");
      } catch (IOException e) {
        LOG.info("Write failed as expected. ", e);
      }

      // If aborted, the renewer should be empty. (no reference to clients)
      Thread.sleep(1000);
      Assert.assertTrue(originalRenewer.isEmpty());

      // unstub
      doNothing().when(spyNN).renewLease(anyString());

      // existing input streams should work
      try {
        int num = c_in.read(buf, 0, 1);
        if (num != 1) {
          Assert.fail("Failed to read 1 byte");
        }
        c_in.close();
      } catch (IOException e) {
        LOG.error("Read failed with ", e);
        Assert.fail("Read after lease renewal failure failed");
      }

      // new file writes should work.
      try {
        c_out = createFsOut(dfs, dirString + "c");
        c_out.write(buf, 0, 1024);
        c_out.close();
      } catch (IOException e) {
        LOG.error("Write failed with ", e);
        Assert.fail("Write failed");
      }
    } finally {
      cluster.shutdown();
    }
  }

  // Scenario: a lease must follow its file through renames of the file and of
  // its parent directories. (Method continues beyond this source chunk.)
  @Test
  public void testLeaseAfterRename() throws Exception {
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    try {
      Path p = new Path("/test-file");
      Path d = new Path("/test-d");
      Path d2 = new Path("/test-d-other");

      // open a file to get a lease
      FileSystem fs = cluster.getFileSystem();
      FSDataOutputStream out = fs.create(p);
      out.writeBytes("something");
      //out.hsync();
      Assert.assertTrue(hasLease(cluster, p));
      Assert.assertEquals(1, leaseCount(cluster));

      // just to ensure first fs doesn't have any logic to twiddle leases
      DistributedFileSystem fs2 = (DistributedFileSystem) FileSystem.newInstance(fs.getUri(), fs.getConf());

      // rename the file into an existing dir
      LOG.info("DMS: rename file into dir");
      Path pRenamed = new Path(d, p.getName());
      fs2.mkdirs(d);
      fs2.rename(p, pRenamed);
      Assert.assertFalse(p+" exists", fs2.exists(p));
      Assert.assertTrue(pRenamed+" not found", fs2.exists(pRenamed));
      Assert.assertFalse("has lease for "+p, hasLease(cluster, p));
      Assert.assertTrue("no lease for "+pRenamed, hasLease(cluster, pRenamed));
      Assert.assertEquals(1, leaseCount(cluster));

      // rename the parent dir to a new non-existent dir
      LOG.info("DMS: rename parent dir");
      Path pRenamedAgain = new Path(d2, pRenamed.getName());
      fs2.rename(d, d2);
      // src gone
      Assert.assertFalse(d+" exists", fs2.exists(d));
      Assert.assertFalse("has lease for "+pRenamed, hasLease(cluster, pRenamed));
      // dst checks
      Assert.assertTrue(d2+" not found", fs2.exists(d2));
      Assert.assertTrue(pRenamedAgain+" not found", fs2.exists(pRenamedAgain));
      Assert.assertTrue("no lease for "+pRenamedAgain, hasLease(cluster, pRenamedAgain));
      Assert.assertEquals(1, leaseCount(cluster));

      // rename the parent dir to existing dir
      // NOTE: rename w/o options moves paths into existing dir
LOG.info("DMS: rename parent again"); pRenamed = pRenamedAgain; pRenamedAgain = new Path(new Path(d, d2.getName()), p.getName()); fs2.mkdirs(d); fs2.rename(d2, d); // src gone Assert.assertFalse(d2+" exists", fs2.exists(d2)); Assert.assertFalse("no lease for "+pRenamed, hasLease(cluster, pRenamed)); // dst checks Assert.assertTrue(d+" not found", fs2.exists(d)); Assert.assertTrue(pRenamedAgain +" not found", fs2.exists(pRenamedAgain)); Assert.assertTrue("no lease for "+pRenamedAgain, hasLease(cluster, pRenamedAgain)); Assert.assertEquals(1, leaseCount(cluster)); // rename with opts to non-existent dir pRenamed = pRenamedAgain; pRenamedAgain = new Path(d2, p.getName()); fs2.rename(pRenamed.getParent(), d2, Options.Rename.OVERWRITE); // src gone Assert.assertFalse(pRenamed.getParent() +" not found", fs2.exists(pRenamed.getParent())); Assert.assertFalse("has lease for "+pRenamed, hasLease(cluster, pRenamed)); // dst checks Assert.assertTrue(d2+" not found", fs2.exists(d2)); Assert.assertTrue(pRenamedAgain+" not found", fs2.exists(pRenamedAgain)); Assert.assertTrue("no lease for "+pRenamedAgain, hasLease(cluster, pRenamedAgain)); Assert.assertEquals(1, leaseCount(cluster)); // rename with opts to existing dir // NOTE: rename with options will not move paths into the existing dir pRenamed = pRenamedAgain; pRenamedAgain = new Path(d, p.getName()); fs2.rename(pRenamed.getParent(), d, Options.Rename.OVERWRITE); // src gone Assert.assertFalse(pRenamed.getParent() +" not found", fs2.exists(pRenamed.getParent())); Assert.assertFalse("has lease for "+pRenamed, hasLease(cluster, pRenamed)); // dst checks Assert.assertTrue(d+" not found", fs2.exists(d)); Assert.assertTrue(pRenamedAgain+" not found", fs2.exists(pRenamedAgain)); Assert.assertTrue("no lease for "+pRenamedAgain, hasLease(cluster, pRenamedAgain)); Assert.assertEquals(1, leaseCount(cluster)); out.close(); } finally { cluster.shutdown(); } } /** * Test that we can open up a file for write, move it to another location, 
* and then create a new file in the previous location, without causing any * lease conflicts. This is possible because we now use unique inode IDs * to identify files to the NameNode. */ @Test public void testLeaseAfterRenameAndRecreate() throws Exception { MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); try { final Path path1 = new Path("/test-file"); final String contents1 = "contents1"; final Path path2 = new Path("/test-file-new-location"); final String contents2 = "contents2"; // open a file to get a lease FileSystem fs = cluster.getFileSystem(); FSDataOutputStream out1 = fs.create(path1); out1.writeBytes(contents1); Assert.assertTrue(hasLease(cluster, path1)); Assert.assertEquals(1, leaseCount(cluster)); DistributedFileSystem fs2 = (DistributedFileSystem) FileSystem.newInstance(fs.getUri(), fs.getConf()); fs2.rename(path1, path2); FSDataOutputStream out2 = fs2.create(path1); out2.writeBytes(contents2); out2.close(); // The first file should still be open and valid Assert.assertTrue(hasLease(cluster, path2)); out1.close(); // Contents should be as expected DistributedFileSystem fs3 = (DistributedFileSystem) FileSystem.newInstance(fs.getUri(), fs.getConf()); Assert.assertEquals(contents1, DFSTestUtil.readFile(fs3, path2)); Assert.assertEquals(contents2, DFSTestUtil.readFile(fs3, path1)); } finally { cluster.shutdown(); } } @Test public void testLease() throws Exception { MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); try { FileSystem fs = cluster.getFileSystem(); Assert.assertTrue(fs.mkdirs(dir)); Path a = new Path(dir, "a"); Path b = new Path(dir, "b"); DataOutputStream a_out = fs.create(a); a_out.writeBytes("something"); Assert.assertTrue(hasLease(cluster, a)); Assert.assertTrue(!hasLease(cluster, b)); DataOutputStream b_out = fs.create(b); b_out.writeBytes("something"); Assert.assertTrue(hasLease(cluster, a)); Assert.assertTrue(hasLease(cluster, b)); a_out.close(); b_out.close(); 
Assert.assertTrue(!hasLease(cluster, a)); Assert.assertTrue(!hasLease(cluster, b)); Path fileA = new Path(dir, "fileA"); FSDataOutputStream fileA_out = fs.create(fileA); fileA_out.writeBytes("something"); Assert.assertTrue("Failed to get the lease!", hasLease(cluster, fileA)); fs.delete(dir, true); try { fileA_out.hflush(); Assert.fail("Should validate file existence!"); } catch (FileNotFoundException e) { // expected GenericTestUtils.assertExceptionContains("File does not exist", e); } } finally { if (cluster != null) {cluster.shutdown();} } } @SuppressWarnings("unchecked") @Test public void testFactory() throws Exception { final String[] groups = new String[]{"supergroup"}; final UserGroupInformation[] ugi = new UserGroupInformation[3]; for(int i = 0; i < ugi.length; i++) { ugi[i] = UserGroupInformation.createUserForTesting("user" + i, groups); } Mockito.doReturn(new HdfsFileStatus.Builder() .replication(1) .blocksize(1024) .perm(new FsPermission((short) 777)) .owner("owner") .group("group") .symlink(new byte[0]) .path(new byte[0]) .fileId(1010) .build()) .when(mcp) .getFileInfo(anyString()); Mockito.doReturn(new HdfsFileStatus.Builder() .replication(1) .blocksize(1024) .perm(new FsPermission((short) 777)) .owner("owner") .group("group") .symlink(new byte[0]) .path(new byte[0]) .fileId(1010) .build()) .when(mcp) .create(anyString(), any(), anyString(), any(), anyBoolean(), anyShort(), anyLong(), any(), any(), any()); final Configuration conf = new Configuration(); final DFSClient c1 = createDFSClientAs(ugi[0], conf); FSDataOutputStream out1 = createFsOut(c1, "/out1"); final DFSClient c2 = createDFSClientAs(ugi[0], conf); FSDataOutputStream out2 = createFsOut(c2, "/out2"); Assert.assertEquals(c1.getLeaseRenewer(), c2.getLeaseRenewer()); final DFSClient c3 = createDFSClientAs(ugi[1], conf); FSDataOutputStream out3 = createFsOut(c3, "/out3"); Assert.assertTrue(c1.getLeaseRenewer() != c3.getLeaseRenewer()); final DFSClient c4 = createDFSClientAs(ugi[1], conf); 
FSDataOutputStream out4 = createFsOut(c4, "/out4"); Assert.assertEquals(c3.getLeaseRenewer(), c4.getLeaseRenewer()); final DFSClient c5 = createDFSClientAs(ugi[2], conf); FSDataOutputStream out5 = createFsOut(c5, "/out5"); Assert.assertTrue(c1.getLeaseRenewer() != c5.getLeaseRenewer()); Assert.assertTrue(c3.getLeaseRenewer() != c5.getLeaseRenewer()); } private FSDataOutputStream createFsOut(DFSClient dfs, String path) throws IOException { return new FSDataOutputStream(dfs.create(path, true), null); } static final ClientProtocol mcp = Mockito.mock(ClientProtocol.class); static public DFSClient createDFSClientAs(UserGroupInformation ugi, final Configuration conf) throws Exception { return ugi.doAs(new PrivilegedExceptionAction<DFSClient>() { @Override public DFSClient run() throws Exception { return new DFSClient(null, mcp, conf, null); } }); } }
/*
 * Copyright 2007-2022 Scott C. Gray
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sqsh.commands;

import org.sqsh.Command;
import org.sqsh.DatabaseCommand;
import org.sqsh.SQLTools;
import org.sqsh.Session;
import org.sqsh.SessionRedrawBufferMessage;
import org.sqsh.SqshOptions;
import org.sqsh.options.Argv;
import org.sqsh.options.OptionProperty;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import static org.sqsh.options.ArgumentRequired.NONE;

/**
 * Implements the \select command: generates a SELECT statement joining the
 * named tables (by key relationships or column-name/type match) and either
 * prints it or loads it into the current SQL buffer.
 */
public class Select extends Command implements DatabaseCommand {

    private static class Options extends SqshOptions {

        @OptionProperty(option = 'p', longOption = "print", arg = NONE,
                description = "Print statement to screen, do not append to SQL buffer")
        public boolean printOnly = false;

        @OptionProperty(option = 'n', longOption = "natural-join", arg = NONE,
                description = "Create a natural join instead of a key join")
        public boolean naturalJoin = false;

        @Argv(program = "\\select", min = 1, usage = "table [table ...]")
        public List<String> arguments = new ArrayList<>();
    }

    @Override
    public SqshOptions getOptions() {
        return new Options();
    }

    /**
     * Builds a SELECT over the requested tables and prints it or replaces
     * the current SQL buffer.
     *
     * @param session The current session (supplies the JDBC connection).
     * @param opts Parsed {@link Options}.
     * @return 0 on success, 1 on any error (message written to session.err).
     */
    @Override
    public int execute(Session session, SqshOptions opts) throws Exception {
        Options options = (Options) opts;

        // Make sure the caller provided some table names.
        if (options.arguments.isEmpty()) {
            session.err.println("Use: \\select table [table ...]");
            return 1;
        }

        // First, gather up a description of every table. Aliases are assigned
        // 'a', 'b', 'c', ... in argument order.
        Table[] descriptions = new Table[options.arguments.size()];
        for (int i = 0; i < descriptions.length; i++) {
            try {
                descriptions[i] = describe(session, options.arguments.get(i));
                descriptions[i].alias = Character.toString((char) ('a' + i));
            } catch (SQLException e) {
                session.err.println("Failed to fetch column descriptions for "
                        + "table " + options.arguments.get(i) + ": " + e.getMessage());
                return 1;
            }

            // If no columns where found for a given table, that means it couldn't be found.
            if (descriptions[i].columns.length == 0) {
                session.err.println("Unable to find description of table "
                        + options.arguments.get(i));
                return 1;
            }
        }

        // Now we can start trying to build our query!
        String linesep = System.getProperty("line.separator");
        StringBuilder query = new StringBuilder();

        // Build the select list: alias.column for every column of every table.
        query.append("SELECT ");
        for (int i = 0; i < descriptions.length; i++) {
            Table table = descriptions[i];
            for (int j = 0; j < table.columns.length; j++) {
                if (i > 0 || j > 0) {
                    query.append(',').append(linesep).append("       ");
                }
                query.append(table.alias).append('.')
                        .append(SQLTools.quoteIdentifier(table.columns[j].name));
            }
        }

        // Next, build the FROM clause.
        query.append(linesep).append("  FROM ");
        for (int i = 0; i < descriptions.length; i++) {
            Table table = descriptions[i];
            if (i > 0) {
                query.append(", ");
            }
            query.append(table.name).append(' ').append(table.alias);
        }

        // Finally, the WHERE clause (only needed when joining 2+ tables).
        if (descriptions.length > 1) {
            String join = null;
            if (options.naturalJoin) {
                join = getNaturalJoin(descriptions, linesep);
            } else {
                try {
                    join = getKeyJoin(session, descriptions, linesep);
                } catch (SQLException e) {
                    SQLTools.printException(session, e);
                    return 1;
                }
            }
            if (join.length() > 0) {
                query.append(linesep).append(" WHERE ").append(join);
            }
        }

        // Ok, we are finished. Now lets update the query buffer.
        if (options.printOnly) {
            session.out.println(query);
        } else {
            session.getBufferManager().getCurrent().set(query.toString());
            throw new SessionRedrawBufferMessage();
        }
        return 0;
    }

    /**
     * Attempts to fetch a join clause by asking the database about primary/foreign key
     * relationships between tables.
     *
     * @param session The session.
     * @param tables The tables to join
     * @param linesep Linesep
     * @return The join predicate text (empty string when no relationships found).
     * @throws SQLException On metadata lookup failure.
     */
    private String getKeyJoin(Session session, Table[] tables, String linesep)
            throws SQLException {
        Connection conn = session.getConnection();
        DatabaseMetaData meta = conn.getMetaData();
        StringBuilder join = new StringBuilder();
        int joinCount = 0;
        for (int t1 = 0; t1 < tables.length; ++t1) {
            Table table1 = tables[t1];
            for (int t2 = 0; t2 < tables.length; ++t2) {
                Table table2 = tables[t2];
                // FIX: the cross-reference ResultSet was previously never closed,
                // leaking a cursor per table pair; try-with-resources closes it.
                try (ResultSet rs = meta.getCrossReference(table1.catalog, table1.schema,
                        table1.tableName, table2.catalog, table2.schema, table2.tableName)) {
                    while (rs.next()) {
                        if (joinCount > 0) {
                            join.append(linesep).append("   AND ");
                        }
                        // Columns 4 and 8 are PKCOLUMN_NAME and FKCOLUMN_NAME.
                        join.append(table1.alias).append('.')
                                .append(SQLTools.quoteIdentifier(rs.getString(4)))
                                .append(" = ").append(table2.alias).append('.')
                                .append(SQLTools.quoteIdentifier(rs.getString(8)));
                        ++joinCount;
                    }
                }
            }
        }
        return join.toString();
    }

    /**
     * Generates a "natural" join between tables...that is, it attempts to join all columns
     * of the same name and datatype.
     *
     * @param tables The tables.
     * @param linesep The line separator
     * @return The join predicate text (empty string when nothing matches).
     */
    private String getNaturalJoin(Table[] tables, String linesep) {
        StringBuilder join = new StringBuilder();
        int joinCount = 0;
        for (int t1 = 0; t1 < tables.length; ++t1) {
            Table table1 = tables[t1];
            // Only pair each table with the ones after it to avoid duplicates.
            for (int t2 = t1 + 1; t2 < tables.length; ++t2) {
                Table table2 = tables[t2];
                for (Column c1 : table1.columns) {
                    for (Column c2 : table2.columns) {
                        if (c1.name.equals(c2.name) && c1.type == c2.type) {
                            if (joinCount > 0) {
                                join.append(linesep).append("   AND ");
                            }
                            join.append(table1.alias).append('.')
                                    .append(SQLTools.quoteIdentifier(c1.name))
                                    .append(" = ").append(table2.alias).append('.')
                                    .append(SQLTools.quoteIdentifier(c2.name));
                            ++joinCount;
                        }
                    }
                }
            }
        }
        return join.toString();
    }

    /**
     * Helper method to fetch the description of a table.
     *
     * @param session The session
     * @param name The name of the table.
     * @return A description of the table (columns empty if the table is unknown).
     * @throws SQLException Thrown if things don't work out so good.
     */
    private Table describe(Session session, String name) throws SQLException {
        Connection conn = session.getConnection();
        DatabaseMetaData meta = conn.getMetaData();
        SQLTools.ObjectDescription nameDescription = SQLTools.parseObjectName(name);
        Table table = new Table();
        table.name = name;
        // FIX: the getColumns ResultSet was previously never closed; close it
        // deterministically with try-with-resources.
        try (ResultSet result = meta.getColumns(nameDescription.getCatalog(),
                nameDescription.getSchema(), nameDescription.getName(), "%")) {
            ArrayList<Column> cols = new ArrayList<Column>();
            while (result.next()) {
                Column col = new Column();
                // Catalog/schema/table come back normalized by the driver.
                table.catalog = result.getString(1);
                table.schema = result.getString(2);
                table.tableName = result.getString(3);
                col.name = result.getString(4);
                col.type = result.getInt(5);
                cols.add(col);
            }
            table.columns = cols.toArray(new Column[0]);
        }
        return table;
    }

    /** Metadata for one table named on the command line. */
    private static class Table {
        public String name;       // name exactly as the user typed it (used in FROM)
        public String catalog;
        public String tableName;  // driver-normalized table name
        public String schema;
        public String owner;      // NOTE(review): never assigned in this file
        public String alias;      // single-letter alias assigned in execute()
        public Column[] columns;
    }

    /** Name and JDBC type of a single column. */
    private static class Column {
        public String name;
        public int type;
    }
}
/*
 * The MIT License
 *
 * Copyright (c) 2016 The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package picard.fingerprint;

import htsjdk.samtools.BamFileIoUtils;
import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.metrics.MetricsFile;
import htsjdk.samtools.util.CloserUtil;
import htsjdk.samtools.util.IOUtil;
import htsjdk.samtools.util.Log;
import htsjdk.samtools.util.SequenceUtil;
import htsjdk.variant.utils.SAMSequenceDictionaryExtractor;
import htsjdk.variant.vcf.VCFFileReader;
import htsjdk.variant.vcf.VCFHeader;
import picard.PicardException;
import picard.analysis.FingerprintingDetailMetrics;
import picard.analysis.FingerprintingSummaryMetrics;
import picard.cmdline.CommandLineProgram;
import picard.cmdline.CommandLineProgramProperties;
import picard.cmdline.Option;
import picard.cmdline.StandardOptionDefinitions;
import picard.cmdline.programgroups.Fingerprinting;

import java.io.File;
import java.util.Collections;
import java.util.List;

/**
 * Attempts to check the sample identity of the sequence/genotype data in the provided file (SAM/BAM or VCF)
 * against a set of known genotypes in the supplied genotype file (in either GELI or VCF format).
 *
 * @author Tim Fennell
 */
@CommandLineProgramProperties(
        usage = CheckFingerprint.USAGE_DETAILS,
        usageShort = "Computes a fingerprint from the supplied input (SAM/BAM or VCF) file and compares it to the provided genotypes",
        programGroup = Fingerprinting.class
)
public class CheckFingerprint extends CommandLineProgram {

    static final String USAGE_DETAILS = "Computes a fingerprint from the supplied input file (SAM/BAM or VCF) file and " +
            "compares it to the expected fingerprint genotypes provided. The key output is a LOD score " +
            "which represents the relative likelihood of the sequence data originating from the same " +
            "sample as the genotypes vs. from a random sample. Two outputs are produced: (1) a summary " +
            "metrics file that gives metrics at the single sample level (if the input was a VCF) or at the read " +
            "level (lane or index within a lane) (if the input was a SAM/BAM) " +
            "versus a set of known genotypes for the expected sample, and (2) a detail metrics file that " +
            "contains an individual SNP/Haplotype comparison within a fingerprint comparison. The two " +
            "files may be specified individually using the SUMMARY_OUTPUT and DETAIL_OUTPUT options. " +
            "Alternatively the OUTPUT option may be used instead to give the base of the two output " +
            "files, with the summary metrics having a file extension '" + CheckFingerprint.FINGERPRINT_SUMMARY_FILE_SUFFIX + "' " +
            "and the detail metrics having a file extension '" + CheckFingerprint.FINGERPRINT_DETAIL_FILE_SUFFIX + "'.";

    @Option(shortName = StandardOptionDefinitions.INPUT_SHORT_NAME,
            doc = "Input file SAM/BAM or VCF. If a VCF is used, " +
            "it must have at least one sample. If there are more than one samples in the VCF, the parameter OBSERVED_SAMPLE_ALIAS must " +
            "be provided in order to indicate which sample's data to use. If there are no samples in the VCF, an exception will be thrown.")
    public File INPUT;

    @Option(optional = true,
            doc = "If the input is a VCF, this parameters used to select which sample's data in the VCF to use.")
    public String OBSERVED_SAMPLE_ALIAS;

    @Option(shortName = StandardOptionDefinitions.OUTPUT_SHORT_NAME,
            doc = "The base prefix of output files to write. The summary metrics " +
            "will have the file extension '" + CheckFingerprint.FINGERPRINT_SUMMARY_FILE_SUFFIX + "' and the detail metrics will have " +
            "the extension '" + CheckFingerprint.FINGERPRINT_DETAIL_FILE_SUFFIX + "'.",
            mutex = {"SUMMARY_OUTPUT", "DETAIL_OUTPUT"})
    public String OUTPUT;

    @Option(shortName = "S", doc = "The text file to which to write summary metrics.",
            mutex = {"OUTPUT"})
    public File SUMMARY_OUTPUT;

    @Option(shortName = "D", doc = "The text file to which to write detail metrics.",
            mutex = {"OUTPUT"})
    public File DETAIL_OUTPUT;

    @Option(shortName = "G", doc = "File of genotypes (VCF or GELI) to be used in comparison. May contain " +
            "any number of genotypes; CheckFingerprint will use only those that are usable for fingerprinting.")
    public File GENOTYPES;

    @Option(shortName = "SAMPLE_ALIAS", optional = true,
            doc = "This parameter can be used to specify which sample's genotypes to use from the " +
            "expected VCF file (the GENOTYPES file). If it is not supplied, the sample name from the input " +
            "(VCF or BAM read group header) will be used.")
    public String EXPECTED_SAMPLE_ALIAS;

    // NOTE(review): the doc string says "produced by the CheckFingerprint program",
    // which looks like it should name the haplotype-map-producing tool instead — confirm
    // before changing the user-visible text.
    @Option(shortName = "H", doc = "A file of haplotype information produced by the CheckFingerprint program.")
    public File HAPLOTYPE_MAP;

    @Option(shortName = "LOD",
            doc = "When counting haplotypes checked and matching, count only haplotypes " +
            "where the most likely haplotype achieves at least this LOD.")
    public double GENOTYPE_LOD_THRESHOLD = 5;

    @Option(optional = true, shortName = "IGNORE_RG",
            doc = "If the input is a SAM/BAM, and this parameter is true, treat the " +
            "entire input BAM as one single read group in the calculation, " +
            "ignoring RG annotations, and producing a single fingerprint metric for the entire BAM.")
    public boolean IGNORE_READ_GROUPS = false;

    private final Log log = Log.getInstance(CheckFingerprint.class);

    public static final String FINGERPRINT_SUMMARY_FILE_SUFFIX = "fingerprinting_summary_metrics";
    public static final String FINGERPRINT_DETAIL_FILE_SUFFIX = "fingerprinting_detail_metrics";

    // Stock main method
    public static void main(final String[] args) {
        new CheckFingerprint().instanceMainWithExit(args);
    }

    /**
     * Resolves output file names, fingerprints the input (SAM/BAM per read group,
     * or VCF per sample), compares against the expected genotypes, and writes the
     * summary and detail metrics files.
     *
     * @return 0 on success; throws PicardException on invalid input combinations.
     */
    @Override
    protected int doWork() {
        // Either OUTPUT (a prefix for both files) or the two explicit paths is set;
        // the @Option mutex enforces exclusivity.
        final File outputDetailMetricsFile, outputSummaryMetricsFile;
        if (OUTPUT == null) {
            outputDetailMetricsFile = DETAIL_OUTPUT;
            outputSummaryMetricsFile = SUMMARY_OUTPUT;
        } else {
            if (!OUTPUT.endsWith(".")) OUTPUT = OUTPUT + ".";
            outputDetailMetricsFile = new File(OUTPUT + FINGERPRINT_DETAIL_FILE_SUFFIX);
            outputSummaryMetricsFile = new File(OUTPUT + FINGERPRINT_SUMMARY_FILE_SUFFIX);
        }

        IOUtil.assertFileIsReadable(INPUT);
        IOUtil.assertFileIsReadable(HAPLOTYPE_MAP);
        IOUtil.assertFileIsReadable(GENOTYPES);
        IOUtil.assertFileIsWritable(outputDetailMetricsFile);
        IOUtil.assertFileIsWritable(outputSummaryMetricsFile);

        final FingerprintChecker checker = new FingerprintChecker(HAPLOTYPE_MAP);
        List<FingerprintResults> results;

        String observedSampleAlias = null;
        final boolean isBamOrSamFile = isBamOrSamFile(INPUT);
        if (isBamOrSamFile) {
            // The BAM, the genotypes and the haplotype map must share a sequence dictionary.
            SequenceUtil.assertSequenceDictionariesEqual(
                    SAMSequenceDictionaryExtractor.extractDictionary(INPUT),
                    SAMSequenceDictionaryExtractor.extractDictionary(GENOTYPES), true);
            SequenceUtil.assertSequenceDictionariesEqual(
                    SAMSequenceDictionaryExtractor.extractDictionary(INPUT),
                    checker.getHeader().getSequenceDictionary(), true);

            // Verify that there's only one sample in the SAM/BAM.
            final SamReader in = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT);
            for (final SAMReadGroupRecord rec : in.getFileHeader().getReadGroups()) {
                if (observedSampleAlias == null) {
                    observedSampleAlias = rec.getSample();
                } else if (!observedSampleAlias.equals(rec.getSample())) {
                    throw new PicardException("INPUT SAM/BAM file must not contain data from multiple samples.");
                }
            }
            CloserUtil.close(in);

            // If expected sample alias isn't supplied, assume it's the one from the INPUT file's RGs
            if (EXPECTED_SAMPLE_ALIAS == null) {
                EXPECTED_SAMPLE_ALIAS = observedSampleAlias;
            }

            results = checker.checkFingerprints(
                    Collections.singletonList(INPUT),
                    Collections.singletonList(GENOTYPES),
                    EXPECTED_SAMPLE_ALIAS,
                    IGNORE_READ_GROUPS);
        } else {            // Input is a VCF
            // Note that FingerprintChecker.loadFingerprints() verifies that the VCF's Sequence Dictionaries agree with that of the Haplotye Map File

            // Verify that there is only one sample in the VCF
            final VCFFileReader fileReader = new VCFFileReader(INPUT, false);
            final VCFHeader fileHeader = fileReader.getFileHeader();
            if (fileHeader.getNGenotypeSamples() < 1) {
                throw new PicardException("INPUT VCF file must contain at least one sample.");
            }
            if ((fileHeader.getNGenotypeSamples() > 1) && (OBSERVED_SAMPLE_ALIAS == null)) {
                throw new PicardException("INPUT VCF file contains multiple samples and yet the OBSERVED_SAMPLE_ALIAS parameter is not set.");
            }

            // set observedSampleAlias to the parameter, if set. Otherwise, if here, this must be a
            // single sample VCF, get its sample.
            observedSampleAlias = (OBSERVED_SAMPLE_ALIAS != null) ? OBSERVED_SAMPLE_ALIAS : fileHeader.getGenotypeSamples().get(0);

            // Now verify that observedSampleAlias is, in fact, in the VCF
            if (!fileHeader.getGenotypeSamples().contains(observedSampleAlias)) {
                throw new PicardException("INPUT VCF file does not contain OBSERVED_SAMPLE_ALIAS: " + observedSampleAlias);
            }
            // FIX: removed a dead re-assignment here — when OBSERVED_SAMPLE_ALIAS was null,
            // observedSampleAlias had already been set to getGenotypeSamples().get(0)
            // by the ternary above, so the duplicate null-check block was a no-op.

            fileReader.close();

            // If expected sample alias isn't supplied, assume it's the one from the INPUT file
            if (EXPECTED_SAMPLE_ALIAS == null) {
                EXPECTED_SAMPLE_ALIAS = observedSampleAlias;
            }

            results = checker.checkFingerprints(
                    Collections.singletonList(INPUT),
                    Collections.singletonList(GENOTYPES),
                    observedSampleAlias,
                    EXPECTED_SAMPLE_ALIAS);
        }

        final MetricsFile<FingerprintingSummaryMetrics, ?> summaryFile = getMetricsFile();
        final MetricsFile<FingerprintingDetailMetrics, ?> detailsFile = getMetricsFile();

        for (final FingerprintResults fpr : results) {
            // The best-matching fingerprint comparison for this read group / sample.
            final MatchResults mr = fpr.getMatchResults().first();

            final FingerprintingSummaryMetrics metrics = new FingerprintingSummaryMetrics();
            metrics.READ_GROUP = fpr.getReadGroup();
            metrics.SAMPLE = EXPECTED_SAMPLE_ALIAS;
            metrics.LL_EXPECTED_SAMPLE = mr.getSampleLikelihood();
            metrics.LL_RANDOM_SAMPLE = mr.getPopulationLikelihood();
            metrics.LOD_EXPECTED_SAMPLE = mr.getLOD();

            for (final LocusResult lr : mr.getLocusResults()) {
                final DiploidGenotype expectedGenotype = lr.getExpectedGenotype();
                final DiploidGenotype observedGenotype = lr.getMostLikelyGenotype();
                // Update the summary metrics
                metrics.HAPLOTYPES_WITH_GENOTYPES++;
                // Only count haplotypes whose genotype call is confident enough.
                if (lr.getLodGenotype() >= GENOTYPE_LOD_THRESHOLD) {
                    metrics.HAPLOTYPES_CONFIDENTLY_CHECKED++;

                    if (lr.getExpectedGenotype() == lr.getMostLikelyGenotype()) {
                        metrics.HAPLOTYPES_CONFIDENTLY_MATCHING++;
                    }
                    if (expectedGenotype.isHeterozygous() && observedGenotype.isHomomozygous()) {
                        metrics.HET_AS_HOM++;
                    }
                    if (expectedGenotype.isHomomozygous() && observedGenotype.isHeterozygous()) {
                        metrics.HOM_AS_HET++;
                    }
                    // Both homozygous but for different alleles.
                    if (expectedGenotype.isHomomozygous() && observedGenotype.isHomomozygous()
                            && expectedGenotype.compareTo(observedGenotype) != 0) {
                        metrics.HOM_AS_OTHER_HOM++;
                    }
                }

                // Build the detail metrics
                final FingerprintingDetailMetrics details = new FingerprintingDetailMetrics();
                details.READ_GROUP = fpr.getReadGroup();
                details.SAMPLE = EXPECTED_SAMPLE_ALIAS;
                details.SNP = lr.getSnp().getName();
                details.SNP_ALLELES = lr.getSnp().getAlleleString();
                details.CHROM = lr.getSnp().getChrom();
                details.POSITION = lr.getSnp().getPos();
                details.EXPECTED_GENOTYPE = expectedGenotype.toString();
                details.OBSERVED_GENOTYPE = observedGenotype.toString();
                details.LOD = lr.getLodGenotype();
                details.OBS_A = lr.getAllele1Count();
                details.OBS_B = lr.getAllele2Count();
                detailsFile.addMetric(details);
            }

            summaryFile.addMetric(metrics);
            log.info("Read Group: " + metrics.READ_GROUP + " / " + observedSampleAlias + " vs. " + metrics.SAMPLE + ": LOD = " + metrics.LOD_EXPECTED_SAMPLE);
        }

        summaryFile.write(outputSummaryMetricsFile);
        detailsFile.write(outputDetailMetricsFile);
        return 0;
    }

    /**
     * Rejects option combinations that only make sense for one input type:
     * IGNORE_READ_GROUPS requires SAM/BAM, OBSERVED_SAMPLE_ALIAS requires VCF.
     */
    @Override
    protected String[] customCommandLineValidation() {
        IOUtil.assertFileIsReadable(INPUT);

        final boolean isBamOrSamFile = isBamOrSamFile(INPUT);
        if (!isBamOrSamFile && IGNORE_READ_GROUPS) {
            return new String[]{"The parameter IGNORE_READ_GROUPS can only be used with BAM/SAM inputs."};
        }
        if (isBamOrSamFile && OBSERVED_SAMPLE_ALIAS != null) {
            return new String[]{"The parameter OBSERVED_SAMPLE_ALIAS can only be used with a VCF input."};
        }
        return super.customCommandLineValidation();
    }

    /** True when the file is a BAM or has the SAM extension. */
    private boolean isBamOrSamFile(final File f) {
        return (BamFileIoUtils.isBamFile(f) || f.getName().endsWith(IOUtil.SAM_FILE_EXTENSION));
    }
}
package com.astir_trotter.atcustom.ui.layout.bubble;

import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;

/**
 * A {@link Drawable} that renders a speech-bubble shape: a rectangle (optionally
 * with rounded corners) plus a triangular arrow on one of its four sides, with an
 * optional stroke outline drawn behind the fill.
 * <p>
 * Two paths are built when a stroke is requested: {@code mStrokePath} at the full
 * bounds (drawn first, in the stroke color) and {@code mPath} inset by
 * {@code strokeWidth} (drawn on top, in the bubble color), so the stroke shows as
 * a border of roughly {@code strokeWidth} around the fill.
 *
 * @author - Saori Sugiyama
 * @contact - [email protected]
 * @date - 12/2/16
 */
class Bubble extends Drawable {

    // Bounds the bubble (including the arrow) is laid out in.
    private RectF mRect;
    // Fill path and paint.
    private Path mPath = new Path();
    private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);

    // Outline path and paint; only created when strokeWidth > 0.
    private Path mStrokePath;
    private Paint mStrokePaint;

    // Geometry parameters, all in pixels. mArrowPosition is the offset of the
    // arrow along the edge it sits on (from left for TOP/BOTTOM, from top for
    // LEFT/RIGHT — inferred from the path math below; confirm against callers).
    private float mArrowWidth;
    private float mCornersRadius;
    private float mArrowHeight;
    private float mArrowPosition;
    private float mStrokeWidth;

    /**
     * Builds the bubble geometry immediately; the paths are not rebuilt on
     * bounds changes (see {@link #onBoundsChange(Rect)}).
     *
     * @param rect           bounds to lay the bubble out in
     * @param arrowWidth     width of the arrow base
     * @param cornersRadius  corner radius; {@code <= 0} produces square corners
     * @param arrowHeight    how far the arrow tip protrudes
     * @param arrowPosition  offset of the arrow along its edge
     * @param strokeWidth    outline width; {@code <= 0} disables the outline
     * @param strokeColor    outline color (ignored when strokeWidth {@code <= 0})
     * @param bubbleColor    fill color
     * @param arrowDirection which edge the arrow is attached to
     */
    public Bubble(RectF rect, float arrowWidth, float cornersRadius, float arrowHeight,
                  float arrowPosition, float strokeWidth, int strokeColor, int bubbleColor,
                  ArrowDirection arrowDirection) {
        this.mRect = rect;
        this.mArrowWidth = arrowWidth;
        this.mCornersRadius = cornersRadius;
        this.mArrowHeight = arrowHeight;
        this.mArrowPosition = arrowPosition;
        this.mStrokeWidth = strokeWidth;

        mPaint.setColor(bubbleColor);
        if (strokeWidth > 0) {
            mStrokePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
            mStrokePaint.setColor(strokeColor);
            mStrokePath = new Path();
            // Fill path inset by the stroke width; stroke path at full size.
            initPath(arrowDirection, mPath, strokeWidth);
            initPath(arrowDirection, mStrokePath, 0);
        } else {
            initPath(arrowDirection, mPath, 0);
        }
    }

    // NOTE(review): paths are computed once in the constructor from mRect; a
    // bounds change does not regenerate them.
    @Override
    protected void onBoundsChange(Rect bounds) {
        super.onBoundsChange(bounds);
    }

    /** Draws the outline (if any) first, then the fill on top. */
    @Override
    public void draw(Canvas canvas) {
        if (mStrokeWidth > 0) {
            canvas.drawPath(mStrokePath, mStrokePaint);
        }
        canvas.drawPath(mPath, mPaint);
    }

    @Override
    public int getOpacity() {
        return PixelFormat.TRANSLUCENT;
    }

    // NOTE(review): alpha/color-filter are applied to the fill paint only; the
    // stroke paint is unaffected — confirm whether that is intentional.
    @Override
    public void setAlpha(int alpha) {
        mPaint.setAlpha(alpha);
    }

    @Override
    public void setColorFilter(ColorFilter cf) {
        mPaint.setColorFilter(cf);
    }

    @Override
    public int getIntrinsicWidth() {
        return (int) mRect.width();
    }

    @Override
    public int getIntrinsicHeight() {
        return (int) mRect.height();
    }

    /**
     * Dispatches to the square or rounded path builder for the given direction.
     * Square corners are used when the radius is non-positive, or when the
     * stroke is wider than the radius (a rounded inset path would degenerate).
     *
     * @param arrowDirection edge the arrow sits on
     * @param path           path to populate (cleared state assumed)
     * @param strokeWidth    inset applied to the path (0 for the outline path)
     */
    private void initPath(ArrowDirection arrowDirection, Path path, float strokeWidth) {
        switch (arrowDirection) {
            case LEFT:
                if (mCornersRadius <= 0) {
                    initLeftSquarePath(mRect, path, strokeWidth);
                    break;
                }
                if (strokeWidth > 0 && strokeWidth > mCornersRadius) {
                    initLeftSquarePath(mRect, path, strokeWidth);
                    break;
                }
                initLeftRoundedPath(mRect, path, strokeWidth);
                break;
            case TOP:
                if (mCornersRadius <= 0) {
                    initTopSquarePath(mRect, path, strokeWidth);
                    break;
                }
                if (strokeWidth > 0 && strokeWidth > mCornersRadius) {
                    initTopSquarePath(mRect, path, strokeWidth);
                    break;
                }
                initTopRoundedPath(mRect, path, strokeWidth);
                break;
            case RIGHT:
                if (mCornersRadius <= 0) {
                    initRightSquarePath(mRect, path, strokeWidth);
                    break;
                }
                if (strokeWidth > 0 && strokeWidth > mCornersRadius) {
                    initRightSquarePath(mRect, path, strokeWidth);
                    break;
                }
                initRightRoundedPath(mRect, path, strokeWidth);
                break;
            case BOTTOM:
                if (mCornersRadius <= 0) {
                    initBottomSquarePath(mRect, path, strokeWidth);
                    break;
                }
                if (strokeWidth > 0 && strokeWidth > mCornersRadius) {
                    initBottomSquarePath(mRect, path, strokeWidth);
                    break;
                }
                initBottomRoundedPath(mRect, path, strokeWidth);
                break;
        }
    }

    /** Rounded-corner bubble with the arrow on the left edge; traced clockwise from the top-left. */
    private void initLeftRoundedPath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(mArrowWidth + rect.left + mCornersRadius + strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.width() - mCornersRadius - strokeWidth, rect.top + strokeWidth);
        // Top-right corner arc.
        path.arcTo(new RectF(rect.right - mCornersRadius, rect.top + strokeWidth,
                rect.right - strokeWidth, mCornersRadius + rect.top), 270, 90);
        path.lineTo(rect.right - strokeWidth, rect.bottom - mCornersRadius - strokeWidth);
        // Bottom-right corner arc.
        path.arcTo(new RectF(rect.right - mCornersRadius, rect.bottom - mCornersRadius,
                rect.right - strokeWidth, rect.bottom - strokeWidth), 0, 90);
        path.lineTo(rect.left + mArrowWidth + mCornersRadius + strokeWidth, rect.bottom - strokeWidth);
        // Bottom-left corner arc (offset right by the arrow width).
        path.arcTo(new RectF(rect.left + mArrowWidth + strokeWidth, rect.bottom - mCornersRadius,
                mCornersRadius + rect.left + mArrowWidth, rect.bottom - strokeWidth), 90, 90);
        // Arrow: down-slope, tip, up-slope.
        path.lineTo(rect.left + mArrowWidth + strokeWidth,
                mArrowHeight + mArrowPosition - (strokeWidth / 2));
        path.lineTo(rect.left + strokeWidth + strokeWidth, mArrowPosition + mArrowHeight / 2);
        path.lineTo(rect.left + mArrowWidth + strokeWidth, mArrowPosition + (strokeWidth / 2));
        path.lineTo(rect.left + mArrowWidth + strokeWidth, rect.top + mCornersRadius + strokeWidth);
        // Top-left corner arc.
        path.arcTo(new RectF(rect.left + mArrowWidth + strokeWidth, rect.top + strokeWidth,
                mCornersRadius + rect.left + mArrowWidth, mCornersRadius + rect.top), 180, 90);
        path.close();
    }

    /** Square-corner bubble with the arrow on the left edge. */
    private void initLeftSquarePath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(mArrowWidth + rect.left + strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.width() - strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.right - strokeWidth, rect.bottom - strokeWidth);
        path.lineTo(rect.left + mArrowWidth + strokeWidth, rect.bottom - strokeWidth);
        // Arrow: down-slope, tip, up-slope.
        path.lineTo(rect.left + mArrowWidth + strokeWidth,
                mArrowHeight + mArrowPosition - (strokeWidth / 2));
        path.lineTo(rect.left + strokeWidth + strokeWidth, mArrowPosition + mArrowHeight / 2);
        path.lineTo(rect.left + mArrowWidth + strokeWidth, mArrowPosition + (strokeWidth / 2));
        path.lineTo(rect.left + mArrowWidth + strokeWidth, rect.top + strokeWidth);
        path.close();
    }

    /** Rounded-corner bubble with the arrow on the top edge. */
    private void initTopRoundedPath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(rect.left + Math.min(mArrowPosition, mCornersRadius) + strokeWidth,
                rect.top + mArrowHeight + strokeWidth);
        path.lineTo(rect.left + mArrowPosition + (strokeWidth / 2), rect.top + mArrowHeight + strokeWidth);
        // Arrow tip.
        path.lineTo(rect.left + mArrowWidth / 2 + mArrowPosition, rect.top + strokeWidth + strokeWidth);
        path.lineTo(rect.left + mArrowWidth + mArrowPosition - (strokeWidth / 2),
                rect.top + mArrowHeight + strokeWidth);
        path.lineTo(rect.right - mCornersRadius - strokeWidth, rect.top + mArrowHeight + strokeWidth);
        // Top-right corner arc (shifted down by the arrow height).
        path.arcTo(new RectF(rect.right - mCornersRadius, rect.top + mArrowHeight + strokeWidth,
                rect.right - strokeWidth, mCornersRadius + rect.top + mArrowHeight), 270, 90);
        path.lineTo(rect.right - strokeWidth, rect.bottom - mCornersRadius - strokeWidth);
        // Bottom-right corner arc.
        path.arcTo(new RectF(rect.right - mCornersRadius, rect.bottom - mCornersRadius,
                rect.right - strokeWidth, rect.bottom - strokeWidth), 0, 90);
        path.lineTo(rect.left + mCornersRadius + strokeWidth, rect.bottom - strokeWidth);
        // Bottom-left corner arc.
        path.arcTo(new RectF(rect.left + strokeWidth, rect.bottom - mCornersRadius,
                mCornersRadius + rect.left, rect.bottom - strokeWidth), 90, 90);
        path.lineTo(rect.left + strokeWidth, rect.top + mArrowHeight + mCornersRadius + strokeWidth);
        // Top-left corner arc (shifted down by the arrow height).
        path.arcTo(new RectF(rect.left + strokeWidth, rect.top + mArrowHeight + strokeWidth,
                mCornersRadius + rect.left, mCornersRadius + rect.top + mArrowHeight), 180, 90);
        path.close();
    }

    /** Square-corner bubble with the arrow on the top edge. */
    private void initTopSquarePath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(rect.left + mArrowPosition + strokeWidth, rect.top + mArrowHeight + strokeWidth);
        path.lineTo(rect.left + mArrowPosition + (strokeWidth / 2), rect.top + mArrowHeight + strokeWidth);
        // Arrow tip.
        path.lineTo(rect.left + mArrowWidth / 2 + mArrowPosition, rect.top + strokeWidth + strokeWidth);
        path.lineTo(rect.left + mArrowWidth + mArrowPosition - (strokeWidth / 2),
                rect.top + mArrowHeight + strokeWidth);
        path.lineTo(rect.right - strokeWidth, rect.top + mArrowHeight + strokeWidth);
        path.lineTo(rect.right - strokeWidth, rect.bottom - strokeWidth);
        path.lineTo(rect.left + strokeWidth, rect.bottom - strokeWidth);
        path.lineTo(rect.left + strokeWidth, rect.top + mArrowHeight + strokeWidth);
        path.lineTo(rect.left + mArrowPosition + strokeWidth, rect.top + mArrowHeight + strokeWidth);
        path.close();
    }

    /** Rounded-corner bubble with the arrow on the right edge. */
    private void initRightRoundedPath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(rect.left + mCornersRadius + strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.width() - mCornersRadius - mArrowWidth - strokeWidth, rect.top + strokeWidth);
        // Top-right corner arc (shifted left by the arrow width).
        path.arcTo(new RectF(rect.right - mCornersRadius - mArrowWidth, rect.top + strokeWidth,
                rect.right - mArrowWidth - strokeWidth, mCornersRadius + rect.top), 270, 90);
        // Arrow: upper slope, tip, lower slope.
        path.lineTo(rect.right - mArrowWidth - strokeWidth, mArrowPosition + (strokeWidth / 2));
        path.lineTo(rect.right - strokeWidth - strokeWidth, mArrowPosition + mArrowHeight / 2);
        path.lineTo(rect.right - mArrowWidth - strokeWidth,
                mArrowPosition + mArrowHeight - (strokeWidth / 2));
        path.lineTo(rect.right - mArrowWidth - strokeWidth, rect.bottom - mCornersRadius - strokeWidth);
        // Bottom-right corner arc (shifted left by the arrow width).
        path.arcTo(new RectF(rect.right - mCornersRadius - mArrowWidth, rect.bottom - mCornersRadius,
                rect.right - mArrowWidth - strokeWidth, rect.bottom - strokeWidth), 0, 90);
        path.lineTo(rect.left + mArrowWidth + strokeWidth, rect.bottom - strokeWidth);
        // Bottom-left corner arc.
        path.arcTo(new RectF(rect.left + strokeWidth, rect.bottom - mCornersRadius,
                mCornersRadius + rect.left, rect.bottom - strokeWidth), 90, 90);
        // Top-left corner arc.
        path.arcTo(new RectF(rect.left + strokeWidth, rect.top + strokeWidth,
                mCornersRadius + rect.left, mCornersRadius + rect.top), 180, 90);
        path.close();
    }

    /** Square-corner bubble with the arrow on the right edge. */
    private void initRightSquarePath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(rect.left + strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.width() - mArrowWidth - strokeWidth, rect.top + strokeWidth);
        // Arrow: upper slope, tip, lower slope.
        path.lineTo(rect.right - mArrowWidth - strokeWidth, mArrowPosition + (strokeWidth / 2));
        path.lineTo(rect.right - strokeWidth - strokeWidth, mArrowPosition + mArrowHeight / 2);
        path.lineTo(rect.right - mArrowWidth - strokeWidth,
                mArrowPosition + mArrowHeight - (strokeWidth / 2));
        path.lineTo(rect.right - mArrowWidth - strokeWidth, rect.bottom - strokeWidth);
        path.lineTo(rect.left + strokeWidth, rect.bottom - strokeWidth);
        path.lineTo(rect.left + strokeWidth, rect.top + strokeWidth);
        path.close();
    }

    /** Rounded-corner bubble with the arrow on the bottom edge. */
    private void initBottomRoundedPath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(rect.left + mCornersRadius + strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.width() - mCornersRadius - strokeWidth, rect.top + strokeWidth);
        // Top-right corner arc.
        path.arcTo(new RectF(rect.right - mCornersRadius, rect.top + strokeWidth,
                rect.right - strokeWidth, mCornersRadius + rect.top), 270, 90);
        path.lineTo(rect.right - strokeWidth, rect.bottom - mArrowHeight - mCornersRadius - strokeWidth);
        // Bottom-right corner arc (shifted up by the arrow height).
        path.arcTo(new RectF(rect.right - mCornersRadius, rect.bottom - mCornersRadius - mArrowHeight,
                rect.right - strokeWidth, rect.bottom - mArrowHeight - strokeWidth), 0, 90);
        path.lineTo(rect.left + mArrowWidth + mArrowPosition - (strokeWidth / 2),
                rect.bottom - mArrowHeight - strokeWidth);
        // Arrow tip.
        path.lineTo(rect.left + mArrowPosition + mArrowWidth / 2,
                rect.bottom - strokeWidth - strokeWidth);
        path.lineTo(rect.left + mArrowPosition + (strokeWidth / 2),
                rect.bottom - mArrowHeight - strokeWidth);
        path.lineTo(rect.left + Math.min(mCornersRadius, mArrowPosition) + strokeWidth,
                rect.bottom - mArrowHeight - strokeWidth);
        // Bottom-left corner arc (shifted up by the arrow height).
        path.arcTo(new RectF(rect.left + strokeWidth, rect.bottom - mCornersRadius - mArrowHeight,
                mCornersRadius + rect.left, rect.bottom - mArrowHeight - strokeWidth), 90, 90);
        path.lineTo(rect.left + strokeWidth, rect.top + mCornersRadius + strokeWidth);
        // Top-left corner arc.
        path.arcTo(new RectF(rect.left + strokeWidth, rect.top + strokeWidth,
                mCornersRadius + rect.left, mCornersRadius + rect.top), 180, 90);
        path.close();
    }

    /** Square-corner bubble with the arrow on the bottom edge. */
    private void initBottomSquarePath(RectF rect, Path path, float strokeWidth) {
        path.moveTo(rect.left + strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.right - strokeWidth, rect.top + strokeWidth);
        path.lineTo(rect.right - strokeWidth, rect.bottom - mArrowHeight - strokeWidth);
        path.lineTo(rect.left + mArrowWidth + mArrowPosition - (strokeWidth / 2),
                rect.bottom - mArrowHeight - strokeWidth);
        // Arrow tip.
        path.lineTo(rect.left + mArrowPosition + mArrowWidth / 2,
                rect.bottom - strokeWidth - strokeWidth);
        path.lineTo(rect.left + mArrowPosition + (strokeWidth / 2),
                rect.bottom - mArrowHeight - strokeWidth);
        path.lineTo(rect.left + mArrowPosition + strokeWidth,
                rect.bottom - mArrowHeight - strokeWidth);
        path.lineTo(rect.left + strokeWidth, rect.bottom - mArrowHeight - strokeWidth);
        path.lineTo(rect.left + strokeWidth, rect.top + strokeWidth);
        path.close();
    }
}
/***************************************************************************
 *
 * This file is part of the 'NDEF Tools for Android' project at
 * http://code.google.com/p/ndef-tools-for-android/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ****************************************************************************/

package org.ndeftools.util.activity;

import android.app.Activity;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.nfc.NfcAdapter;
import android.os.Bundle;
import android.util.Log;

/**
 *
 * Abstract {@link Activity} for detecting incoming NFC messages.<br/><br/>
 *
 * - detects whether NFC is available (if device has NFC chip).<br/>
 * - detect whether NFC setting is on or off, and whether it changes from off to on or on to off.<br/>
 * - detect incoming data tags or beams.<br/>
 *
 * @author Thomas Rorvik Skjolberg
 *
 */
public abstract class NfcDetectorActivity extends Activity {

    /**
     * Broadcast Action: The state of the local NFC adapter has been
     * changed.
     * <p>For example, NFC has been turned on or off.
     * <p>Always contains the extra field {@link #EXTRA_ADAPTER_STATE}
     */
    public static final String ACTION_ADAPTER_STATE_CHANGED = "android.nfc.action.ADAPTER_STATE_CHANGED";

    /**
     * Used as an int extra field in {@link #ACTION_ADAPTER_STATE_CHANGED}
     * intents to request the current power state. Possible values are:
     * {@link #STATE_OFF},
     * {@link #STATE_TURNING_ON},
     * {@link #STATE_ON},
     * {@link #STATE_TURNING_OFF},
     */
    public static final String EXTRA_ADAPTER_STATE = "android.nfc.extra.ADAPTER_STATE";

    // Adapter power-state values matching the platform's hidden constants.
    public static final int STATE_OFF = 1;
    public static final int STATE_TURNING_ON = 2;
    public static final int STATE_ON = 3;
    public static final int STATE_TURNING_OFF = 4;

    private static final String TAG = NfcDetectorActivity.class.getName();

    // Filter for adapter on/off broadcasts; shared across instances.
    private static IntentFilter nfcStateChangeIntentFilter = new IntentFilter(ACTION_ADAPTER_STATE_CHANGED);

    protected NfcAdapter nfcAdapter;
    protected IntentFilter[] writeTagFilters;
    protected PendingIntent nfcPendingIntent;

    // True while foreground dispatch is enabled.
    protected boolean foreground = false;
    // Guards against processing the same intent twice across onNewIntent/onResume.
    protected boolean intentProcessed = false;
    // Last observed NFC-enabled state, used to detect changes.
    protected boolean nfcEnabled = false;

    protected BroadcastReceiver nfcStateChangeBroadcastReceiver;

    // True while the subclass has asked for NDEF detection (startDetecting()).
    protected boolean detecting = false;

    /**
     * Checks whether the device has NFC hardware and branches to
     * {@link #onNfcFeatureFound()} or {@link #onNfcFeatureNotFound()}.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        Log.d(TAG, "onCreate");

        // Check for available NFC Adapter
        PackageManager pm = getPackageManager();
        if(!pm.hasSystemFeature(PackageManager.FEATURE_NFC)) {
            Log.d(TAG, "NFC feature not found");

            onNfcFeatureNotFound();
        } else {
            Log.d(TAG, "NFC feature found");

            onNfcFeatureFound();
        }
    }

    /**
     * Notify that NFC is available
     */
    protected void onNfcFeatureFound() {
        initializeNfc();
        detectInitialNfcState();
    }

    /**
     *
     * Initialize Nfc fields
     *
     */
    protected void initializeNfc() {
        nfcAdapter = NfcAdapter.getDefaultAdapter(this);

        // Pending intent that redelivers NFC intents to this very activity
        // (single-top, so onNewIntent is used instead of a new instance).
        nfcPendingIntent = PendingIntent.getActivity(this, 0, new Intent(this, this.getClass()).addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP), 0);

        IntentFilter tagDetected = new IntentFilter(NfcAdapter.ACTION_TAG_DISCOVERED);
        IntentFilter ndefDetected = new IntentFilter(NfcAdapter.ACTION_NDEF_DISCOVERED);
        IntentFilter techDetected = new IntentFilter(NfcAdapter.ACTION_TECH_DISCOVERED);
        writeTagFilters = new IntentFilter[] {ndefDetected, tagDetected, techDetected};

        // Receiver that reacts to the adapter being switched on/off while this
        // activity is resumed (registered in onResume, unregistered in onPause).
        nfcStateChangeBroadcastReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                final int state = intent.getIntExtra(EXTRA_ADAPTER_STATE, -1);
                if(state == STATE_OFF || state == STATE_ON) {
                    // Only act on the terminal states, and on the UI thread.
                    runOnUiThread(new Runnable() {
                        public void run() {
                            if(state == STATE_ON) {
                                // Re-enable foreground dispatch if the subclass
                                // is currently detecting.
                                if(detecting) {
                                    enableForeground();
                                }
                            }
                            detectNfcStateChanges();
                        }
                    });
                }
            }
        };
    }

    /**
     *
     * Detect initial NFC state.
     *
     */
    protected void detectInitialNfcState() {
        nfcEnabled = nfcAdapter.isEnabled();
        if(nfcEnabled) {
            Log.d(TAG, "NFC is enabled");

            onNfcStateEnabled();
        } else {
            Log.d(TAG, "NFC is disabled"); // change state in wireless settings

            onNfcStateDisabled();
        }
    }

    /**
     *
     * NFC feature was found and is currently enabled
     *
     */
    protected abstract void onNfcStateEnabled();

    /**
     *
     * NFC feature was found but is currently disabled
     *
     */
    protected abstract void onNfcStateDisabled();

    /**
     * Re-enables foreground dispatch (when detecting), re-checks the adapter
     * state, registers the state-change receiver, and processes any pending
     * NFC intent exactly once.
     */
    @Override
    protected void onResume() {
        super.onResume();

        if(nfcAdapter != null) {
            // enable foreground mode if nfc is on and we have started detecting
            boolean enabled = nfcAdapter.isEnabled();
            if(enabled && detecting) {
                enableForeground();
            }

            detectNfcStateChanges();

            // for quicksettings
            startDetectingNfcStateChanges();
        }

        if(!intentProcessed) {
            intentProcessed = true;

            processIntent();
        }
    }

    /**
     *
     * NFC setting changed since last check. For example, the user enabled NFC in the wireless settings.
     *
     */
    protected abstract void onNfcStateChange(boolean enabled);

    /**
     *
     * Detect changes in NFC settings - enabled/disabled
     *
     */
    protected void detectNfcStateChanges() {
        Log.d(TAG, "Detect NFC state changes while previously " + (nfcEnabled ? "enabled" : "disabled"));

        boolean enabled = nfcAdapter.isEnabled();
        if(nfcEnabled != enabled) {
            Log.d(TAG, "NFC state change detected; NFC is now " + (enabled ? "enabled" : "disabled"));

            onNfcStateChange(enabled);

            nfcEnabled = enabled;
        } else {
            Log.d(TAG, "NFC state remains " + (enabled ? "enabled" : "disabled"));
        }
    }

    /** Registers the adapter state-change receiver (paired with stopDetectingNfcStateChanges). */
    public void startDetectingNfcStateChanges() {
        registerReceiver(nfcStateChangeBroadcastReceiver, nfcStateChangeIntentFilter);
    }

    /** Unregisters the adapter state-change receiver. */
    public void stopDetectingNfcStateChanges() {
        unregisterReceiver(nfcStateChangeBroadcastReceiver);
    }

    /** Disables foreground dispatch and stops listening for adapter state changes. */
    @Override
    protected void onPause() {
        super.onPause();

        if(nfcAdapter != null) {
            disableForeground();

            // for quicksettings
            stopDetectingNfcStateChanges();
        }
    }

    /**
     * Stores the new intent; it is processed in the subsequent onResume.
     */
    @Override
    public void onNewIntent(Intent intent) {
        Log.d(TAG, "onNewIntent");

        // onResume gets called after this to handle the intent
        intentProcessed = false;

        setIntent(intent);
    }

    /** Enables NFC foreground dispatch for this activity (idempotent). */
    protected void enableForeground() {
        if(!foreground) {
            Log.d(TAG, "Enable nfc forground mode");

            nfcAdapter.enableForegroundDispatch(this, nfcPendingIntent, writeTagFilters, null);

            foreground = true;
        }
    }

    /**
     *
     * Start detecting NDEF messages
     *
     */
    protected void startDetecting() {
        if(!detecting) {
            enableForeground();

            detecting = true;
        }
    }

    /**
     *
     * Stop detecting NDEF messages
     *
     */
    protected void stopDetecting() {
        if(detecting) {
            disableForeground();

            detecting = false;
        }
    }

    /** Disables NFC foreground dispatch (idempotent). */
    protected void disableForeground() {
        if(foreground) {
            Log.d(TAG, "Disable nfc forground mode");

            nfcAdapter.disableForegroundDispatch(this);

            foreground = false;
        }
    }

    /**
     *
     * Process the current intent, looking for NFC-related actions
     *
     */
    public void processIntent() {
        Intent intent = getIntent();

        // Check to see that the Activity started due to an Android Beam
        if (NfcAdapter.ACTION_NDEF_DISCOVERED.equals(intent.getAction())) {
            Log.d(TAG, "Process NDEF discovered action");

            nfcIntentDetected(intent, NfcAdapter.ACTION_NDEF_DISCOVERED);
        } else if (NfcAdapter.ACTION_TAG_DISCOVERED.equals(intent.getAction())) {
            Log.d(TAG, "Process TAG discovered action");

            nfcIntentDetected(intent, NfcAdapter.ACTION_TAG_DISCOVERED);
        } else if (NfcAdapter.ACTION_TECH_DISCOVERED.equals(intent.getAction())) {
            Log.d(TAG, "Process TECH discovered action");

            nfcIntentDetected(intent, NfcAdapter.ACTION_TECH_DISCOVERED);
        } else {
            Log.d(TAG, "Ignore action " + intent.getAction());
        }
    }

    /**
     *
     * Launch an activity for nfc (or wireless) settings, so that the user might enable or disable nfc
     *
     */
    protected void startNfcSettingsActivity() {
        // ACTION_NFC_SETTINGS exists from API level 16 onwards.
        if (android.os.Build.VERSION.SDK_INT >= 16) {
            startActivity(new Intent(android.provider.Settings.ACTION_NFC_SETTINGS));
        } else {
            startActivity(new Intent(android.provider.Settings.ACTION_WIRELESS_SETTINGS));
        }
    }

    /**
     *
     * Incoming NFC communication (in form of tag or beam) detected
     *
     */
    protected abstract void nfcIntentDetected(Intent intent, String action);

    /**
     *
     * This device does not have NFC hardware
     *
     */
    protected abstract void onNfcFeatureNotFound();

    public boolean isDetecting() {
        return detecting;
    }

    public void setDetecting(boolean detecting) {
        this.detecting = detecting;
    }
}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/
package org.unitime.timetable.solver.curricula;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.cpsolver.ifs.assignment.Assignment;
import org.cpsolver.ifs.assignment.DefaultSingleAssignment;
import org.cpsolver.ifs.solution.Solution;
import org.cpsolver.ifs.util.DataProperties;
import org.cpsolver.ifs.util.IdGenerator;
import org.cpsolver.ifs.util.Progress;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.unitime.timetable.model.CourseOffering;
import org.unitime.timetable.model.Curriculum;
import org.unitime.timetable.model.CurriculumClassification;
import org.unitime.timetable.model.CurriculumCourse;
import org.unitime.timetable.model.CurriculumCourseGroup;
import org.unitime.timetable.model.InstructionalOffering;
import org.unitime.timetable.model.PosMajor;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.solver.curricula.students.CurCourse;
import org.unitime.timetable.solver.curricula.students.CurModel;
import org.unitime.timetable.solver.curricula.students.CurStudent;
import org.unitime.timetable.solver.curricula.students.CurValue;
import org.unitime.timetable.solver.curricula.students.CurVariable;

/**
 * Student course demands computed from curricula: for every curriculum
 * classification a small student-sectioning model ({@link CurModel}) is built
 * and solved to distribute the classification's made-up students over its
 * courses; solved models are cached as XML on the classification. Students of
 * courses not covered by any curriculum can be supplemented from
 * {@link ProjectedStudentCourseDemands}.
 *
 * @author Tomas Muller
 */
public class CurriculaCourseDemands implements StudentCourseDemands {
	private static Log sLog = LogFactory.getLog(CurriculaCourseDemands.class);

	// Course unique id -> made-up weighted students demanding the course.
	private Hashtable<Long, Set<WeightedStudentId>> iDemands = new Hashtable<Long, Set<WeightedStudentId>>();
	// Student id -> courses the student demands.
	private Hashtable<Long, Set<WeightedCourseOffering>> iStudentRequests = new Hashtable<Long, Set<WeightedCourseOffering>>();
	// Student id -> (course id -> enrollment priority).
	private Hashtable<Long, Hashtable<Long, Double>> iEnrollmentPriorities = new Hashtable<Long, Hashtable<Long, Double>>();
	// Generator for made-up student ids (used negated, to avoid clashing with real ids).
	private IdGenerator lastStudentId = new IdGenerator();
	protected ProjectedStudentCourseDemands iFallback;
	// Course unique id -> (academic area abbreviation -> major codes); "" means all majors.
	private Hashtable<Long, Hashtable<String, Set<String>>> iLoadedCurricula = new Hashtable<Long,Hashtable<String, Set<String>>>();
	// Courses for which "other" (non-curriculum) students were already added.
	private HashSet<Long> iCheckedCourses = new HashSet<Long>();
	private boolean iIncludeOtherStudents = true;
	private boolean iSetStudentCourseLimits = false;
	private CurriculumEnrollmentPriorityProvider iEnrollmentPriorityProvider = null;
	private DataProperties iProperties = null;
	
	/**
	 * @param properties solver configuration; may be {@code null}, in which
	 *        case the defaults are kept and no projected-demands fallback is
	 *        created (previously this constructor threw a
	 *        {@link NullPointerException} for a null argument).
	 */
	public CurriculaCourseDemands(DataProperties properties) {
		iProperties = properties;
		if (properties != null) {
			iFallback = new ProjectedStudentCourseDemands(properties);
			iIncludeOtherStudents = properties.getPropertyBoolean("CurriculaCourseDemands.IncludeOtherStudents", iIncludeOtherStudents);
			iSetStudentCourseLimits = properties.getPropertyBoolean("CurriculaCourseDemands.SetStudentCourseLimits", iSetStudentCourseLimits);
		}
		iEnrollmentPriorityProvider = new DefaultCurriculumEnrollmentPriorityProvider(properties);
		if (properties != null && properties.getProperty("CurriculaCourseDemands.CurriculumEnrollmentPriorityProvider") != null) {
			try {
				iEnrollmentPriorityProvider = (CurriculumEnrollmentPriorityProvider)Class.forName(
						properties.getProperty("CurriculaCourseDemands.CurriculumEnrollmentPriorityProvider"))
						.getConstructor(DataProperties.class).newInstance(properties);
			} catch (Exception e) {
				// Fall back to the default provider, but record why.
				sLog.error("Failed to use custom enrollment priority provider: " + e.getMessage(), e);
			}
		}
	}
	
	public CurriculaCourseDemands() {
		this(null);
	}
	
	public boolean isMakingUpStudents() { return true; }
	
	public boolean canUseStudentClassEnrollmentsAsSolution() { return false; }

	public boolean isWeightStudentsToFillUpOffering() { return false; }

	/**
	 * Loads curricula relevant to the given offerings (or all curricula of the
	 * session when the offering set is large or null) and solves one
	 * {@link Initialization} per non-empty classification, possibly in parallel.
	 *
	 * @param hibSession Hibernate session used for queries and cache updates
	 * @param progress   progress reporter
	 * @param session    academic session to load curricula for
	 * @param offerings  offerings of interest; may be null
	 */
	public void init(org.hibernate.Session hibSession, Progress progress, Session session, Collection<InstructionalOffering> offerings) {
		if (iFallback != null)
			iFallback.init(hibSession, progress, session, offerings);
		
		List<Curriculum> curricula = null;
		if (offerings != null && offerings.size() <= 1000) {
			// Restrict the query to curricula touching the given courses.
			String courses = "";
			int nrCourses = 0;
			for (InstructionalOffering offering: offerings)
				for (CourseOffering course: offering.getCourseOfferings()) {
					if (!courses.isEmpty()) courses += ",";
					courses += course.getUniqueId();
					nrCourses++;
				}
			if (nrCourses > 0 && nrCourses <= 1000) {
				curricula = hibSession.createQuery(
						"select distinct c from CurriculumCourse cc inner join cc.classification.curriculum c where " +
						"c.academicArea.session.uniqueId = :sessionId and cc.course.uniqueId in (" + courses + ")")
						.setLong("sessionId", session.getUniqueId()).list();
			}
		}
		if (curricula == null) {
			curricula = hibSession.createQuery(
					"select c from Curriculum c where c.academicArea.session.uniqueId = :sessionId")
					.setLong("sessionId", session.getUniqueId()).list();
		}

		List<Initialization> inits = new ArrayList<Initialization>();
		for (Curriculum curriculum: curricula) {
			for (CurriculumClassification clasf: curriculum.getClassifications()) {
				if (clasf.getNrStudents() > 0)
					inits.add(new Initialization(clasf));
			}
		}
		// Guard against a null iProperties (possible via the no-arg constructor).
		new ParallelInitialization(
				"Loading curricula",
				iProperties == null ? 1 : iProperties.getPropertyInt("CurriculaCourseDemands.NrThreads", 1),
				inits).execute(hibSession, progress);

		if (iDemands.isEmpty()) {
			progress.warn("There are no curricula, using projected course demands instead.");
		}
	}
	
	/** Name of the root XML element under which solved models are cached. */
	protected String getCacheName() {
		return "curriculum-demands";
	}

	/**
	 * Sets pairwise target shares between all courses of a classification:
	 * 0 for courses linked by an optional group, min of the two shares for a
	 * required group, and the probabilistic product share otherwise.
	 */
	protected void computeTargetShare(CurriculumClassification clasf, CurModel model) {
		for (CurriculumCourse c1: clasf.getCourses()) {
			float x1 = c1.getPercShare() * clasf.getNrStudents();
			// group[0]: courses reachable via an "optional" relation, group[1]: "required".
			Set<CurriculumCourse>[] group = new HashSet[] { new HashSet<CurriculumCourse>(), new HashSet<CurriculumCourse>()};
			Queue<CurriculumCourse> queue = new LinkedList<CurriculumCourse>();
			queue.add(c1);
			Set<CurriculumCourseGroup> done = new HashSet<CurriculumCourseGroup>();
			// Transitive closure over course groups starting from c1.
			while (!queue.isEmpty()) {
				CurriculumCourse c = queue.poll();
				for (CurriculumCourseGroup g: c.getGroups())
					if (done.add(g))
						for (CurriculumCourse x: clasf.getCourses())
							if (!x.equals(c) && !x.equals(c1) && x.getGroups().contains(g) && group[group[0].contains(c) ? 0 : g.getType()].add(x))
								queue.add(x);
			}
			for (CurriculumCourse c2: clasf.getCourses()) {
				float x2 = c2.getPercShare() * clasf.getNrStudents();
				if (c1.getUniqueId() >= c2.getUniqueId()) continue; // each unordered pair once
				float share = c1.getPercShare() * c2.getPercShare() * clasf.getNrStudents();
				boolean opt = group[0].contains(c2);
				boolean req = !opt && group[1].contains(c2);
				model.setTargetShare(c1.getUniqueId(), c2.getUniqueId(), opt ? 0.0 : req ? Math.min(x1, x2) : share, true);
			}
		}
	}
	
	/**
	 * Returns the (made-up) students demanding the given course; when enabled,
	 * lazily augmented with projected "other" students whose area/major is not
	 * already covered by a loaded curriculum for this course.
	 */
	public Set<WeightedStudentId> getDemands(CourseOffering course) {
		if (iDemands.isEmpty()) return iFallback.getDemands(course);
		Set<WeightedStudentId> demands = iDemands.get(course.getUniqueId());
		if (!iIncludeOtherStudents) return demands;
		if (demands == null) {
			demands = new HashSet<WeightedStudentId>();
			iDemands.put(course.getUniqueId(), demands);
		}
		if (iCheckedCourses.add(course.getUniqueId())) {
			int was = demands.size();
			Hashtable<String,Set<String>> curricula = iLoadedCurricula.get(course.getUniqueId());
			Set<WeightedStudentId> other = iFallback.getDemands(course);
			if (curricula == null || curricula.isEmpty()) {
				demands.addAll(other);
			} else {
				for (WeightedStudentId student: other) {
					// Include only students not covered by a loaded curriculum.
					if (student.getArea() == null) continue; // ignore students w/o academic area
					Set<String> majors = curricula.get(student.getArea());
					if (majors != null && majors.contains("")) continue; // all majors
					if (majors == null || (student.getMajor() != null && !majors.contains(student.getMajor())))
						demands.add(student);
				}
			}
			if (demands.size() > was)
				sLog.info(course.getCourseName() + " has " + (demands.size() - was) + " other students (besides of the " + was + " curriculum students).");
		}
		return demands;
	}

	/**
	 * Courses demanded by a student; non-negative ids denote real (projected)
	 * students, negative ids the made-up curriculum students.
	 */
	public Set<WeightedCourseOffering> getCourses(Long studentId) {
		if (iIncludeOtherStudents && studentId >= 0) return iFallback.getCourses(studentId);
		return iStudentRequests.get(studentId);
	}
	
	@Override
	public Double getEnrollmentPriority(Long studentId, Long courseId) {
		Hashtable<Long, Double> priorities = iEnrollmentPriorities.get(studentId);
		return (priorities == null ? null : priorities.get(courseId));
	}
	
	/**
	 * Per-classification task: builds a {@link CurModel} for the
	 * classification (setup), solves it unless a valid cached solution exists
	 * (execute), and writes the cache and the resulting demand tables
	 * (teardown).
	 */
	public class Initialization implements ParallelInitialization.Task {
		private CurriculumClassification iClassification;
		private boolean iUpdateClassification = false;
		private CurModel iModel;
		private Hashtable<Long, CourseOffering> iCourses;
		private Assignment<CurVariable, CurValue> iAssignment;
		
		public Initialization(CurriculumClassification classification) {
			iClassification = classification;
		}
		
		/** Builds the model and tries to reuse the cached solution; runs with DB access. */
		@Override
		public void setup(org.hibernate.Session hibSession) {
			sLog.debug("Processing " + iClassification.getCurriculum().getAbbv() + " " + iClassification.getName() + " ... (" + iClassification.getNrStudents() + " students, " + iClassification.getCourses().size() + " courses)");
			
			// Create model
			List<CurStudent> students = new ArrayList<CurStudent>();
			for (long i = 0; i < iClassification.getNrStudents(); i++)
				students.add(new CurStudent(- (1 + i), 1f)); // negative ids: made-up students
			iModel = new CurModel(students);
			iCourses = new Hashtable<Long, CourseOffering>();
			for (CurriculumCourse course: iClassification.getCourses()) {
				iModel.addCourse(course.getUniqueId(), course.getCourse().getCourseName(), course.getPercShare() * iClassification.getNrStudents(), iEnrollmentPriorityProvider.getEnrollmentPriority(course));
				iCourses.put(course.getUniqueId(), course.getCourse());
				
				// Record which area/majors this curriculum covers for the course
				// (used by getDemands to filter out duplicated "other" students).
				Hashtable<String,Set<String>> curricula = iLoadedCurricula.get(course.getCourse().getUniqueId());
				if (curricula == null) {
					curricula = new Hashtable<String, Set<String>>();
					iLoadedCurricula.put(course.getCourse().getUniqueId(), curricula);
				}
				Set<String> majors = curricula.get(iClassification.getCurriculum().getAcademicArea().getAcademicAreaAbbreviation());
				if (majors == null) {
					majors = new HashSet<String>();
					curricula.put(iClassification.getCurriculum().getAcademicArea().getAcademicAreaAbbreviation(), majors);
				}
				if (iClassification.getCurriculum().getMajors().isEmpty()) {
					majors.add(""); // empty string marks "all majors"
				} else {
					for (PosMajor mj: iClassification.getCurriculum().getMajors())
						majors.add(mj.getCode());
				}
			}
			computeTargetShare(iClassification, iModel);
			if (iSetStudentCourseLimits)
				iModel.setStudentLimits();
			
			// Load model from cache (if exists)
			Solution<CurVariable, CurValue> cachedSolution = null;
			iAssignment = new DefaultSingleAssignment<CurVariable, CurValue>();
			Element cache = (iClassification.getStudents() == null ? null : iClassification.getStudents().getRootElement());
			if (cache != null && cache.getName().equals(getCacheName())) {
				cachedSolution = CurModel.loadFromXml(cache);
				if (iSetStudentCourseLimits)
					((CurModel)cachedSolution.getModel()).setStudentLimits();
			}

			// Check the cached model
			if (cachedSolution != null && ((CurModel)cachedSolution.getModel()).isSameModel(iModel)) {
				// Reuse
				sLog.debug(" using cached model...");
				iModel = ((CurModel)cachedSolution.getModel());
				iAssignment = cachedSolution.getAssignment();
			} else {
				iUpdateClassification = true;
			}
		}
		
		/** Solves the model when no valid cache was found; no DB access here. */
		@Override
		public void execute() {
			if (iUpdateClassification) {
				// Solve model
				iModel.solve(iProperties, iAssignment);
			}
		}
		
		/** Persists the cache (if re-solved) and fills the demand tables; runs with DB access. */
		@Override
		public void teardown(org.hibernate.Session hibSession) {
			if (iUpdateClassification) {
				// Save into the cache
				Document doc = DocumentHelper.createDocument();
				iModel.saveAsXml(doc.addElement(getCacheName()), iAssignment);
				// sLog.debug("Model:\n" + doc.asXML());
				iClassification.setStudents(doc);
				hibSession.update(iClassification);
			}
			
			// Save results
			String majors = "";
			for (PosMajor major: iClassification.getCurriculum().getMajors()) {
				if (!majors.isEmpty()) majors += "|";
				majors += major.getCode();
			}
			for (CurStudent s: iModel.getStudents()) {
				WeightedStudentId student = new WeightedStudentId(- lastStudentId.newId());
				student.setStats(iClassification.getCurriculum().getAcademicArea().getAcademicAreaAbbreviation(), iClassification.getAcademicClassification().getCode(), majors);
				student.setCurriculum(iClassification.getCurriculum().getAbbv());
				Set<WeightedCourseOffering> studentCourses = new HashSet<WeightedCourseOffering>();
				iStudentRequests.put(student.getStudentId(), studentCourses);
				Hashtable<Long, Double> priorities = new Hashtable<Long, Double>();
				iEnrollmentPriorities.put(student.getStudentId(), priorities);
				for (CurCourse course: s.getCourses(iAssignment)) {
					CourseOffering co = iCourses.get(course.getCourseId());
					if (course.getPriority() != null) priorities.put(co.getUniqueId(), course.getPriority());
					Set<WeightedStudentId> courseStudents = iDemands.get(co.getUniqueId());
					if (courseStudents == null) {
						courseStudents = new HashSet<WeightedStudentId>();
						iDemands.put(co.getUniqueId(), courseStudents);
					}
					courseStudents.add(student);
					studentCourses.add(new WeightedCourseOffering(co, student.getWeight()));
				}
			}
		}
	}
}
/*
 * Java OTR library
 * Copyright (C) 2008-2009 Ian Goldberg, Muhaimeen Ashraf, Andrew Chung,
 * Can Tang
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of version 2.1 of the GNU Lesser General
 * Public License as published by the Free Software Foundation.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/* Ported to otr4j by devrandom */

package net.java.otr4j.crypto;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;

import net.java.otr4j.io.OtrInputStream;
import net.java.otr4j.io.OtrOutputStream;
import net.java.otr4j.io.SerializationUtils;

/**
 * Implementation of the Socialist Millionaires' Protocol (SMP) used by OTR to
 * compare two secrets without revealing them. The five {@code stepN} methods
 * implement the message exchange between Alice (steps 1, 3, 5) and Bob
 * (steps 2a, 2b, 4); all state lives in the caller-supplied {@link SMState}.
 *
 * NOTE(review): this is security-critical code — do not reorder or "simplify"
 * the arithmetic without consulting the libotr reference implementation.
 */
public class SM {

    /** Mutable per-conversation SMP state, shared across the step methods. */
    static public class SMState{
        // secret: the value being compared; x2/x3: our DH exponents;
        // g1: fixed generator; g2/g3: combined DH generators; g3o: the other
        // party's g3 half (stored in step2a/step3); p/q: our halves of the
        // P and Q values; pab/qab: the combined ratios Pa/Pb and Qa/Qb.
        BigInteger secret, x2, x3, g1, g2, g3, g3o, p, q, pab, qab;
        // nextExpected / receivedQuestion / approved / asked are driven by the
        // surrounding session layer — presumably protocol bookkeeping; not
        // referenced by the math in this class. TODO confirm against callers.
        public int nextExpected;
        int receivedQuestion;
        public int smProgState;
        public boolean approved;
        public boolean asked;

        /**
         * Ctor.
         */
        public SMState(){
            g1 = new BigInteger(1, SM.GENERATOR_S);
            smProgState = SM.PROG_OK;
            approved = false;
            asked = false;
        }
    }

    /** Exception type for all SMP failures (bad parameters, failed proofs, I/O). */
    static public class SMException extends Exception {
        private static final long serialVersionUID = 1L;

        public SMException() {
            super("");
        }

        public SMException(Throwable cause) {
            super(cause);
        }

        public SMException(String message) {
            super(message);
        }
    };

    // States for nextExpected: which SMP message we are waiting for.
    public static final int EXPECT1 = 0;
    public static final int EXPECT2 = 1;
    public static final int EXPECT3 = 2;
    public static final int EXPECT4 = 3;
    public static final int EXPECT5 = 4;

    // Protocol progress codes stored in SMState.smProgState.
    public static final int PROG_OK = 0;
    public static final int PROG_CHEATED = -2;
    public static final int PROG_FAILED = -1;
    public static final int PROG_SUCCEEDED = 1;

    // Number of MPIs in each serialized SMP message.
    public static final int MSG1_LEN = 6;
    public static final int MSG2_LEN = 11;
    public static final int MSG3_LEN = 8;
    public static final int MSG4_LEN = 3;

    // 1536-bit prime modulus for the DH group (appears to be the RFC 3526
    // 1536-bit MODP prime — TODO confirm).
    public static final BigInteger MODULUS_S = new BigInteger(
            "FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1"+
            "29024E088A67CC74020BBEA63B139B22514A08798E3404DD"+
            "EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245"+
            "E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED"+
            "EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D"+
            "C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F"+
            "83655D23DCA3AD961C62F356208552BB9ED529077096966D"+
            "670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF", 16);
    /** MODULUS_S - 2: upper bound for valid group elements. */
    public static final BigInteger MODULUS_MINUS_2 = new BigInteger(
            "FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1"+
            "29024E088A67CC74020BBEA63B139B22514A08798E3404DD"+
            "EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245"+
            "E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED"+
            "EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D"+
            "C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F"+
            "83655D23DCA3AD961C62F356208552BB9ED529077096966D"+
            "670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFD", 16);
    /** (MODULUS_S - 1) / 2: the order of the subgroup; exponents are reduced mod this. */
    public static final BigInteger ORDER_S = new BigInteger(
            "7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68"+
            "948127044533E63A0105DF531D89CD9128A5043CC71A026E"+
            "F7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122"+
            "F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6"+
            "F71C35FDAD44CFD2D74F9208BE258FF324943328F6722D9E"+
            "E1003E5C50B1DF82CC6D241B0E2AE9CD348B1FD47E9267AF"+
            "C1B2AE91EE51D6CB0E3179AB1042A95DCF6A9483B84B4B36"+
            "B3861AA7255E4C0278BA36046511B993FFFFFFFFFFFFFFFF", 16);
    /** Group generator g1 = 2. */
    public static final byte[] GENERATOR_S = Util.hexStringToBytes("02");
    public static final int MOD_LEN_BITS = 1536;
    public static final int MOD_LEN_BYTES = 192;

    /**
     * Generate a random exponent
     *
     * @return the generated random exponent.
     */
    public static BigInteger randomExponent() {
        SecureRandom sr = new SecureRandom();
        byte[] sb = new byte[MOD_LEN_BYTES];
        sr.nextBytes(sb);
        return new BigInteger(1, sb);
    }

    /**
     * Hash one or two BigIntegers. To hash only one BigInteger, b may be set to
     * NULL. The hash is SHA-256 over the version byte followed by the MPI
     * encoding(s) of the argument(s).
     *
     * @param version the prefix to use
     * @param a The 1st BigInteger to hash.
     * @param b The 2nd BigInteger to hash.
     * @return the BigInteger for the resulting hash value.
     * @throws net.java.otr4j.crypto.SM.SMException when the SHA-256 algorithm
     * is missing or when the biginteger can't be serialized.
     */
    public static BigInteger hash(int version, BigInteger a, BigInteger b)
            throws SMException {
        try {
            MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
            sha256.update((byte)version);
            sha256.update(SerializationUtils.writeMpi(a));
            if (b != null)
                sha256.update(SerializationUtils.writeMpi(b));
            return new BigInteger(1, sha256.digest());
        } catch (NoSuchAlgorithmException e) {
            throw new SMException("cannot find SHA-256");
        } catch (IOException e) {
            throw new SMException("cannot serialize bigint");
        }
    }

    /**
     * Serialize an array of BigIntegers as a count followed by each MPI.
     *
     * @param ints the values to serialize
     * @return the serialized byte array
     * @throws SMException on serialization failure
     */
    public static byte[] serialize(BigInteger[] ints) throws SMException {
        try {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            OtrOutputStream oos = new OtrOutputStream(out);
            oos.writeInt(ints.length);
            for (BigInteger i : ints) {
                oos.writeBigInt(i);
            }
            byte[] b = out.toByteArray();
            oos.close();
            return b;
        } catch (IOException ex) {
            throw new SMException("cannot serialize bigints");
        }
    }

    /**
     * Inverse of {@link #serialize}; the element count is capped at 100 to
     * bound allocation from untrusted input.
     *
     * @param bytes the serialized form
     * @return the deserialized BigInteger array
     * @throws SMException on malformed or oversized input
     */
    public static BigInteger[] unserialize(byte[] bytes) throws SMException {
        try {
            ByteArrayInputStream in = new ByteArrayInputStream(bytes);
            OtrInputStream ois = new OtrInputStream(in);
            int len = ois.readInt();
            if (len > 100)
                throw new SMException("Too many ints");
            BigInteger[] ints = new BigInteger[len];
            for (int i = 0 ; i < len ; i++) {
                ints[i] = ois.readBigInt();
            }
            ois.close();
            return ints;
        } catch (IOException ex) {
            throw new SMException("cannot unserialize bigints");
        }
    }

    /**
     * Check that an BigInteger is in the right range to be a (non-unit) group
     * element.
     *
     * Note the INVERTED sense: this returns {@code true} when the value is
     * OUT of range (callers treat {@code true} as "invalid parameter" and
     * throw), {@code false} when it is acceptable.
     *
     * NOTE(review): the strict comparisons also reject the boundary values 2
     * and MODULUS_S-2 themselves — confirm against the libotr reference
     * before changing.
     *
     * @param g the BigInteger to check.
     * @return true if the BigInteger is OUT of range (invalid), false otherwise.
     */
    public static boolean checkGroupElem(BigInteger g) {
        return !(g.compareTo(BigInteger.valueOf(2)) > 0
                && g.compareTo(SM.MODULUS_MINUS_2) < 0);
    }

    /**
     * Check that an BigInteger is in the right range to be a (non-zero)
     * exponent.
     *
     * Note the INVERTED sense: this returns {@code true} when the value is
     * OUT of range (callers treat {@code true} as "invalid parameter" and
     * throw), {@code false} when it is acceptable.
     *
     * @param x The BigInteger to check.
     * @return true if the BigInteger is OUT of range (invalid), false otherwise.
     */
    public static boolean checkExpon(BigInteger x) {
        return !(x.compareTo(BigInteger.ONE) > 0 && x.compareTo(SM.ORDER_S) <= 0);
    }

    /**
     * Proof of knowledge of a discrete logarithm.
     *
     * @param g the group generator
     * @param x the secret information
     * @param version the prefix to use for the hashing function
     * @return c and d.
     * @throws SMException when c and d could not be calculated
     */
    public static BigInteger[] proofKnowLog(BigInteger g, BigInteger x, int version)
            throws SMException {
        BigInteger r = randomExponent();
        BigInteger temp = g.modPow(r, SM.MODULUS_S);
        BigInteger c = hash(version, temp, null);
        // d = r - x*c mod order (Schnorr-style response)
        temp = x.multiply(c).mod(ORDER_S);
        BigInteger d = r.subtract(temp).mod(ORDER_S);
        BigInteger[] ret = new BigInteger[2];
        ret[0]=c;
        ret[1]=d;
        return ret;
    }

    /**
     * Verify a proof of knowledge of a discrete logarithm. Checks that c = h(g^d x^c)
     *
     * @param c c from remote party
     * @param d d from remote party
     * @param g the group generator
     * @param x our secret information
     * @param version the prefix to use
     * @return -1, 0 or 1 as our locally calculated value of c is numerically
     * less than, equal to, or greater than {@code c}.
     * @throws SMException when something goes wrong
     */
    public static int checkKnowLog(BigInteger c, BigInteger d, BigInteger g,
            BigInteger x, int version) throws SMException {
        BigInteger gd = g.modPow(d, MODULUS_S);
        BigInteger xc = x.modPow(c, MODULUS_S);
        BigInteger gdxc = gd.multiply(xc).mod(MODULUS_S);
        BigInteger hgdxc = hash(version, gdxc, null);
        return hgdxc.compareTo(c);
    }

    /**
     * Proof of knowledge of coordinates with first components being equal
     *
     * @param state the SMP state holding g1, g2, g3 and the secret
     * @param r the random exponent used to form p and q
     * @param version hash prefix byte for domain separation
     * @return {c, d1, d2} proof triple
     * @throws SMException when the hash cannot be computed
     */
    public static BigInteger[] proofEqualCoords(SMState state, BigInteger r,
            int version) throws SMException {
        BigInteger r1 = randomExponent();
        BigInteger r2 = randomExponent();

        /* Compute the value of c, as c = h(g3^r1, g1^r1 g2^r2) */
        BigInteger temp1 = state.g1.modPow(r1, MODULUS_S);
        BigInteger temp2 = state.g2.modPow(r2, MODULUS_S);
        temp2 = temp1.multiply(temp2).mod(MODULUS_S);
        temp1 = state.g3.modPow(r1, MODULUS_S);
        BigInteger c = hash(version, temp1, temp2);

        /* Compute the d values, as d1 = r1 - r c, d2 = r2 - secret c */
        temp1 = r.multiply(c).mod(ORDER_S);
        BigInteger d1 = r1.subtract(temp1).mod(ORDER_S);
        temp1 = state.secret.multiply(c).mod(ORDER_S);
        BigInteger d2 = r2.subtract(temp1).mod(ORDER_S);

        BigInteger[] ret = new BigInteger[3];
        ret[0]=c;
        ret[1]=d1;
        ret[2]=d2;
        return ret;
    }

    /**
     * Verify a proof of knowledge of coordinates with first components being equal
     * @param c proof challenge from the remote party
     * @param d1 first proof response
     * @param d2 second proof response
     * @param p remote party's P value
     * @param q remote party's Q value
     * @param state the SMP state holding g1, g2, g3
     * @param version hash prefix byte for domain separation
     * @return 0 when the proof verifies, non-zero otherwise (compareTo result)
     * @throws SMException when the hash cannot be computed
     */
    public static int checkEqualCoords(BigInteger c, BigInteger d1,
            BigInteger d2, BigInteger p, BigInteger q, SMState state,
            int version) throws SMException {
        /* To verify, we test that hash(g3^d1 * p^c, g1^d1 * g2^d2 * q^c) = c
         * If indeed c = hash(g3^r1, g1^r1 g2^r2), d1 = r1 - r*c,
         * d2 = r2 - secret*c. And if indeed p = g3^r, q = g1^r * g2^secret
         * Then we should have that:
         * hash(g3^d1 * p^c, g1^d1 * g2^d2 * q^c)
         * = hash(g3^(r1 - r*c + r*c), g1^(r1 - r*c + q*c) *
         * g2^(r2 - secret*c + secret*c))
         * = hash(g3^r1, g1^r1 g2^r2)
         * = c
         */
        BigInteger temp2 = state.g3.modPow(d1, MODULUS_S);
        BigInteger temp3 = p.modPow(c, MODULUS_S);
        BigInteger temp1 = temp2.multiply(temp3).mod(MODULUS_S);

        temp2 = state.g1.modPow(d1, MODULUS_S);
        temp3 = state.g2.modPow(d2, MODULUS_S);
        temp2 = temp2.multiply(temp3).mod(MODULUS_S);
        temp3 = q.modPow(c, MODULUS_S);
        temp2 = temp3.multiply(temp2).mod(MODULUS_S);

        BigInteger cprime=hash(version, temp1, temp2);

        return c.compareTo(cprime);
    }

    /**
     * Proof of knowledge of logs with exponents being equal
     * @param state the SMP state holding g1, x3 and qab
     * @param version hash prefix byte for domain separation
     * @return {c, d} proof pair
     * @throws SMException when the hash cannot be computed
     */
    public static BigInteger[] proofEqualLogs(SMState state, int version)
            throws SMException {
        BigInteger r = randomExponent();

        /* Compute the value of c, as c = h(g1^r, (Qa/Qb)^r) */
        BigInteger temp1 = state.g1.modPow(r, MODULUS_S);
        BigInteger temp2 = state.qab.modPow(r, MODULUS_S);
        BigInteger c = hash(version, temp1, temp2);

        /* Compute the d values, as d = r - x3 c */
        temp1 = state.x3.multiply(c).mod(ORDER_S);
        BigInteger d = r.subtract(temp1).mod(ORDER_S);

        BigInteger[] ret = new BigInteger[2];
        ret[0]=c;
        ret[1]=d;
        return ret;
    }

    /**
     * Verify a proof of knowledge of logs with exponents being equal
     * @param c proof challenge from the remote party
     * @param d proof response from the remote party
     * @param r the remote party's R value
     * @param state the SMP state holding g1, g3o and qab
     * @param version hash prefix byte for domain separation
     * @return 0 when the proof verifies, non-zero otherwise (compareTo result)
     * @throws SMException when the hash cannot be computed
     */
    public static int checkEqualLogs(BigInteger c, BigInteger d, BigInteger r,
            SMState state, int version) throws SMException {
        /* Here, we recall the exponents used to create g3.
         * If we have previously seen g3o = g1^x where x is unknown
         * during the DH exchange to produce g3, then we may proceed with:
         *
         * To verify, we test that hash(g1^d * g3o^c, qab^d * r^c) = c
         * If indeed c = hash(g1^r1, qab^r1), d = r1- x * c
         * And if indeed r = qab^x
         * Then we should have that:
         * hash(g1^d * g3o^c, qab^d r^c)
         * = hash(g1^(r1 - x*c + x*c), qab^(r1 - x*c + x*c))
         * = hash(g1^r1, qab^r1)
         * = c
         */
        BigInteger temp2 = state.g1.modPow(d, MODULUS_S);
        BigInteger temp3 = state.g3o.modPow(c, MODULUS_S);
        BigInteger temp1 = temp2.multiply(temp3).mod(MODULUS_S);

        temp3 = state.qab.modPow(d, MODULUS_S);
        temp2 = r.modPow(c, MODULUS_S);
        temp2 = temp3.multiply(temp2).mod(MODULUS_S);

        BigInteger cprime = hash(version, temp1, temp2);

        return c.compareTo(cprime);
    }

    /** Create first message in SMP exchange. Input is Alice's secret value
     * which this protocol aims to compare to Bob's. The return value is a serialized
     * BigInteger array whose elements correspond to the following:
     * [0] = g2a, Alice's half of DH exchange to determine g2
     * [1] = c2, [2] = d2, Alice's ZK proof of knowledge of g2a exponent
     * [3] = g3a, Alice's half of DH exchange to determine g3
     * [4] = c3, [5] = d3, Alice's ZK proof of knowledge of g3a exponent
     * @param astate Alice's SMP state (mutated)
     * @param secret Alice's secret value (raw bytes)
     * @return the serialized SMP message 1
     * @throws SMException when proof generation or serialization fails
     */
    public static byte[] step1(SMState astate, byte[] secret) throws SMException {
        /* Initialize the sm state or update the secret */
        //Util.checkBytes("secret", secret);
        BigInteger secret_mpi = new BigInteger(1, secret);

        astate.secret = secret_mpi;
        astate.receivedQuestion = 0;
        astate.x2 = randomExponent();
        astate.x3 = randomExponent();

        BigInteger[] msg1 = new BigInteger[6];
        msg1[0] = astate.g1.modPow(astate.x2, MODULUS_S);
        BigInteger[] res = proofKnowLog(astate.g1, astate.x2, 1);
        msg1[1]=res[0];
        msg1[2]=res[1];

        msg1[3] = astate.g1.modPow(astate.x3, MODULUS_S);
        res = proofKnowLog(astate.g1, astate.x3, 2);
        msg1[4]=res[0];
        msg1[5]=res[1];

        byte[] ret = serialize(msg1);
        astate.smProgState = PROG_OK;

        return ret;
    }

    /** Receive the first message in SMP exchange, which was generated by
     * step1. Input is saved until the user inputs their secret
     * information. No output.
     * @param bstate Bob's SMP state (mutated)
     * @param input serialized SMP message 1 from Alice
     * @param received_question whether the message carried a question
     * @throws SMException when the message is malformed or a proof fails
     */
    public static void step2a(SMState bstate, byte[] input, int received_question)
            throws SMException {
        /* Initialize the sm state if needed */
        bstate.receivedQuestion = received_question;
        // Pessimistically mark as cheated; reset to PROG_OK only on success.
        bstate.smProgState = PROG_CHEATED;

        /* Read from input to find the mpis */
        BigInteger[] msg1 = unserialize(input);

        if (checkGroupElem(msg1[0]) || checkExpon(msg1[2])
                || checkGroupElem(msg1[3]) || checkExpon(msg1[5])) {
            throw new SMException("Invalid parameter");
        }

        /* Store Alice's g3a value for later in the protocol */
        bstate.g3o=msg1[3];

        /* Verify Alice's proofs */
        if (checkKnowLog(msg1[1], msg1[2], bstate.g1, msg1[0], 1)!=0
                ||checkKnowLog(msg1[4], msg1[5], bstate.g1, msg1[3], 2)!=0) {
            throw new SMException("Proof checking failed");
        }

        /* Create Bob's half of the generators g2 and g3 */
        bstate.x2 = randomExponent();
        bstate.x3 = randomExponent();

        /* Combine the two halves from Bob and Alice and determine g2 and g3 */
        bstate.g2= msg1[0].modPow(bstate.x2, MODULUS_S);
        //Util.checkBytes("g2b", bstate.g2.getValue());
        bstate.g3= msg1[3].modPow(bstate.x3, MODULUS_S);
        //Util.checkBytes("g3b", bstate.g3.getValue());

        bstate.smProgState = PROG_OK;
    }

    /** Create second message in SMP exchange. Input is Bob's secret value.
     * Information from earlier steps in the exchange is taken from Bob's
     * state. Output is a serialized mpi array whose elements correspond
     * to the following:
     * [0] = g2b, Bob's half of DH exchange to determine g2
     * [1] = c2, [2] = d2, Bob's ZK proof of knowledge of g2b exponent
     * [3] = g3b, Bob's half of DH exchange to determine g3
     * [4] = c3, [5] = d3, Bob's ZK proof of knowledge of g3b exponent
     * [6] = pb, [7] = qb, Bob's halves of the (Pa/Pb) and (Qa/Qb) values
     * [8] = cp, [9] = d5, [10] = d6, Bob's ZK proof that pb, qb formed correctly
     * @param bstate Bob's SMP state (mutated)
     * @param secret Bob's secret value (raw bytes)
     * @return the serialized SMP message 2
     * @throws SMException when proof generation or serialization fails
     */
    public static byte[] step2b(SMState bstate, byte[] secret) throws SMException {
        /* Convert the given secret to the proper form and store it */
        //Util.checkBytes("secret", secret);
        BigInteger secret_mpi = new BigInteger(1, secret);
        bstate.secret = secret_mpi;

        BigInteger[] msg2 = new BigInteger[11];
        msg2[0] = bstate.g1.modPow(bstate.x2, MODULUS_S);
        BigInteger[] res = proofKnowLog(bstate.g1,bstate.x2,3);
        msg2[1]=res[0];
        msg2[2]=res[1];

        msg2[3] = bstate.g1.modPow(bstate.x3, MODULUS_S);
        res = proofKnowLog(bstate.g1,bstate.x3,4);
        msg2[4]=res[0];
        msg2[5]=res[1];

        /* Calculate P and Q values for Bob */
        BigInteger r = randomExponent();
        //BigInteger r = new BigInteger(SM.GENERATOR_S);

        bstate.p = bstate.g3.modPow(r, MODULUS_S);
        //Util.checkBytes("Pb", bstate.p.getValue());
        msg2[6]=bstate.p;
        BigInteger qb1 = bstate.g1.modPow(r, MODULUS_S);
        //Util.checkBytes("Qb1", qb1.getValue());
        BigInteger qb2 = bstate.g2.modPow(bstate.secret, MODULUS_S);
        //Util.checkBytes("Qb2", qb2.getValue());
        //Util.checkBytes("g2", bstate.g2.getValue());
        //Util.checkBytes("secret", bstate.secret.getValue());
        bstate.q = qb1.multiply(qb2).mod(MODULUS_S);
        //Util.checkBytes("Qb", bstate.q.getValue());
        msg2[7] = bstate.q;

        res = proofEqualCoords(bstate, r, 5);
        msg2[8]=res[0];
        msg2[9]=res[1];
        msg2[10]=res[2];

        /* Convert to serialized form */
        return serialize(msg2);
    }

    /** Create third message in SMP exchange. Input is a message generated
     * by otrl_sm_step2b. Output is a serialized mpi array whose elements
     * correspond to the following:
     * [0] = pa, [1] = qa, Alice's halves of the (Pa/Pb) and (Qa/Qb) values
     * [2] = cp, [3] = d5, [4] = d6, Alice's ZK proof that pa, qa formed correctly
     * [5] = ra, calculated as (Qa/Qb)^x3 where x3 is the exponent used in g3a
     * [6] = cr, [7] = d7, Alice's ZK proof that ra is formed correctly
     * @param astate Alice's SMP state (mutated)
     * @param input serialized SMP message 2 from Bob
     * @return the serialized SMP message 3
     * @throws SMException when the message is malformed or a proof fails
     */
    public static byte[] step3(SMState astate, byte[] input) throws SMException {
        /* Read from input to find the mpis */
        // Pessimistically mark as cheated; reset to PROG_OK only on success.
        astate.smProgState = PROG_CHEATED;

        BigInteger[] msg2 = unserialize(input);
        if (checkGroupElem(msg2[0]) || checkGroupElem(msg2[3])
                || checkGroupElem(msg2[6]) || checkGroupElem(msg2[7])
                || checkExpon(msg2[2]) || checkExpon(msg2[5])
                || checkExpon(msg2[9]) || checkExpon(msg2[10])) {
            throw new SMException("Invalid Parameter");
        }

        BigInteger[] msg3 = new BigInteger[8];

        /* Store Bob's g3a value for later in the protocol */
        astate.g3o = msg2[3];

        /* Verify Bob's knowledge of discreet log proofs */
        if (checkKnowLog(msg2[1], msg2[2], astate.g1, msg2[0], 3)!=0
                || checkKnowLog(msg2[4], msg2[5], astate.g1, msg2[3], 4)!=0) {
            throw new SMException("Proof checking failed");
        }

        /* Combine the two halves from Bob and Alice and determine g2 and g3 */
        astate.g2 = msg2[0].modPow(astate.x2, MODULUS_S);
        //Util.checkBytes("g2a", astate.g2.getValue());
        astate.g3 = msg2[3].modPow(astate.x3, MODULUS_S);
        //Util.checkBytes("g3a", astate.g3.getValue());

        /* Verify Bob's coordinate equality proof */
        if (checkEqualCoords(msg2[8], msg2[9], msg2[10], msg2[6], msg2[7], astate, 5)!=0)
            throw new SMException("Invalid Parameter");

        /* Calculate P and Q values for Alice */
        BigInteger r = randomExponent();
        //BigInteger r = new BigInteger(SM.GENERATOR_S);

        astate.p = astate.g3.modPow(r, MODULUS_S);
        //Util.checkBytes("Pa", astate.p.getValue());
        msg3[0]=astate.p;
        BigInteger qa1 = astate.g1.modPow(r, MODULUS_S);
        //Util.checkBytes("Qa1", qa1.getValue());
        BigInteger qa2 = astate.g2.modPow(astate.secret, MODULUS_S);
        //Util.checkBytes("Qa2", qa2.getValue());
        //Util.checkBytes("g2", astate.g2.getValue());
        //Util.checkBytes("secret", astate.secret.getValue());
        astate.q = qa1.multiply(qa2).mod(MODULUS_S);
        msg3[1] = astate.q;
        //Util.checkBytes("Qa", astate.q.getValue());

        BigInteger[] res = proofEqualCoords(astate,r,6);
        msg3[2] = res[0];
        msg3[3] = res[1];
        msg3[4] = res[2];

        /* Calculate Ra and proof */
        BigInteger inv = msg2[6].modInverse(MODULUS_S);
        astate.pab = astate.p.multiply(inv).mod(MODULUS_S);
        inv = msg2[7].modInverse(MODULUS_S);
        astate.qab = astate.q.multiply(inv).mod(MODULUS_S);
        msg3[5] = astate.qab.modPow(astate.x3, MODULUS_S);
        res = proofEqualLogs(astate, 7);
        msg3[6]=res[0];
        msg3[7]=res[1];

        byte[] output = serialize(msg3);

        astate.smProgState = PROG_OK;
        return output;
    }

    /** Create final message in SMP exchange. Input is a message generated
     * by otrl_sm_step3. Output is a serialized mpi array whose elements
     * correspond to the following:
     * [0] = rb, calculated as (Qa/Qb)^x3 where x3 is the exponent used in g3b
     * [1] = cr, [2] = d7, Bob's ZK proof that rb is formed correctly
     * This method also checks if Alice and Bob's secrets were the same. If
     * so, it returns NO_ERROR. If the secrets differ, an INV_VALUE error is
     * returned instead.
     *
     * @param bstate Bob's SMP state (mutated; smProgState is set to
     *        PROG_SUCCEEDED or PROG_FAILED according to the comparison)
     * @param input serialized SMP message 3 from Alice
     * @return the serialized SMP message 4
     * @throws SMException when the message is malformed or a proof fails
     */
    public static byte[] step4(SMState bstate, byte[] input) throws SMException {
        /* Read from input to find the mpis */
        BigInteger[] msg3 = unserialize(input);

        // Pessimistically mark as cheated until all checks pass.
        bstate.smProgState = PROG_CHEATED;

        BigInteger[] msg4 = new BigInteger[3];

        if (checkGroupElem(msg3[0]) || checkGroupElem(msg3[1])
                || checkGroupElem(msg3[5]) || checkExpon(msg3[3])
                || checkExpon(msg3[4]) || checkExpon(msg3[7])) {
            throw new SMException("Invalid Parameter");
        }

        /* Verify Alice's coordinate equality proof */
        if (checkEqualCoords(msg3[2], msg3[3], msg3[4], msg3[0], msg3[1], bstate, 6)!=0)
            throw new SMException("Invalid Parameter");

        /* Find Pa/Pb and Qa/Qb */
        BigInteger inv = bstate.p.modInverse(MODULUS_S);
        bstate.pab = msg3[0].multiply(inv).mod(MODULUS_S);
        inv = bstate.q.modInverse(MODULUS_S);
        bstate.qab = msg3[1].multiply(inv).mod(MODULUS_S);

        /* Verify Alice's log equality proof */
        if (checkEqualLogs(msg3[6], msg3[7], msg3[5], bstate, 7)!=0){
            throw new SMException("Proof checking failed");
        }

        /* Calculate Rb and proof */
        msg4[0] = bstate.qab.modPow(bstate.x3, MODULUS_S);
        BigInteger[] res = proofEqualLogs(bstate,8);
        msg4[1]=res[0];
        msg4[2]=res[1];

        byte[] output = serialize(msg4);

        /* Calculate Rab and verify that secrets match */
        BigInteger rab = msg3[5].modPow(bstate.x3, MODULUS_S);
        //Util.checkBytes("rab", rab.getValue());
        //Util.checkBytes("pab", bstate.pab.getValue());
        int comp = rab.compareTo(bstate.pab);

        bstate.smProgState = (comp!=0) ? PROG_FAILED : PROG_SUCCEEDED;

        return output;
    }

    /** Receives the final SMP message, which was generated in otrl_sm_step.
     * This method checks if Alice and Bob's secrets were the same. If
     * so, it returns NO_ERROR. If the secrets differ, an INV_VALUE error is
     * returned instead.
     * @param astate Alice's SMP state (mutated; smProgState is set to
     *        PROG_SUCCEEDED or PROG_FAILED according to the comparison)
     * @param input serialized SMP message 4 from Bob
     * @throws SMException when the message is malformed or a proof fails
     */
    public static void step5(SMState astate, byte[] input) throws SMException {
        /* Read from input to find the mpis */
        BigInteger[] msg4 = unserialize(input);
        // Pessimistically mark as cheated until all checks pass.
        astate.smProgState = PROG_CHEATED;

        if (checkGroupElem(msg4[0])|| checkExpon(msg4[2])) {
            throw new SMException("Invalid Parameter");
        }

        /* Verify Bob's log equality proof */
        if (checkEqualLogs(msg4[1], msg4[2], msg4[0], astate, 8)!=0)
            throw new SMException("Invalid Parameter");

        /* Calculate Rab and verify that secrets match */
        BigInteger rab = msg4[0].modPow(astate.x3, MODULUS_S);
        //Util.checkBytes("rab", rab.getValue());
        //Util.checkBytes("pab", astate.pab.getValue());
        int comp = rab.compareTo(astate.pab);
        if (comp!=0){
            //System.out.println("checking failed");
        }

        astate.smProgState = (comp!=0) ? PROG_FAILED : PROG_SUCCEEDED;

        return;
    }

    // ***************************************************
    // Session stuff - perhaps factor out

    /**
     * Smoke test: runs a full five-step SMP exchange between two fresh states
     * with the same secret on both sides.
     */
    public static void main(String[] args) throws SMException {
        BigInteger res = SM.MODULUS_MINUS_2.subtract(SM.MODULUS_S).mod(SM.MODULUS_S);
        String ss = Util.bytesToHexString(res.toByteArray());
        System.out.println(ss);

        byte[] secret1 = "abcdef".getBytes();
        SMState a = new SMState();
        SMState b = new SMState();
        byte[] msg1 = SM.step1(a, secret1);
        SM.step2a(b, msg1, 123);
        byte[] msg2 = SM.step2b(b, secret1);
        byte[] msg3 = SM.step3(a, msg2);
        byte[] msg4 = SM.step4(b, msg3);
        SM.step5(a, msg4);
    }
}
package vajdaz;

import java.awt.AWTException;
import java.awt.Image;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.Toolkit;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import vajdaz.util.HelperFunctions;
import vajdaz.util.PersistentProperties;

/**
 * System-tray "traffic light" gadget that tracks the daily working time.
 *
 * A red icon means more than one hour of net work time is left, red-yellow
 * means less than one hour, green means the configured net work time has been
 * reached. The icon is refreshed once a minute by a scheduled task; start and
 * break times are persisted in a properties file so a restart on the same day
 * resumes the running counters.
 */
public class WorkTimeMonitor {

    private static final String CONFIG_FILENAME = "vajdaz.WorktimeMonitor.config";

    // Tray images for the three states (loaded once from the classpath).
    // Note: identity comparison (==) against these singletons is used below to
    // avoid redundant setImage()/notification calls.
    private static final Image image_red;
    private static final Image image_red_yellow;
    private static final Image image_green;
    static {
        try {
            image_red = javax.imageio.ImageIO
                    .read(WorkTimeMonitor.class.getClassLoader().getResource("traffic_light_red.png"));
            image_red_yellow = javax.imageio.ImageIO
                    .read(WorkTimeMonitor.class.getClassLoader().getResource("traffic_light_red_yellow.png"));
            image_green = javax.imageio.ImageIO
                    .read(WorkTimeMonitor.class.getClassLoader().getResource("traffic_light_green.png"));
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException("Error: could not load icon images.");
        }
    }

    private TrayIcon trayIcon = null;
    private PersistentProperties props = new WorkTimeMonitorProperties(CONFIG_FILENAME);
    private ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();

    /**
     * Installs the tray icon with its popup menu.
     *
     * @throws RuntimeException if the system tray is unsupported or the icon
     *         cannot be added
     */
    public WorkTimeMonitor() {
        if (!SystemTray.isSupported()) {
            throw new RuntimeException("Error: Systray not supported.");
        }
        try {
            trayIcon = new TrayIcon(image_red, "", createMenu());
            trayIcon.setImageAutoSize(true);
            SystemTray.getSystemTray().add(trayIcon);
        } catch (AWTException e) {
            e.printStackTrace();
            throw new RuntimeException("Error: could not initialize tray icon.");
        }
    }

    /**
     * Builds the tray icon's popup menu: quit, reload settings, open the
     * configuration file in the configured editor, and reset the break time.
     */
    private PopupMenu createMenu() {
        // Root popup menu when right clicking the icon
        PopupMenu popup = new PopupMenu();

        // Menu item for closing the application
        MenuItem menuItem = new MenuItem("Beenden");
        menuItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent event) {
                try {
                    SystemTray.getSystemTray().remove(trayIcon);
                    executor.shutdown();
                    executor.awaitTermination(60, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    // Restore the interrupt status before exiting.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                }
                System.exit(0);
            }
        });
        popup.add(menuItem);

        // Menu item for reloading configuration data
        menuItem = new MenuItem("Einstellungen neu laden");
        menuItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent event) {
                props.refresh();
                WorkTimeMonitor.this.updateTrayIcon();
            }
        });
        popup.add(menuItem);

        // Menu item for opening configuration file
        menuItem = new MenuItem("Konfigurationsdatei \u00F6ffnen");
        menuItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent event) {
                try {
                    // NOTE(review): Runtime.exec(String) tokenizes the command
                    // on whitespace — an editor path containing spaces will
                    // break. Consider ProcessBuilder with an argument list.
                    Process proc = Runtime.getRuntime()
                            .exec(props.getProperty(WorkTimeMonitorProperties.EDITOR) + " " + CONFIG_FILENAME);
                    proc.waitFor();
                    if (proc.exitValue() == 0) {
                        // Editor exited cleanly — pick up any edits.
                        props.refresh();
                        WorkTimeMonitor.this.updateTrayIcon();
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        popup.add(menuItem);

        // Menu item for resetting the break time
        menuItem = new MenuItem("Pausenzeit zur\u00FCcksetzen");
        menuItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent event) {
                WorkTimeMonitor.this.resetBreakTime();
                WorkTimeMonitor.this.updateTrayIcon();
            }
        });
        popup.add(menuItem);

        return popup;
    }

    /**
     * Returns {@code true} when the persisted {@code startTime} falls on
     * today's date, i.e. the application was already started today and the
     * running counters should be kept.
     */
    private boolean didThisRunToday() {
        SimpleDateFormat sdf = new SimpleDateFormat();
        String sLastRun = props.getProperty("startTime", "");
        if (sLastRun.length() == 0) {
            return false;
        }
        Calendar calLastRun = Calendar.getInstance();
        try {
            calLastRun.setTime(sdf.parse(sLastRun));
        } catch (java.text.ParseException e) {
            System.out.println("Warning: Parse error in Properties.");
            return false;
        }
        Calendar calNow = Calendar.getInstance();
        return (calNow.get(Calendar.YEAR) == calLastRun.get(Calendar.YEAR))
                && (calNow.get(Calendar.MONTH) == calLastRun.get(Calendar.MONTH))
                && (calNow.get(Calendar.DAY_OF_MONTH) == calLastRun.get(Calendar.DAY_OF_MONTH));
    }

    /**
     * Starts the once-a-minute refresh task. On the first start of a day the
     * persisted start and break times are reset; a shutdown hook drains the
     * executor on JVM exit.
     */
    private void startup() {
        if (!didThisRunToday()) {
            resetStartTime();
            resetBreakTime();
        }
        // Refresh immediately, then every 60 000 ms (one minute).
        executor.scheduleAtFixedRate(new Runnable() {
            public void run() {
                WorkTimeMonitor.this.updateTrayIcon();
            }
        }, 0, 60000, TimeUnit.MILLISECONDS);
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                try {
                    executor.shutdown();
                    executor.awaitTermination(60, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    // Restore the interrupt status; the JVM is exiting anyway.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Persists "now" as today's work start time.
     *
     * @return the new start time in epoch milliseconds
     */
    private long resetStartTime() {
        Date now = new Date();
        props.setPropertyPersistent("startTime", new SimpleDateFormat().format(now));
        return now.getTime();
    }

    /**
     * Persists the next break time as one hour from now.
     *
     * @return the new break time in epoch milliseconds
     */
    private long resetBreakTime() {
        Date now = new Date();
        Calendar cal = Calendar.getInstance();
        cal.setTime(now);
        cal.add(Calendar.HOUR_OF_DAY, 1);
        props.setPropertyPersistent("breakTime", new SimpleDateFormat().format(cal.getTime()));
        return cal.getTimeInMillis();
    }

    /**
     * If the persisted break time has elapsed, schedules the next break one
     * hour from now.
     *
     * @return {@code true} when the break time was due (caller should notify
     *         the user), {@code false} otherwise
     */
    private boolean moveBreakTime() {
        Date now = new Date();
        Date breakTime = null;
        try {
            breakTime = new SimpleDateFormat().parse(props.getProperty("breakTime"));
        } catch (Exception e) {
            // Missing/unparsable value — reinitialize and report "no break due".
            resetBreakTime();
            return false;
        }
        if (now.getTime() > breakTime.getTime()) {
            Calendar cal = Calendar.getInstance();
            cal.setTime(now);
            cal.add(Calendar.HOUR_OF_DAY, 1);
            props.setPropertyPersistent("breakTime", new SimpleDateFormat().format(cal.getTime()));
            return true;
        }
        return false;
    }

    /**
     * Recomputes the work-time counters, switches the traffic-light image when
     * a threshold is crossed (with a balloon notification and a beep), handles
     * due breaks, and refreshes the tooltip. Runs once a minute.
     */
    private void updateTrayIcon() {
        Date startTime = null;
        try {
            startTime = new SimpleDateFormat().parse(props.getProperty("startTime"));
        } catch (java.text.ParseException e) {
            throw new RuntimeException(e);
        }
        Date now = new Date();
        // All durations below are in seconds.
        long grossWorktimeDone = (now.getTime() - startTime.getTime()) / 1000;
        long lunchTime = (long) (3600.0 * Double.parseDouble(props.getProperty("lunchTimeSpan", "1")));
        long netWorktimeDone = grossWorktimeDone - lunchTime;
        long netWorktime = (long) (3600.0 * Double.parseDouble(props.getProperty("worktimeHours", "8")));
        // Traffic light: red (> 1h left), red-yellow (last hour), green (done).
        if ((netWorktimeDone <= netWorktime - 3600) && (trayIcon.getImage() != image_red)) {
            trayIcon.setImage(image_red);
        } else if ((netWorktimeDone > netWorktime - 3600) && (netWorktimeDone <= netWorktime)
                && (trayIcon.getImage() != image_red_yellow)) {
            trayIcon.setImage(image_red_yellow);
            trayIcon.displayMessage("Worktime Monitor", "Noch eine Stunde", TrayIcon.MessageType.INFO);
            Toolkit.getDefaultToolkit().beep();
        } else if ((netWorktimeDone > netWorktime) && (trayIcon.getImage() != image_green)) {
            trayIcon.setImage(image_green);
            trayIcon.displayMessage("Worktime Monitor", "Go, go, go!", TrayIcon.MessageType.INFO);
            Toolkit.getDefaultToolkit().beep();
        }
        boolean breakTimeNotification = false;
        if (moveBreakTime()) {
            breakTimeNotification = true;
        }
        long nextBreakInSeconds = 0;
        try {
            nextBreakInSeconds = new SimpleDateFormat().parse(props.getProperty("breakTime")).getTime() / 1000;
        } catch (Exception e) {
            nextBreakInSeconds = resetBreakTime() / 1000;
        }
        // HelperFunctions.splitTimeDiff is assumed to return
        // {sign, days?, hours, minutes} — TODO confirm; indices 2/3 are used
        // as hours/minutes below, index 0 as the sign.
        long[] split = HelperFunctions.splitTimeDiff(netWorktimeDone);
        String sTimeAll = String.format("Netto Arbeit: " + (split[0] < 0 ? "-" : "") + "%d:%02d",
                new Object[] { Long.valueOf(split[2]), Long.valueOf(split[3]) });
        split = HelperFunctions.splitTimeDiff(netWorktime - netWorktimeDone);
        String sTimeRemaining = String.format((split[0] < 0 ? "\u00DCberzug " : "Restzeit: ") + "%d:%02d",
                new Object[] { Long.valueOf(split[2]), Long.valueOf(split[3]) });
        split = HelperFunctions.splitTimeDiff(nextBreakInSeconds - now.getTime() / 1000);
        String sNextBreak = String.format(split[0] < 0 ? "N\u00E4chste Pause: -" : "N\u00E4chste Pause: %d:%02d",
                new Object[] { Long.valueOf(split[2]), Long.valueOf(split[3]) });
        split = HelperFunctions.splitTimeDiff(lunchTime);
        String sLunchTime = String.format("Mittagspause: %d:%02d",
                new Object[] { Long.valueOf(split[2]), Long.valueOf(split[3]) });
        if (breakTimeNotification) {
            trayIcon.displayMessage("Worktime Monitor", "Pause!", TrayIcon.MessageType.INFO);
            Toolkit.getDefaultToolkit().beep();
        }
        trayIcon.setToolTip(sTimeAll + "\n" + sTimeRemaining + "\n" + sNextBreak + "\n" + sLunchTime);
    }

    /** Entry point: creates the tray gadget and starts the refresh schedule. */
    public static void main(String[] args) {
        WorkTimeMonitor gadget = new WorkTimeMonitor();
        gadget.startup();
    }
}
package org.jenkinsci.plugins.workflow.steps;

import static org.junit.Assert.assertTrue;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.model.Run;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import jenkins.model.Jenkins;
import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
import org.jenkinsci.plugins.workflow.cps.nodes.StepNode;
import org.jenkinsci.plugins.workflow.flow.FlowExecution;
import org.jenkinsci.plugins.workflow.graph.FlowGraphWalker;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.job.WorkflowJob;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
import org.jenkinsci.plugins.workflow.test.steps.SemaphoreStep;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.BuildWatcher;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.TestExtension;
import org.kohsuke.stapler.DataBoundConstructor;
import com.google.inject.Inject;

/**
 * Tests for {@link AbstractSynchronousNonBlockingStepExecution} behavior: the test step
 * {@code syncnonblocking} (defined as a nested class below) blocks inside its {@code run()}
 * method until the test thread releases it via {@link SynchronousNonBlockingStep#notify(String)},
 * letting each test observe the pipeline while the step is mid-execution.
 */
public class SynchronousNonBlockingStepTest {

    /** Fresh Jenkins instance per test. */
    @Rule
    public JenkinsRule j = new JenkinsRule();

    /** Echoes build logs of all running builds to stderr for easier diagnosis. */
    @ClassRule
    public static BuildWatcher buildWatcher = new BuildWatcher();

    /**
     * Verifies that while the step is paused: its FlowNode is already in the flow graph,
     * its listener output is flushed, and subsequent pipeline steps have not run yet.
     * After the step is released, the build completes successfully.
     */
    @Test
    public void basicNonBlockingStep() throws Exception {
        WorkflowJob p = j.jenkins.createProject(WorkflowJob.class, "p");
        p.setDefinition(new CpsFlowDefinition("node {\n" +
            "echo 'First message'\n" +
            "syncnonblocking 'wait'\n" +
            "echo 'Second message'\n" +
        "}"));
        WorkflowRun b = p.scheduleBuild2(0).getStartCondition().get();

        // Wait for syncnonblocking to be started
        System.out.println("Waiting to syncnonblocking to start...");
        SynchronousNonBlockingStep.waitForStart("wait", b);

        // At this point the execution is paused inside the synchronous non-blocking step
        // Check for FlowNode created
        FlowGraphWalker walker = new FlowGraphWalker(b.getExecution());
        boolean found = false;
        // TODO: use iterator when https://github.com/jenkinsci/workflow-plugin/pull/178 merged
        for (FlowNode n = walker.next(); n != null; n = walker.next()) {
            if (n instanceof StepNode && ((StepNode) n).getDescriptor() instanceof SynchronousNonBlockingStep.DescriptorImpl) {
                found = true;
                break;
            }
        }
        System.out.println("Checking flow node added...");
        assertTrue("FlowNode has to be added just when the step starts running", found);

        // Check for message the test message sent to context listener
        System.out.println("Checking build log message present...");
        j.waitForMessage("Test Sync Message", b);

        // The last step did not run yet
        j.assertLogContains("First message", b);
        j.assertLogNotContains("Second message", b);

        // Let syncnonblocking to continue
        SynchronousNonBlockingStep.notify("wait");

        System.out.println("Waiting until syncnonblocking (and the full flow) finishes");
        j.waitForCompletion(b);
        System.out.println("Build finished. Continue.");
        // Check for the last message
        j.assertLogContains("Second message", b);
        j.assertBuildStatusSuccess(b);
    }

    /**
     * Verifies that interrupting the flow while the step is blocked surfaces an
     * InterruptedException that the pipeline script can catch, after which the build
     * runs to completion.
     */
    @Test
    public void interruptedTest() throws Exception {
        WorkflowJob p = j.jenkins.createProject(WorkflowJob.class, "p");
        p.setDefinition(new CpsFlowDefinition("node {\n" +
            "echo 'First message'\n" +
            "try { syncnonblocking 'wait' } catch(InterruptedException e) { echo 'Interrupted!' }\n" +
            "echo 'Second message'\n" +
        "}"));
        WorkflowRun b = p.scheduleBuild2(0).getStartCondition().get();

        // Wait for syncnonblocking to be started
        System.out.println("Waiting to syncnonblocking to start...");
        SynchronousNonBlockingStep.waitForStart("wait", b);

        // At this point syncnonblocking is waiting for an interruption
        FlowExecution e = b.getExecutionPromise().get();
        // Let's force a call to stop. This will try to send an interruption to the run Thread
        e.interrupt(Result.ABORTED);
        System.out.println("Looking for interruption received log message");
        j.waitForMessage("Interrupted!", b);
        j.waitForCompletion(b);
    }

    /**
     * Verifies that a blocked syncnonblocking step in one parallel branch does not
     * prevent the sibling branch from running to completion.
     */
    @Test
    public void parallelTest() throws Exception {
        WorkflowJob p = j.jenkins.createProject(WorkflowJob.class, "p");
        p.setDefinition(new CpsFlowDefinition("node {\n" +
            "echo 'First message'\n" +
            "parallel( a: { syncnonblocking 'wait0'; echo 'a branch'; }, b: { semaphore 'wait1'; echo 'b branch'; } )\n" +
            "echo 'Second message'\n" +
        "}"));
        WorkflowRun b = p.scheduleBuild2(0).getStartCondition().get();

        SynchronousNonBlockingStep.waitForStart("wait0", b);
        SemaphoreStep.success("wait1/1", null);

        // Wait for "b" branch to print its message
        j.waitForMessage("b branch", b);
        System.out.println("b branch finishes");

        // Check that "a" branch is effectively blocked
        j.assertLogNotContains("a branch", b);

        // Notify "a" branch
        System.out.println("Continue on wait0");
        SynchronousNonBlockingStep.notify("wait0");

        // Wait for "a" branch to finish
        j.waitForMessage("a branch", b);
        j.waitForCompletion(b);
    }

    /**
     * Test step {@code syncnonblocking 'id'}: its execution announces itself in the
     * shared {@link State}, then blocks until {@link #notify(String)} removes its id.
     * Coordination uses intrinsic wait/notifyAll on the State instance.
     */
    public static final class SynchronousNonBlockingStep extends AbstractStepImpl implements Serializable {

        /**
         * Shared rendezvous state, keyed by the Jenkins root dir so that each Jenkins
         * instance spun up by the harness gets its own independent State.
         */
        public static final class State {
            private static final Map<File,State> states = new HashMap<File,State>();
            static synchronized State get() {
                File home = Jenkins.getActiveInstance().getRootDir();
                State state = states.get(home);
                if (state == null) {
                    state = new State();
                    states.put(home, state);
                }
                return state;
            }
            private State() {}
            // ids of step executions that have started and not yet been released
            final Set<String> started = new HashSet<String>();
        }

        private String id;

        @DataBoundConstructor
        public SynchronousNonBlockingStep(String id) {
            this.id = id;
        }

        public String getId() {
            return id;
        }

        /**
         * Blocks the calling (test) thread until the step execution with the given id has
         * registered itself in {@link State#started}. Polls with a 1s-timeout wait and
         * fails fast (AssertionError) if the build finished without the step starting.
         */
        public static void waitForStart(String id, Run<?,?> b) throws IOException, InterruptedException {
            State s = State.get();
            synchronized (s) {
                while (!s.started.contains(id)) {
                    if (b != null && !b.isBuilding()) {
                        throw new AssertionError();
                    }
                    s.wait(1000);
                }
            }
        }

        /**
         * Releases the blocked step execution with the given id (no-op if it was not
         * registered as started).
         */
        public static final void notify(String id) {
            State s = State.get();
            synchronized (s) {
                if (s.started.remove(id)) {
                    s.notifyAll();
                }
            }
        }

        /** Runs on a background thread; blocks until the test thread calls notify(id). */
        public static class StepExecutionImpl extends AbstractSynchronousNonBlockingStepExecution<Void> {

            @Inject(optional=true)
            private transient SynchronousNonBlockingStep step;

            @StepContextParameter
            private transient TaskListener listener;

            @Override
            protected Void run() throws Exception {
                System.out.println("Starting syncnonblocking " + step.getId());
                // Send a test message to the listener
                listener.getLogger().println("Test Sync Message");

                // Register this execution as started and wake up any waitForStart() caller
                State s = State.get();
                synchronized (s) {
                    s.started.add(step.getId());
                    s.notifyAll();
                }

                // Wait until someone (main test thread) notifies us
                System.out.println("Sleeping inside the syncnonblocking thread (" + step.getId() + ")");
                synchronized (s) {
                    while (s.started.contains(step.getId())) {
                        s.wait(1000);
                    }
                }
                System.out.println("Continue syncnonblocking " + step.getId());
                return null;
            }

            private static final long serialVersionUID = 1L;
        }

        /** Registers the step with Jenkins for the duration of each test. */
        @TestExtension
        public static final class DescriptorImpl extends AbstractStepDescriptorImpl {
            public DescriptorImpl() {
                super(StepExecutionImpl.class);
            }
            @Override
            public String getFunctionName() {
                return "syncnonblocking";
            }
            @Override
            public String getDisplayName() {
                return "Sync non-blocking Test step";
            }
        }

        private static final long serialVersionUID = 1L;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.PriorityQueue;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.UnsafeAvailChecker;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
 * This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key.
 * Performs fast-forwards during scanning. It takes pairs (row key, fuzzy info) to match row keys.
 * Where fuzzy info is a byte array with 0 or 1 as its values:
 * <ul>
 * <li>0 - means that this byte in provided row key is fixed, i.e. row key's byte at same position
 * must match</li>
 * <li>1 - means that this byte in provided row key is NOT fixed, i.e. row key's byte at this
 * position can be different from the one in provided row key</li>
 * </ul>
 * Example: Let's assume row key format is userId_actionId_year_month. Length of userId is fixed and
 * is 4, length of actionId is 2 and year and month are 4 and 2 bytes long respectively. Let's
 * assume that we need to fetch all users that performed certain action (encoded as "99") in Jan of
 * any year. Then the pair (row key, fuzzy info) would be the following: row key = "????_99_????_01"
 * (one can use any value instead of "?") fuzzy info =
 * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching
 * mask is "????_99_????_01", where at ? can be any value.
 */
@InterfaceAudience.Public
public class FuzzyRowFilter extends FilterBase {
  // When unaligned unsafe access is available, masks are preprocessed (0 -> -1, 1 -> 2)
  // so that matching can be done word-at-a-time via bitwise AND (see satisfies()).
  private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();

  // Defensive copies of the (row key, fuzzy info) pairs supplied by the user.
  private List<Pair<byte[], byte[]>> fuzzyKeysData;
  // Set once getNextCellHint() finds no further candidate rows; ends the scan.
  private boolean done = false;

  /**
   * The index of a last successfully found matching fuzzy string (in fuzzyKeysData). We will start
   * matching next KV with this one. If they do not match then we will return back to the one-by-one
   * iteration over fuzzyKeysData.
   */
  private int lastFoundIndex = -1;

  /**
   * Row tracker (keeps all next rows after SEEK_NEXT_USING_HINT was returned)
   */
  private RowTracker tracker;

  /**
   * Builds the filter from (row key, fuzzy info) pairs. Both arrays of each pair must have
   * equal length; the inputs are copied so caller arrays are never mutated.
   *
   * @param fuzzyKeysData list of (row key, fuzzy info) pairs as described in the class javadoc
   * @throws IllegalArgumentException if a pair's key and mask lengths differ
   */
  public FuzzyRowFilter(List<Pair<byte[], byte[]>> fuzzyKeysData) {
    List<Pair<byte[], byte[]>> fuzzyKeyDataCopy = new ArrayList<>(fuzzyKeysData.size());

    for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) {
      if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) {
        Pair<String, String> readable =
          new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond()));
        throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable);
      }

      Pair<byte[], byte[]> p = new Pair<>();
      // create a copy of pair bytes so that they are not modified by the filter.
      p.setFirst(Arrays.copyOf(aFuzzyKeysData.getFirst(), aFuzzyKeysData.getFirst().length));
      p.setSecond(Arrays.copyOf(aFuzzyKeysData.getSecond(), aFuzzyKeysData.getSecond().length));

      // update mask ( 0 -> -1 (0xff), 1 -> 2)
      p.setSecond(preprocessMask(p.getSecond()));
      preprocessSearchKey(p);

      fuzzyKeyDataCopy.add(p);
    }
    this.fuzzyKeysData = fuzzyKeyDataCopy;
    this.tracker = new RowTracker();
  }

  /**
   * Zeroes the non-fixed bytes of the search key so that, with the preprocessed mask,
   * {@code (row & mask) == key} holds exactly on fixed positions. No-op on platforms
   * without unaligned unsafe access (where the unprocessed path is used instead).
   */
  private void preprocessSearchKey(Pair<byte[], byte[]> p) {
    if (!UNSAFE_UNALIGNED) {
      // do nothing
      return;
    }
    byte[] key = p.getFirst();
    byte[] mask = p.getSecond();
    for (int i = 0; i < mask.length; i++) {
      // set non-fixed part of a search key to 0.
      if (mask[i] == 2) {
        key[i] = 0;
      }
    }
  }

  /**
   * We need to preprocess mask array, as since we treat 2's as unfixed positions and -1 (0xff) as
   * fixed positions
   * @param mask the user-supplied mask (0 = fixed, 1 = non-fixed); mutated in place
   * @return mask array
   */
  private byte[] preprocessMask(byte[] mask) {
    if (!UNSAFE_UNALIGNED) {
      // do nothing
      return mask;
    }
    // Skip masks that were already converted (supports re-construction from serialized form).
    if (isPreprocessedMask(mask)) return mask;
    for (int i = 0; i < mask.length; i++) {
      if (mask[i] == 0) {
        mask[i] = -1; // 0 -> -1
      } else if (mask[i] == 1) {
        mask[i] = 2;// 1 -> 2
      }
    }
    return mask;
  }

  /** Returns true when every mask byte is already in the preprocessed alphabet {-1, 2}. */
  private boolean isPreprocessedMask(byte[] mask) {
    for (int i = 0; i < mask.length; i++) {
      if (mask[i] != -1 && mask[i] != 2) {
        return false;
      }
    }
    return true;
  }

  /**
   * Includes the cell if its row matches any fuzzy key; otherwise requests a seek to the
   * next possible matching row. Iteration starts at the last matching key (lastFoundIndex)
   * and wraps around all keys, since adjacent cells tend to match the same key.
   */
  @Override
  public ReturnCode filterCell(final Cell c) {
    final int startIndex = lastFoundIndex >= 0 ? lastFoundIndex : 0;
    final int size = fuzzyKeysData.size();
    for (int i = startIndex; i < size + startIndex; i++) {
      final int index = i % size;
      Pair<byte[], byte[]> fuzzyData = fuzzyKeysData.get(index);
      // This shift is idempotent - always end up with 0 and -1 as mask values.
      for (int j = 0; j < fuzzyData.getSecond().length; j++) {
        fuzzyData.getSecond()[j] >>= 2;
      }
      SatisfiesCode satisfiesCode = satisfies(isReversed(), c.getRowArray(), c.getRowOffset(),
        c.getRowLength(), fuzzyData.getFirst(), fuzzyData.getSecond());
      if (satisfiesCode == SatisfiesCode.YES) {
        lastFoundIndex = index;
        return ReturnCode.INCLUDE;
      }
    }
    // NOT FOUND -> seek next using hint
    lastFoundIndex = -1;

    return ReturnCode.SEEK_NEXT_USING_HINT;
  }

  /**
   * Returns the first cell of the smallest (per scan direction) candidate next row across
   * all fuzzy keys, or null (and marks the filter done) when no candidate remains.
   */
  @Override
  public Cell getNextCellHint(Cell currentCell) {
    boolean result = tracker.updateTracker(currentCell);
    if (result == false) {
      done = true;
      return null;
    }
    byte[] nextRowKey = tracker.nextRow();
    return PrivateCellUtil.createFirstOnRow(nextRowKey, 0, (short) nextRowKey.length);
  }

  /**
   * If we have multiple fuzzy keys, row tracker should improve overall performance. It calculates
   * all next rows (one per every fuzzy key) and put them (the fuzzy key is bundled) into a priority
   * queue so that the smallest row key always appears at queue head, which helps to decide the
   * "Next Cell Hint". As scanning going on, the number of candidate rows in the RowTracker will
   * remain the size of fuzzy keys until some of the fuzzy keys won't possibly have matches any
   * more.
   */
  private class RowTracker {
    private final PriorityQueue<Pair<byte[], Pair<byte[], byte[]>>> nextRows;
    private boolean initialized = false;

    RowTracker() {
      // Ordering follows scan direction (isReversed() is inherited from the parent filter class).
      nextRows = new PriorityQueue<>(fuzzyKeysData.size(),
          new Comparator<Pair<byte[], Pair<byte[], byte[]>>>() {
            @Override
            public int compare(Pair<byte[], Pair<byte[], byte[]>> o1,
                Pair<byte[], Pair<byte[], byte[]>> o2) {
              return isReversed()? Bytes.compareTo(o2.getFirst(), o1.getFirst()):
                Bytes.compareTo(o1.getFirst(), o2.getFirst());
            }
          });
    }

    /** Head of the queue: the best next-row candidate. Only valid after updateTracker() returned true. */
    byte[] nextRow() {
      if (nextRows.isEmpty()) {
        throw new IllegalStateException(
            "NextRows should not be empty, make sure to call nextRow() after updateTracker() return true");
      } else {
        return nextRows.peek().getFirst();
      }
    }

    /**
     * Refreshes candidates that the scan has already passed (per scan direction) with new
     * candidates computed from the current cell. Returns false when no fuzzy key can match
     * any further row.
     */
    boolean updateTracker(Cell currentCell) {
      if (!initialized) {
        for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
          updateWith(currentCell, fuzzyData);
        }
        initialized = true;
      } else {
        while (!nextRows.isEmpty() && !lessThan(currentCell, nextRows.peek().getFirst())) {
          Pair<byte[], Pair<byte[], byte[]>> head = nextRows.poll();
          Pair<byte[], byte[]> fuzzyData = head.getSecond();
          updateWith(currentCell, fuzzyData);
        }
      }
      return !nextRows.isEmpty();
    }

    /** True when currentCell's row is still before nextRowKey in scan order. */
    boolean lessThan(Cell currentCell, byte[] nextRowKey) {
      int compareResult =
          CellComparator.getInstance().compareRows(currentCell, nextRowKey, 0, nextRowKey.length);
      return (!isReversed() && compareResult < 0) || (isReversed() && compareResult > 0);
    }

    /** Computes and enqueues the next candidate row for one fuzzy key; drops keys with no next row. */
    void updateWith(Cell currentCell, Pair<byte[], byte[]> fuzzyData) {
      byte[] nextRowKeyCandidate = getNextForFuzzyRule(isReversed(), currentCell.getRowArray(),
          currentCell.getRowOffset(), currentCell.getRowLength(), fuzzyData.getFirst(),
          fuzzyData.getSecond());
      if (nextRowKeyCandidate != null) {
        nextRows.add(new Pair<>(nextRowKeyCandidate, fuzzyData));
      }
    }
  }

  @Override
  public boolean filterAllRemaining() {
    return done;
  }

  /**
   * @return The filter serialized using pb
   */
  @Override
  public byte[] toByteArray() {
    FilterProtos.FuzzyRowFilter.Builder builder = FilterProtos.FuzzyRowFilter.newBuilder();
    for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
      BytesBytesPair.Builder bbpBuilder = BytesBytesPair.newBuilder();
      bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(fuzzyData.getFirst()));
      bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(fuzzyData.getSecond()));
      builder.addFuzzyKeysData(bbpBuilder);
    }
    return builder.build().toByteArray();
  }

  /**
   * @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
   * @return An instance of {@link FuzzyRowFilter} made from <code>bytes</code>
   * @throws DeserializationException
   * @see #toByteArray
   */
  public static FuzzyRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    FilterProtos.FuzzyRowFilter proto;
    try {
      proto = FilterProtos.FuzzyRowFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
      throw new DeserializationException(e);
    }
    int count = proto.getFuzzyKeysDataCount();
    ArrayList<Pair<byte[], byte[]>> fuzzyKeysData = new ArrayList<>(count);
    for (int i = 0; i < count; ++i) {
      BytesBytesPair current = proto.getFuzzyKeysData(i);
      byte[] keyBytes = current.getFirst().toByteArray();
      byte[] keyMeta = current.getSecond().toByteArray();
      fuzzyKeysData.add(new Pair<>(keyBytes, keyMeta));
    }
    return new FuzzyRowFilter(fuzzyKeysData);
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append("FuzzyRowFilter");
    sb.append("{fuzzyKeysData=");
    for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
      sb.append('{').append(Bytes.toStringBinary(fuzzyData.getFirst())).append(":");
      sb.append(Bytes.toStringBinary(fuzzyData.getSecond())).append('}');
    }
    sb.append("}, ");
    return sb.toString();
  }

  // Utility methods

  static enum SatisfiesCode {
    /** row satisfies fuzzy rule */
    YES,
    /** row doesn't satisfy fuzzy rule, but there's possible greater row that does */
    NEXT_EXISTS,
    /** row doesn't satisfy fuzzy rule and there's no greater row that does */
    NO_NEXT
  }

  @VisibleForTesting
  static SatisfiesCode satisfies(byte[] row, byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) {
    return satisfies(false, row, 0, row.length, fuzzyKeyBytes, fuzzyKeyMeta);
  }

  @VisibleForTesting
  static SatisfiesCode satisfies(boolean reverse, byte[] row, byte[] fuzzyKeyBytes,
      byte[] fuzzyKeyMeta) {
    return satisfies(reverse, row, 0, row.length, fuzzyKeyBytes, fuzzyKeyMeta);
  }

  /**
   * Fast (unsafe) match: compares the row against the fuzzy key in 8/4/2/1-byte chunks
   * using {@code (row & mask) == key}, which requires the preprocessed mask/key form.
   * Falls back to {@link #satisfiesNoUnsafe} when unaligned access is unavailable.
   */
  static SatisfiesCode satisfies(boolean reverse, byte[] row, int offset, int length,
      byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) {

    if (!UNSAFE_UNALIGNED) {
      return satisfiesNoUnsafe(reverse, row, offset, length, fuzzyKeyBytes, fuzzyKeyMeta);
    }

    if (row == null) {
      // do nothing, let scan to proceed
      return SatisfiesCode.YES;
    }
    length = Math.min(length, fuzzyKeyBytes.length);
    int numWords = length / Bytes.SIZEOF_LONG;

    int j = numWords << 3; // numWords * SIZEOF_LONG;

    // Compare full 8-byte words first.
    for (int i = 0; i < j; i += Bytes.SIZEOF_LONG) {
      long fuzzyBytes = Bytes.toLong(fuzzyKeyBytes, i);
      long fuzzyMeta = Bytes.toLong(fuzzyKeyMeta, i);
      long rowValue = Bytes.toLong(row, offset + i);
      if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
        // We always return NEXT_EXISTS
        return SatisfiesCode.NEXT_EXISTS;
      }
    }

    int off = j;

    // Then the remaining tail: one int, one short, one byte as applicable.
    if (length - off >= Bytes.SIZEOF_INT) {
      int fuzzyBytes = Bytes.toInt(fuzzyKeyBytes, off);
      int fuzzyMeta = Bytes.toInt(fuzzyKeyMeta, off);
      int rowValue = Bytes.toInt(row, offset + off);
      if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
        // We always return NEXT_EXISTS
        return SatisfiesCode.NEXT_EXISTS;
      }
      off += Bytes.SIZEOF_INT;
    }

    if (length - off >= Bytes.SIZEOF_SHORT) {
      short fuzzyBytes = Bytes.toShort(fuzzyKeyBytes, off);
      short fuzzyMeta = Bytes.toShort(fuzzyKeyMeta, off);
      short rowValue = Bytes.toShort(row, offset + off);
      if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
        // We always return NEXT_EXISTS
        // even if it does not (in this case getNextForFuzzyRule
        // will return null)
        return SatisfiesCode.NEXT_EXISTS;
      }
      off += Bytes.SIZEOF_SHORT;
    }

    if (length - off >= Bytes.SIZEOF_BYTE) {
      int fuzzyBytes = fuzzyKeyBytes[off] & 0xff;
      int fuzzyMeta = fuzzyKeyMeta[off] & 0xff;
      int rowValue = row[offset + off] & 0xff;
      if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
        // We always return NEXT_EXISTS
        return SatisfiesCode.NEXT_EXISTS;
      }
    }
    return SatisfiesCode.YES;
  }

  /**
   * Byte-by-byte fallback match, working on the unprocessed mask form
   * (0 = fixed position, 1 = non-fixed).
   */
  static SatisfiesCode satisfiesNoUnsafe(boolean reverse, byte[] row, int offset, int length,
      byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) {
    if (row == null) {
      // do nothing, let scan to proceed
      return SatisfiesCode.YES;
    }

    Order order = Order.orderFor(reverse);
    boolean nextRowKeyCandidateExists = false;

    for (int i = 0; i < fuzzyKeyMeta.length && i < length; i++) {
      // First, checking if this position is fixed and not equals the given one
      boolean byteAtPositionFixed = fuzzyKeyMeta[i] == 0;
      boolean fixedByteIncorrect = byteAtPositionFixed && fuzzyKeyBytes[i] != row[i + offset];
      if (fixedByteIncorrect) {
        // in this case there's another row that satisfies fuzzy rule and bigger than this row
        if (nextRowKeyCandidateExists) {
          return SatisfiesCode.NEXT_EXISTS;
        }

        // If this row byte is less than fixed then there's a byte array bigger than
        // this row and which satisfies the fuzzy rule. Otherwise there's no such byte array:
        // this row is simply bigger than any byte array that satisfies the fuzzy rule
        boolean rowByteLessThanFixed = (row[i + offset] & 0xFF) < (fuzzyKeyBytes[i] & 0xFF);
        if (rowByteLessThanFixed && !reverse) {
          return SatisfiesCode.NEXT_EXISTS;
        } else if (!rowByteLessThanFixed && reverse) {
          return SatisfiesCode.NEXT_EXISTS;
        } else {
          return SatisfiesCode.NO_NEXT;
        }
      }

      // Second, checking if this position is not fixed and byte value is not the biggest. In this
      // case there's a byte array bigger than this row and which satisfies the fuzzy rule. To get
      // bigger byte array that satisfies the rule we need to just increase this byte
      // (see the code of getNextForFuzzyRule below) by one.
      // Note: if non-fixed byte is already at biggest value, this doesn't allow us to say there's
      // bigger one that satisfies the rule as it can't be increased.
      if (fuzzyKeyMeta[i] == 1 && !order.isMax(fuzzyKeyBytes[i])) {
        nextRowKeyCandidateExists = true;
      }
    }
    return SatisfiesCode.YES;
  }

  @VisibleForTesting
  static byte[] getNextForFuzzyRule(byte[] row, byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) {
    return getNextForFuzzyRule(false, row, 0, row.length, fuzzyKeyBytes, fuzzyKeyMeta);
  }

  @VisibleForTesting
  static byte[] getNextForFuzzyRule(boolean reverse, byte[] row, byte[] fuzzyKeyBytes,
      byte[] fuzzyKeyMeta) {
    return getNextForFuzzyRule(reverse, row, 0, row.length, fuzzyKeyBytes, fuzzyKeyMeta);
  }

  /** Abstracts directional comparisons based on scan direction. */
  private enum Order {
    ASC {
      @Override
      public boolean lt(int lhs, int rhs) {
        return lhs < rhs;
      }

      @Override
      public boolean gt(int lhs, int rhs) {
        return lhs > rhs;
      }

      @Override
      public byte inc(byte val) {
        // TODO: what about over/underflow?
        return (byte) (val + 1);
      }

      @Override
      public boolean isMax(byte val) {
        return val == (byte) 0xff;
      }

      @Override
      public byte min() {
        return 0;
      }
    },
    DESC {
      @Override
      public boolean lt(int lhs, int rhs) {
        return lhs > rhs;
      }

      @Override
      public boolean gt(int lhs, int rhs) {
        return lhs < rhs;
      }

      @Override
      public byte inc(byte val) {
        // TODO: what about over/underflow?
        return (byte) (val - 1);
      }

      @Override
      public boolean isMax(byte val) {
        return val == 0;
      }

      @Override
      public byte min() {
        return (byte) 0xFF;
      }
    };

    public static Order orderFor(boolean reverse) {
      return reverse ? DESC : ASC;
    }

    /** Returns true when {@code lhs < rhs}. */
    public abstract boolean lt(int lhs, int rhs);

    /** Returns true when {@code lhs > rhs}. */
    public abstract boolean gt(int lhs, int rhs);

    /** Returns {@code val} incremented by 1. */
    public abstract byte inc(byte val);

    /** Return true when {@code val} is the maximum value */
    public abstract boolean isMax(byte val);

    /** Return the minimum value according to this ordering scheme. */
    public abstract byte min();
  }

  /**
   * @return greater byte array than given (row) which satisfies the fuzzy rule if it exists, null
   *         otherwise
   */
  @VisibleForTesting
  static byte[] getNextForFuzzyRule(boolean reverse, byte[] row, int offset, int length,
      byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) {
    // To find out the next "smallest" byte array that satisfies fuzzy rule and "greater" than
    // the given one we do the following:
    // 1. setting values on all "fixed" positions to the values from fuzzyKeyBytes
    // 2. if during the first step given row did not increase, then we increase the value at
    // the first "non-fixed" position (where it is not maximum already)

    // It is easier to perform this by using fuzzyKeyBytes copy and setting "non-fixed" position
    // values than otherwise.
    byte[] result =
        Arrays.copyOf(fuzzyKeyBytes, length > fuzzyKeyBytes.length ? length : fuzzyKeyBytes.length);
    if (reverse && length > fuzzyKeyBytes.length) {
      // we need trailing 0xff's instead of trailing 0x00's
      for (int i = fuzzyKeyBytes.length; i < result.length; i++) {
        result[i] = (byte) 0xFF;
      }
    }
    int toInc = -1;
    final Order order = Order.orderFor(reverse);

    boolean increased = false;
    // NOTE: this method expects the preprocessed-then-shifted mask form used by filterCell,
    // where fixed positions are -1 and non-fixed positions are 0.
    for (int i = 0; i < result.length; i++) {
      if (i >= fuzzyKeyMeta.length || fuzzyKeyMeta[i] == 0 /* non-fixed */) {
        result[i] = row[offset + i];
        if (!order.isMax(row[offset + i])) {
          // this is "non-fixed" position and is not at max value, hence we can increase it
          toInc = i;
        }
      } else if (i < fuzzyKeyMeta.length && fuzzyKeyMeta[i] == -1 /* fixed */) {
        if (order.lt((row[i + offset] & 0xFF), (fuzzyKeyBytes[i] & 0xFF))) {
          // if setting value for any fixed position increased the original array,
          // we are OK
          increased = true;
          break;
        }

        if (order.gt((row[i + offset] & 0xFF), (fuzzyKeyBytes[i] & 0xFF))) {
          // if setting value for any fixed position makes array "smaller", then just stop:
          // in case we found some non-fixed position to increase we will do it, otherwise
          // there's no "next" row key that satisfies fuzzy rule and "greater" than given row
          break;
        }
      }
    }

    if (!increased) {
      if (toInc < 0) {
        return null;
      }
      result[toInc] = order.inc(result[toInc]);

      // Setting all "non-fixed" positions to zeroes to the right of the one we increased so
      // that found "next" row key is the smallest possible
      for (int i = toInc + 1; i < result.length; i++) {
        if (i >= fuzzyKeyMeta.length || fuzzyKeyMeta[i] == 0 /* non-fixed */) {
          result[i] = order.min();
        }
      }
    }

    return reverse? result: trimTrailingZeroes(result, fuzzyKeyMeta, toInc);
  }

  /**
   * For forward scanner, next cell hint should not contain any trailing zeroes
   * unless they are part of fuzzyKeyMeta
   * hint = '\x01\x01\x01\x00\x00'
   * will skip valid row '\x01\x01\x01'
   *
   * @param result the candidate next row key
   * @param fuzzyKeyMeta the (shifted) mask; trailing zero bytes in it are trimmable
   * @param toInc - position of incremented byte
   * @return trimmed version of result
   */
  private static byte[] trimTrailingZeroes(byte[] result, byte[] fuzzyKeyMeta, int toInc) {
    int off = fuzzyKeyMeta.length >= result.length? result.length -1:
           fuzzyKeyMeta.length -1;
    for( ; off >= 0; off--){
      if(fuzzyKeyMeta[off] != 0) break;
    }
    if (off < toInc)  off = toInc;
    byte[] retValue = new byte[off+1];
    System.arraycopy(result, 0, retValue, 0, retValue.length);
    return retValue;
  }

  /**
   * @return true if and only if the fields of the filter that are serialized are equal to the
   *         corresponding fields in other. Used for testing.
   */
  @Override
  boolean areSerializedFieldsEqual(Filter o) {
    if (o == this) return true;
    if (!(o instanceof FuzzyRowFilter)) return false;

    FuzzyRowFilter other = (FuzzyRowFilter) o;
    if (this.fuzzyKeysData.size() != other.fuzzyKeysData.size()) return false;
    for (int i = 0; i < fuzzyKeysData.size(); ++i) {
      Pair<byte[], byte[]> thisData = this.fuzzyKeysData.get(i);
      Pair<byte[], byte[]> otherData = other.fuzzyKeysData.get(i);
      if (!(Bytes.equals(thisData.getFirst(), otherData.getFirst()) && Bytes.equals(
        thisData.getSecond(), otherData.getSecond()))) {
        return false;
      }
    }
    return true;
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
  }

  // NOTE(review): equals() compares byte[] contents via Bytes.equals, but Objects.hash over a
  // List<Pair<byte[], byte[]>> falls back to byte[] identity hashCode, so two filters that are
  // equals() may hash differently — verify before relying on these as hash-map keys.
  @Override
  public int hashCode() {
    return Objects.hash(this.fuzzyKeysData);
  }
}
/* * Copyright 2013 SEARCH Group, Incorporated. * * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. SEARCH Group Inc. licenses this file to You * under the Apache License, Version 2.0 (the "License"); you may not use this * file except in compliance with the License. You may obtain a copy of the * License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gov.nij.processor; import static org.apache.cxf.ws.addressing.JAXWSAConstants.CLIENT_ADDRESSING_PROPERTIES; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.camel.Exchange; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.cxf.binding.soap.SoapHeader; import org.apache.cxf.endpoint.Client; import org.apache.cxf.headers.Header; import org.apache.cxf.message.Message; import org.apache.cxf.ws.addressing.AddressingBuilder; import org.apache.cxf.ws.addressing.AddressingProperties; import org.apache.cxf.ws.addressing.AttributedURIType; import org.apache.cxf.ws.addressing.EndpointReferenceType; import org.apache.cxf.ws.addressing.ObjectFactory; import org.w3c.dom.Element; /** * * This class has methods in to handle Camel exchanges for inbound and outbound messages * and to assist with dealing with WS-Addressing to correlate requests and responses. 
* */ public class MessageProcessor { private static final Log log = LogFactory.getLog( MessageProcessor.class ); private static final ObjectFactory WSA_OBJECT_FACTORY = new ObjectFactory(); /** * This method will extract a message ID from the Soap header and set that as a GUID to correlate requests and responses * It also creates a header that can be used to make a file name from the Message ID that works on all platforms. * * @param exchange * @throws Exception */ public void processRequestPayload(Exchange exchange) throws Exception { HashMap<String, String> wsAddressingHeadersMap = returnWSAddressingHeadersFromCamelSoapHeaders(exchange); String requestID = wsAddressingHeadersMap.get("MessageID"); String replyTo = wsAddressingHeadersMap.get("ReplyTo"); if (StringUtils.isNotBlank(replyTo)) { exchange.getIn().setHeader("WSAddressingReplyToInbound", replyTo); } if (StringUtils.isNotBlank(requestID)) { String platformSafeFileName = requestID.replace(":", ""); exchange.getIn().setHeader("federatedQueryRequestGUID", requestID); exchange.getIn().setHeader("platformSafeFileName", platformSafeFileName); } else { throw new Exception("Unable to find unique ID in Soap Header. Was the message ID set in the Soap WS Addressing header?"); } } /** * This method will use an existing exchange and set the 'out' message with the WS-Addressing message ID. This removes all the headers from the 'in' * message which tend to confuse Camel. 
* * @param exchange * @throws Exception */ public void prepareNewExchangeResponseMessage(Exchange exchange) throws Exception { String requestID = (String)exchange.getIn().getHeader("federatedQueryRequestGUID"); log.debug("Federeated Query Request ID: " + requestID); //Create a new map with WS Addressing message properties that we want to override HashMap<String, String> wsAddressingMessageProperties = new HashMap<String, String>(); if (StringUtils.isNotEmpty(requestID)) { wsAddressingMessageProperties.put("MessageID",requestID); } //This is the reply to address that we want to provide to the service we are calling String replyToOutbound = (String)exchange.getIn().getHeader("WSAddressingReplyToOutbound"); if (StringUtils.isNotEmpty(replyToOutbound)) { log.debug("WS Addressing Reply To Camel Header: " + replyToOutbound); wsAddressingMessageProperties.put("ReplyTo",replyToOutbound); } //Call method to create proper request context map Map<String, Object> requestContext = setWSAddressingProperties(wsAddressingMessageProperties); //This is the reply to address of the service that called us, set this as the actual address to call String replyToInbound = (String)exchange.getIn().getHeader("WSAddressingReplyToInbound"); if (StringUtils.isNotEmpty(replyToInbound)) { requestContext.put(Message.ENDPOINT_ADDRESS, replyToInbound); } exchange.getOut().setHeader(Client.REQUEST_CONTEXT , requestContext); exchange.getOut().setBody(exchange.getIn().getBody()); } /** * This method returns a map with the following keys to get at WS-Addressing properties "MessageID", "ReplyTo", "From", "To" * We can add to this method to return additional properties as they are needed. 
* * @param exchange * @return */ @SuppressWarnings("unchecked") public static HashMap<String, String> returnWSAddressingHeadersFromCamelSoapHeaders(Exchange exchange) { String messageID = null; String replyTo = null; String from = null; String to = null; HashMap<String, String> wsAddressingMessageProperties = new HashMap<String, String>(); List<SoapHeader> soapHeaders = (List<SoapHeader>) exchange.getIn() .getHeader(Header.HEADER_LIST); for (SoapHeader soapHeader : soapHeaders) { log.debug("Soap Header: " + soapHeader.getName()); log.debug("Soap Direction: " + soapHeader.getDirection()); if (soapHeader.getName().toString() .equals("{http://www.w3.org/2005/08/addressing}MessageID")) { Element element = (Element) soapHeader.getObject(); if (element != null) { messageID = element.getTextContent(); } log.info("WS-Addressing Message ID: " + messageID); wsAddressingMessageProperties.put("MessageID", messageID); } if (soapHeader.getName().toString() .equals("{http://www.w3.org/2005/08/addressing}ReplyTo")) { Element element = (Element) soapHeader.getObject(); if (element != null) { replyTo = element.getTextContent(); } log.info("WS-Addressing ReplyTo: " + replyTo); wsAddressingMessageProperties.put("ReplyTo", replyTo); } if (soapHeader.getName().toString() .equals("{http://www.w3.org/2005/08/addressing}From")) { Element element = (Element) soapHeader.getObject(); if (element != null) { from = element.getTextContent(); } log.info("WS-Addressing From: " + from); wsAddressingMessageProperties.put("From", from); } if (soapHeader.getName().toString() .equals("{http://www.w3.org/2005/08/addressing}To")) { Element element = (Element) soapHeader.getObject(); if (element != null) { to = element.getTextContent(); } log.info("WS-Addressing To: " + to); wsAddressingMessageProperties.put("To", to); } } return wsAddressingMessageProperties; } /** * This method will set the WS-Addressing Message Properties on the exchange prior to sending an outbound CXF message. 
* It allows for 'MessageID' and 'ReplyTo' * * @param senderExchange * @param requestID * @return * @throws Exception */ public static Map<String, Object> setWSAddressingProperties(Map<String, String> wsAddressingMessageProperties) throws Exception { Map<String, Object> requestContext = null; if (!wsAddressingMessageProperties.isEmpty()) { // get Message Addressing Properties instance AddressingBuilder builder = AddressingBuilder.getAddressingBuilder(); AddressingProperties maps = builder.newAddressingProperties(); String messageID = wsAddressingMessageProperties.get("MessageID"); if (StringUtils.isNotEmpty(messageID)) { // set MessageID property AttributedURIType messageIDAttr = WSA_OBJECT_FACTORY.createAttributedURIType(); messageIDAttr.setValue(messageID); maps.setMessageID(messageIDAttr); } String replyToString = wsAddressingMessageProperties.get("ReplyTo"); if (StringUtils.isNotEmpty(replyToString)) { AttributedURIType replyToAttr = new AttributedURIType(); replyToAttr.setValue(replyToString); EndpointReferenceType replyToRef = new EndpointReferenceType(); replyToRef.setAddress(replyToAttr); maps.setReplyTo(replyToRef); } requestContext = new HashMap<String, Object>(); requestContext.put(CLIENT_ADDRESSING_PROPERTIES, maps); } else { throw new Exception("WS-Addressing Message Properties can not be set. Map is empty."); } return requestContext; } /** * This method will set the WS-Addressing Message ID on the exchange prior to sending an outbound CXF message. * This method only allows for a MessageID for backwards compatibility. 
Use 'setWSAddressingProperties' to see additional properties * * @param senderExchange * @param requestID * @return * @throws Exception */ public static Map<String, Object> setWSAddressingMessageID(String requestID) throws Exception { Map<String, Object> requestContext = null; if (StringUtils.isNotEmpty(requestID)) { // get Message Addressing Properties instance AddressingBuilder builder = AddressingBuilder.getAddressingBuilder(); AddressingProperties maps = builder.newAddressingProperties(); // set MessageID property AttributedURIType messageIDAttr = WSA_OBJECT_FACTORY.createAttributedURIType(); messageIDAttr.setValue(requestID); maps.setMessageID(messageIDAttr); requestContext = new HashMap<String, Object>(); requestContext.put(CLIENT_ADDRESSING_PROPERTIES, maps); } else { throw new Exception("WS-Addressing Message ID can not be set. Request ID is empty."); } return requestContext; } }
/* * Copyright 2013 David Schreiber * 2013 John Paul Nalog * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.itboye.guangda.view; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Camera; import android.graphics.Matrix; import android.util.AttributeSet; import android.view.View; import android.view.ViewGroup; import android.view.animation.Transformation; import android.widget.Gallery; import android.widget.SpinnerAdapter; import com.itboye.guangda.adapter.FancyCoverFlowAdapter; import com.itboye.guangda.adapter.FancyCoverFlowItemWrapper; import com.itboye.guangda_android.R; public class FancyCoverFlow extends Gallery { // ============================================================================= // Constants // ============================================================================= public static final int ACTION_DISTANCE_AUTO = Integer.MAX_VALUE; public static final float SCALEDOWN_GRAVITY_TOP = 0.0f; public static final float SCALEDOWN_GRAVITY_CENTER = 0.5f; public static final float SCALEDOWN_GRAVITY_BOTTOM = 1.0f; // ============================================================================= // Private members // ============================================================================= private float reflectionRatio = 0.4f; private int reflectionGap = 20; private boolean reflectionEnabled = false; /** * TODO: Doc */ private float unselectedAlpha; /** * Camera used for view transformation. 
*/ private Camera transformationCamera; /** * TODO: Doc */ private int maxRotation = 75; /** * Factor (0-1) that defines how much the unselected children should be scaled down. 1 means no scaledown. */ private float unselectedScale; /** * TODO: Doc */ private float scaleDownGravity = SCALEDOWN_GRAVITY_CENTER; /** * Distance in pixels between the transformation effects (alpha, rotation, zoom) are applied. */ private int actionDistance; /** * Saturation factor (0-1) of items that reach the outer effects distance. */ private float unselectedSaturation; // ============================================================================= // Constructors // ============================================================================= public FancyCoverFlow(Context context) { super(context); this.initialize(); } public FancyCoverFlow(Context context, AttributeSet attrs) { super(context, attrs); this.initialize(); this.applyXmlAttributes(attrs); } public FancyCoverFlow(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); this.initialize(); this.applyXmlAttributes(attrs); } private void initialize() { this.transformationCamera = new Camera(); this.setSpacing(0); } private void applyXmlAttributes(AttributeSet attrs) { TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.FancyCoverFlow); this.actionDistance = a.getInteger(R.styleable.FancyCoverFlow_actionDistance, ACTION_DISTANCE_AUTO); this.scaleDownGravity = a.getFloat(R.styleable.FancyCoverFlow_scaleDownGravity, 1.0f); this.maxRotation = a.getInteger(R.styleable.FancyCoverFlow_maxRotation, 45); this.unselectedAlpha = a.getFloat(R.styleable.FancyCoverFlow_unselectedAlpha, 0.3f); this.unselectedSaturation = a.getFloat(R.styleable.FancyCoverFlow_unselectedSaturation, 0.0f); this.unselectedScale = a.getFloat(R.styleable.FancyCoverFlow_unselectedScale, 0.75f); } // ============================================================================= // Getter / Setter // 
============================================================================= public float getReflectionRatio() { return reflectionRatio; } public void setReflectionRatio(float reflectionRatio) { if (reflectionRatio <= 0 || reflectionRatio > 0.5f) { throw new IllegalArgumentException("reflectionRatio may only be in the interval (0, 0.5]"); } this.reflectionRatio = reflectionRatio; if (this.getAdapter() != null) { ((FancyCoverFlowAdapter) this.getAdapter()).notifyDataSetChanged(); } } public int getReflectionGap() { return reflectionGap; } public void setReflectionGap(int reflectionGap) { this.reflectionGap = reflectionGap; if (this.getAdapter() != null) { ((FancyCoverFlowAdapter) this.getAdapter()).notifyDataSetChanged(); } } public boolean isReflectionEnabled() { return reflectionEnabled; } public void setReflectionEnabled(boolean reflectionEnabled) { this.reflectionEnabled = reflectionEnabled; if (this.getAdapter() != null) { ((FancyCoverFlowAdapter) this.getAdapter()).notifyDataSetChanged(); } } /** * Use this to provide a {@link FancyCoverFlowAdapter} to the coverflow. This * method will throw an {@link ClassCastException} if the passed adapter does not * subclass {@link FancyCoverFlowAdapter}. * * @param adapter */ @Override public void setAdapter(SpinnerAdapter adapter) { if (!(adapter instanceof FancyCoverFlowAdapter)) { throw new ClassCastException(FancyCoverFlow.class.getSimpleName() + " only works in conjunction with a " + FancyCoverFlowAdapter.class.getSimpleName()); } super.setAdapter(adapter); } /** * Returns the maximum rotation that is applied to items left and right of the center of the coverflow. * * @return */ public int getMaxRotation() { return maxRotation; } /** * Sets the maximum rotation that is applied to items left and right of the center of the coverflow. 
* * @param maxRotation */ public void setMaxRotation(int maxRotation) { this.maxRotation = maxRotation; } /** * TODO: Write doc * * @return */ public float getUnselectedAlpha() { return this.unselectedAlpha; } /** * TODO: Write doc * * @return */ public float getUnselectedScale() { return unselectedScale; } /** * TODO: Write doc * * @param unselectedScale */ public void setUnselectedScale(float unselectedScale) { this.unselectedScale = unselectedScale; } /** * TODO: Doc * * @return */ public float getScaleDownGravity() { return scaleDownGravity; } /** * TODO: Doc * * @param scaleDownGravity */ public void setScaleDownGravity(float scaleDownGravity) { this.scaleDownGravity = scaleDownGravity; } /** * TODO: Write doc * * @return */ public int getActionDistance() { return actionDistance; } /** * TODO: Write doc * * @param actionDistance */ public void setActionDistance(int actionDistance) { this.actionDistance = actionDistance; } /** * TODO: Write doc * * @param unselectedAlpha */ @Override public void setUnselectedAlpha(float unselectedAlpha) { super.setUnselectedAlpha(unselectedAlpha); this.unselectedAlpha = unselectedAlpha; } /** * TODO: Write doc * * @return */ public float getUnselectedSaturation() { return unselectedSaturation; } /** * TODO: Write doc * * @param unselectedSaturation */ public void setUnselectedSaturation(float unselectedSaturation) { this.unselectedSaturation = unselectedSaturation; } // ============================================================================= // Supertype overrides // ============================================================================= @Override protected boolean getChildStaticTransformation(View child, Transformation t) { // We can cast here because FancyCoverFlowAdapter only creates wrappers. 
FancyCoverFlowItemWrapper item = (FancyCoverFlowItemWrapper) child; // Since Jelly Bean childs won't get invalidated automatically, needs to be added for the smooth coverflow animation if (android.os.Build.VERSION.SDK_INT >= 16) { item.invalidate(); } final int coverFlowWidth = this.getWidth(); final int coverFlowCenter = coverFlowWidth / 2; final int childWidth = item.getWidth(); final int childHeight = item.getHeight(); final int childCenter = item.getLeft() + childWidth / 2; // Use coverflow width when its defined as automatic. final int actionDistance = (this.actionDistance == ACTION_DISTANCE_AUTO) ? (int) ((coverFlowWidth + childWidth) / 2.0f) : this.actionDistance; // Calculate the abstract amount for all effects. final float effectsAmount = Math.min(1.0f, Math.max(-1.0f, (1.0f / actionDistance) * (childCenter - coverFlowCenter))); // Clear previous transformations and set transformation type (matrix + alpha). t.clear(); t.setTransformationType(Transformation.TYPE_BOTH); // Alpha if (this.unselectedAlpha != 1) { final float alphaAmount = (this.unselectedAlpha - 1) * Math.abs(effectsAmount) + 1; t.setAlpha(alphaAmount); } // Saturation if (this.unselectedSaturation != 1) { // Pass over saturation to the wrapper. final float saturationAmount = (this.unselectedSaturation - 1) * Math.abs(effectsAmount) + 1; item.setSaturation(saturationAmount); } final Matrix imageMatrix = t.getMatrix(); // Apply rotation. if (this.maxRotation != 0) { //final int rotationAngle = (int) (-effectsAmount * this.maxRotation); final int rotationAngle = (int) (-effectsAmount * this.maxRotation); this.transformationCamera.save(); this.transformationCamera.rotateY(rotationAngle); this.transformationCamera.getMatrix(imageMatrix); this.transformationCamera.restore(); } // Zoom. 
if (this.unselectedScale != 1) { final float zoomAmount = (this.unselectedScale - 1) * Math.abs(effectsAmount) + 1; // Calculate the scale anchor (y anchor can be altered) final float translateX = childWidth / 2.0f; final float translateY = childHeight * this.scaleDownGravity; imageMatrix.preTranslate(-translateX, -translateY); imageMatrix.postScale(zoomAmount, zoomAmount); imageMatrix.postTranslate(translateX, translateY); } return true; } // ============================================================================= // Public classes // ============================================================================= public static class LayoutParams extends Gallery.LayoutParams { public LayoutParams(Context c, AttributeSet attrs) { super(c, attrs); } public LayoutParams(int w, int h) { super(w, h); } public LayoutParams(ViewGroup.LayoutParams source) { super(source); } } }