gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.dataformat; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.core.memory.DataOutputViewStreamWrapper; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.core.memory.MemorySegmentFactory; import org.apache.flink.table.runtime.typeutils.BaseArraySerializer; import org.apache.flink.table.runtime.typeutils.BaseMapSerializer; import org.apache.flink.table.runtime.typeutils.BaseRowSerializer; import org.apache.flink.table.runtime.typeutils.BinaryGenericSerializer; import org.apache.flink.table.runtime.util.SegmentsUtil; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Arrays; /** * Use the special format to write data to a {@link MemorySegment} (its capacity grows * automatically). * * <p>If write a format binary: * 1. New a writer. * 2. Write each field by writeXX or setNullAt. (Variable length fields can not be written * repeatedly.) * 3. Invoke {@link #complete()}. * * <p>If want to reuse this writer, please invoke {@link #reset()} first. 
*/ public abstract class AbstractBinaryWriter implements BinaryWriter { protected MemorySegment segment; protected int cursor; protected DataOutputViewStreamWrapper outputView; /** * Set offset and size to fix len part. */ protected abstract void setOffsetAndSize(int pos, int offset, long size); /** * Get field offset. */ protected abstract int getFieldOffset(int pos); /** * After grow, need point to new memory. */ protected abstract void afterGrow(); protected abstract void setNullBit(int ordinal); /** * See {@link BinaryString#readBinaryStringFieldFromSegments}. */ @Override public void writeString(int pos, BinaryString input) { if (input.getSegments() == null) { String javaObject = input.getJavaObject(); writeBytes(pos, javaObject.getBytes(StandardCharsets.UTF_8)); } else { int len = input.getSizeInBytes(); if (len <= 7) { byte[] bytes = SegmentsUtil.allocateReuseBytes(len); SegmentsUtil.copyToBytes(input.getSegments(), input.getOffset(), bytes, 0, len); writeBytesToFixLenPart(segment, getFieldOffset(pos), bytes, len); } else { writeSegmentsToVarLenPart(pos, input.getSegments(), input.getOffset(), len); } } } private void writeBytes(int pos, byte[] bytes) { int len = bytes.length; if (len <= BinaryFormat.MAX_FIX_PART_DATA_SIZE) { writeBytesToFixLenPart(segment, getFieldOffset(pos), bytes, len); } else { writeBytesToVarLenPart(pos, bytes, len); } } @Override public void writeArray(int pos, BaseArray input, BaseArraySerializer serializer) { BinaryArray binary = serializer.toBinaryArray(input); writeSegmentsToVarLenPart(pos, binary.getSegments(), binary.getOffset(), binary.getSizeInBytes()); } @Override public void writeMap(int pos, BaseMap input, BaseMapSerializer serializer) { BinaryMap binary = serializer.toBinaryMap(input); writeSegmentsToVarLenPart(pos, binary.getSegments(), binary.getOffset(), binary.getSizeInBytes()); } private DataOutputViewStreamWrapper getOutputView() { if (outputView == null) { outputView = new DataOutputViewStreamWrapper(new 
BinaryRowWriterOutputView()); } return outputView; } @Override @SuppressWarnings("unchecked") public void writeGeneric(int pos, BinaryGeneric input, BinaryGenericSerializer serializer) { TypeSerializer innerSerializer = serializer.getInnerSerializer(); input.ensureMaterialized(innerSerializer); writeSegmentsToVarLenPart(pos, input.getSegments(), input.getOffset(), input.getSizeInBytes()); } @Override public void writeRow(int pos, BaseRow input, BaseRowSerializer serializer) { if (input instanceof BinaryFormat) { BinaryFormat row = (BinaryFormat) input; writeSegmentsToVarLenPart(pos, row.getSegments(), row.getOffset(), row.getSizeInBytes()); } else { BinaryRow row = serializer.toBinaryRow(input); writeSegmentsToVarLenPart(pos, row.getSegments(), row.getOffset(), row.getSizeInBytes()); } } @Override public void writeBinary(int pos, byte[] bytes) { int len = bytes.length; if (len <= BinaryFormat.MAX_FIX_PART_DATA_SIZE) { writeBytesToFixLenPart(segment, getFieldOffset(pos), bytes, len); } else { writeBytesToVarLenPart(pos, bytes, len); } } @Override public void writeDecimal(int pos, Decimal value, int precision) { assert value == null || (value.getPrecision() == precision); if (Decimal.isCompact(precision)) { assert value != null; writeLong(pos, value.toUnscaledLong()); } else { // grow the global buffer before writing data. ensureCapacity(16); // zero-out the bytes segment.putLong(cursor, 0L); segment.putLong(cursor + 8, 0L); // Make sure Decimal object has the same scale as DecimalType. // Note that we may pass in null Decimal object to set null for it. if (value == null) { setNullBit(pos); // keep the offset for future update setOffsetAndSize(pos, cursor, 0); } else { final byte[] bytes = value.toUnscaledBytes(); assert bytes.length <= 16; // Write the bytes to the variable length portion. segment.put(cursor, bytes, 0, bytes.length); setOffsetAndSize(pos, cursor, bytes.length); } // move the cursor forward. 
cursor += 16; } } @Override public void writeTimestamp(int pos, SqlTimestamp value, int precision) { if (SqlTimestamp.isCompact(precision)) { writeLong(pos, value.getMillisecond()); } else { // store the nanoOfMillisecond in fixed-length part as offset and nanoOfMillisecond ensureCapacity(8); if (value == null) { setNullBit(pos); // zero-out the bytes segment.putLong(cursor, 0L); setOffsetAndSize(pos, cursor, 0); } else { segment.putLong(cursor, value.getMillisecond()); setOffsetAndSize(pos, cursor, value.getNanoOfMillisecond()); } cursor += 8; } } private void zeroBytes(int offset, int size) { for (int i = offset; i < offset + size; i++) { segment.put(i, (byte) 0); } } protected void zeroOutPaddingBytes(int numBytes) { if ((numBytes & 0x07) > 0) { segment.putLong(cursor + ((numBytes >> 3) << 3), 0L); } } protected void ensureCapacity(int neededSize) { final int length = cursor + neededSize; if (segment.size() < length) { grow(length); } } private void writeSegmentsToVarLenPart(int pos, MemorySegment[] segments, int offset, int size) { final int roundedSize = roundNumberOfBytesToNearestWord(size); // grow the global buffer before writing data. ensureCapacity(roundedSize); zeroOutPaddingBytes(size); if (segments.length == 1) { segments[0].copyTo(offset, segment, cursor, size); } else { writeMultiSegmentsToVarLenPart(segments, offset, size); } setOffsetAndSize(pos, cursor, size); // move the cursor forward. cursor += roundedSize; } private void writeMultiSegmentsToVarLenPart(MemorySegment[] segments, int offset, int size) { // Write the bytes to the variable length portion. int needCopy = size; int fromOffset = offset; int toOffset = cursor; for (MemorySegment sourceSegment : segments) { int remain = sourceSegment.size() - fromOffset; if (remain > 0) { int copySize = remain > needCopy ? 
needCopy : remain; sourceSegment.copyTo(fromOffset, segment, toOffset, copySize); needCopy -= copySize; toOffset += copySize; fromOffset = 0; } else { fromOffset -= sourceSegment.size(); } } } private void writeBytesToVarLenPart(int pos, byte[] bytes, int len) { final int roundedSize = roundNumberOfBytesToNearestWord(len); // grow the global buffer before writing data. ensureCapacity(roundedSize); zeroOutPaddingBytes(len); // Write the bytes to the variable length portion. segment.put(cursor, bytes, 0, len); setOffsetAndSize(pos, cursor, len); // move the cursor forward. cursor += roundedSize; } /** * Increases the capacity to ensure that it can hold at least the * minimum capacity argument. */ private void grow(int minCapacity) { int oldCapacity = segment.size(); int newCapacity = oldCapacity + (oldCapacity >> 1); if (newCapacity - minCapacity < 0) { newCapacity = minCapacity; } segment = MemorySegmentFactory.wrap(Arrays.copyOf(segment.getArray(), newCapacity)); afterGrow(); } protected static int roundNumberOfBytesToNearestWord(int numBytes) { int remainder = numBytes & 0x07; if (remainder == 0) { return numBytes; } else { return numBytes + (8 - remainder); } } private static void writeBytesToFixLenPart( MemorySegment segment, int fieldOffset, byte[] bytes, int len) { long firstByte = len | 0x80; // first bit is 1, other bits is len long sevenBytes = 0L; // real data if (BinaryRow.LITTLE_ENDIAN) { for (int i = 0; i < len; i++) { sevenBytes |= ((0x00000000000000FFL & bytes[i]) << (i * 8L)); } } else { for (int i = 0; i < len; i++) { sevenBytes |= ((0x00000000000000FFL & bytes[i]) << ((6 - i) * 8L)); } } final long offsetAndSize = (firstByte << 56) | sevenBytes; segment.putLong(fieldOffset, offsetAndSize); } /** * OutputView for write Generic. */ private class BinaryRowWriterOutputView extends OutputStream { /** * Writes the specified byte to this output stream. The general contract for * <code>write</code> is that one byte is written to the output stream. 
The byte to be * written is the eight low-order bits of the argument <code>b</code>. The 24 high-order * bits of <code>b</code> are ignored. */ @Override public void write(int b) throws IOException { ensureCapacity(1); segment.put(cursor, (byte) b); cursor += 1; } @Override public void write(byte[] b) throws IOException { ensureCapacity(b.length); segment.put(cursor, b, 0, b.length); cursor += b.length; } @Override public void write(byte[] b, int off, int len) throws IOException { ensureCapacity(len); segment.put(cursor, b, off, len); cursor += len; } public void write(MemorySegment seg, int off, int len) throws IOException { ensureCapacity(len); seg.copyTo(off, segment, cursor, len); cursor += len; } } }
package org.mtransit.android.commons.provider; import android.annotation.SuppressLint; import android.content.ContentValues; import android.content.Context; import android.content.UriMatcher; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.net.Uri; import android.text.TextUtils; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import org.json.JSONArray; import org.json.JSONObject; import org.mtransit.android.commons.ArrayUtils; import org.mtransit.android.commons.BuildConfig; import org.mtransit.android.commons.FileUtils; import org.mtransit.android.commons.MTLog; import org.mtransit.android.commons.NetworkUtils; import org.mtransit.android.commons.R; import org.mtransit.android.commons.SqlUtils; import org.mtransit.android.commons.StringUtils; import org.mtransit.android.commons.TimeUtils; import org.mtransit.android.commons.UriUtils; import org.mtransit.android.commons.data.POI; import org.mtransit.android.commons.data.POIStatus; import org.mtransit.android.commons.data.RouteTripStop; import org.mtransit.android.commons.data.Schedule; import org.mtransit.android.commons.data.Trip; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JRealTimeResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult; import org.mtransit.android.commons.provider.CaLTCOnlineProvider.JBusTimes.JResult.JStopTimeResult.JStopTime; import org.mtransit.commons.CleanUtils; import java.io.BufferedWriter; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.net.SocketException; import java.net.URL; import java.net.URLConnection; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import 
java.util.concurrent.TimeUnit; import javax.net.ssl.HttpsURLConnection; // Nov 15, 2020: DOES NOT WORK because Real-Time API stop IDs do not match with GTFS static (DISABLED) @SuppressLint("Registered") public class CaLTCOnlineProvider extends MTContentProvider implements StatusProviderContract { private static final String LOG_TAG = CaLTCOnlineProvider.class.getSimpleName(); @NonNull @Override public String getLogTag() { return LOG_TAG; } @NonNull public static UriMatcher getNewUriMatcher(@NonNull String authority) { UriMatcher URI_MATCHER = new UriMatcher(UriMatcher.NO_MATCH); StatusProvider.append(URI_MATCHER, authority); return URI_MATCHER; } @Nullable private static UriMatcher uriMatcher = null; /** * Override if multiple {@link CaLTCOnlineProvider} implementations in same app. */ @NonNull private static UriMatcher getURIMATCHER(@NonNull Context context) { if (uriMatcher == null) { uriMatcher = getNewUriMatcher(getAUTHORITY(context)); } return uriMatcher; } @Nullable private static String authority = null; /** * Override if multiple {@link CaLTCOnlineProvider} implementations in same app. */ @NonNull private static String getAUTHORITY(@NonNull Context context) { if (authority == null) { authority = context.getResources().getString(R.string.ca_ltconline_authority); } return authority; } @Nullable private static Uri authorityUri = null; /** * Override if multiple {@link CaLTCOnlineProvider} implementations in same app. 
*/ @NonNull private static Uri getAUTHORITY_URI(@NonNull Context context) { if (authorityUri == null) { authorityUri = UriUtils.newContentUri(getAUTHORITY(context)); } return authorityUri; } private static final long WEB_WATCH_STATUS_MAX_VALIDITY_IN_MS = TimeUnit.HOURS.toMillis(1L); private static final long WEB_WATCH_STATUS_VALIDITY_IN_MS = TimeUnit.MINUTES.toMillis(10L); private static final long WEB_WATCH_STATUS_VALIDITY_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1L); private static final long WEB_WATCH_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_MS = TimeUnit.MINUTES.toMillis(1L); private static final long WEB_WATCH_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS = TimeUnit.MINUTES.toMillis(1L); @Override public long getStatusMaxValidityInMs() { return WEB_WATCH_STATUS_MAX_VALIDITY_IN_MS; } @Override public long getStatusValidityInMs(boolean inFocus) { if (inFocus) { return WEB_WATCH_STATUS_VALIDITY_IN_FOCUS_IN_MS; } return WEB_WATCH_STATUS_VALIDITY_IN_MS; } @Override public long getMinDurationBetweenRefreshInMs(boolean inFocus) { if (inFocus) { return WEB_WATCH_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_FOCUS_IN_MS; } return WEB_WATCH_STATUS_MIN_DURATION_BETWEEN_REFRESH_IN_MS; } @Override public void cacheStatus(@NonNull POIStatus newStatusToCache) { StatusProvider.cacheStatusS(this, newStatusToCache); } @Nullable @Override public POIStatus getCachedStatus(@NonNull StatusProviderContract.Filter statusFilter) { if (!(statusFilter instanceof Schedule.ScheduleStatusFilter)) { MTLog.w(this, "getNewStatus() > Can't find new schedule without schedule filter!"); return null; } Schedule.ScheduleStatusFilter scheduleStatusFilter = (Schedule.ScheduleStatusFilter) statusFilter; RouteTripStop rts = scheduleStatusFilter.getRouteTripStop(); String uuid = getAgencyRouteStopTargetUUID(rts); POIStatus status = StatusProvider.getCachedStatusS(this, uuid); if (status != null) { status.setTargetUUID(rts.getUUID()); // target RTS UUID instead of custom provider tags if (status 
instanceof Schedule) { ((Schedule) status).setDescentOnly(rts.isDescentOnly()); } } return status; } @NonNull private static String getAgencyRouteStopTargetUUID(@NonNull RouteTripStop rts) { return getAgencyRouteStopTargetUUID( rts.getAuthority(), getAgencyRouteId(rts), getAgencyTripId(rts), getAgencyStopId(rts) ); } @NonNull protected static String getAgencyRouteStopTargetUUID(@NonNull String agencyAuthority, @NonNull String routeShortName, @Nullable String optTripHeaSignValue, @NonNull String stopId) { return POI.POIUtils.getUUID(agencyAuthority, routeShortName, optTripHeaSignValue, stopId); } @NonNull private static String getAgencyRouteId(@NonNull RouteTripStop rts) { return rts.getRoute().getShortName(); } private static final String CA_LONDON_TRANSIT_BUS = BuildConfig.DEBUG ? "org.mtransit.android.debug.ca_london_transit_bus.gtfs" : "org.mtransit.android.ca_london_transit_bus.gtfs"; @NonNull private static String getAgencyTripId(@NonNull RouteTripStop rts) { if (rts.getTrip().getHeadsignType() == Trip.HEADSIGN_TYPE_DIRECTION) { return rts.getTrip().getHeadsignValue(); // E | W | N | S } else if (rts.getTrip().getHeadsignType() == Trip.HEADSIGN_TYPE_STRING) { if (CA_LONDON_TRANSIT_BUS.equals(rts.getAuthority())) { String tripIdS = String.valueOf(rts.getTrip().getId()); if (tripIdS.endsWith("010")) { return LTC_CW; } else if (tripIdS.endsWith("011")) { return LTC_CCW; } if (tripIdS.endsWith("0101")) { return LTC_HURON_AND_BARKER; } else if (tripIdS.endsWith("0102")) { return LTC_WESTERN; } if (tripIdS.endsWith("01")) { return Trip.HEADING_EAST; } else if (tripIdS.endsWith("02")) { return Trip.HEADING_NORTH; } else if (tripIdS.endsWith("03")) { return Trip.HEADING_SOUTH; } else if (tripIdS.endsWith("04")) { return Trip.HEADING_WEST; } } } MTLog.w(LOG_TAG, "Unsupported agency trip filtering for '%s'.", rts); return StringUtils.EMPTY; // DO NOT FILTER BY TRIP } private static String getAgencyStopId(@NonNull RouteTripStop rts) { return 
String.valueOf(rts.getStop().getId()); } @Override public boolean purgeUselessCachedStatuses() { return StatusProvider.purgeUselessCachedStatuses(this); } @Override public boolean deleteCachedStatus(int cachedStatusId) { return StatusProvider.deleteCachedStatus(this, cachedStatusId); } @NonNull @Override public String getStatusDbTableName() { return CaLTCOnlineDbHelper.T_WEB_WATCH_STATUS; } @Override public int getStatusType() { return POI.ITEM_STATUS_TYPE_SCHEDULE; } @Nullable @Override public POIStatus getNewStatus(@NonNull StatusProviderContract.Filter statusFilter) { if (!(statusFilter instanceof Schedule.ScheduleStatusFilter)) { MTLog.w(this, "getNewStatus() > Can't find new schedule without schedule filter!"); return null; } Schedule.ScheduleStatusFilter scheduleStatusFilter = (Schedule.ScheduleStatusFilter) statusFilter; RouteTripStop rts = scheduleStatusFilter.getRouteTripStop(); loadRealTimeStatusFromWWW(rts); return getCachedStatus(statusFilter); } private static final String REAL_TIME_URL = "https://realtime.londontransit.ca/InfoWeb"; private void loadRealTimeStatusFromWWW(@NonNull RouteTripStop rts) { try { String urlString = REAL_TIME_URL; MTLog.i(this, "Loading from '%s' for '%s'...", urlString, rts.getStop().getId()); String jsonPostParams = getJSONPostParameters(rts); if (TextUtils.isEmpty(jsonPostParams)) { MTLog.w(this, "loadPredictionsFromWWW() > skip (invalid JSON post parameters!)"); return; } URL url = new URL(urlString); URLConnection urlc = url.openConnection(); NetworkUtils.setupUrlConnection(urlc); HttpsURLConnection httpUrlConnection = (HttpsURLConnection) urlc; try { httpUrlConnection.setDoOutput(true); httpUrlConnection.setRequestMethod("POST"); httpUrlConnection.addRequestProperty("Content-Type", "application/json"); OutputStream os = httpUrlConnection.getOutputStream(); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, FileUtils.getUTF8())); writer.write(jsonPostParams); writer.flush(); writer.close(); os.close(); 
long newLastUpdateInMs = TimeUtils.currentTimeMillis(); String jsonString = FileUtils.getString(httpUrlConnection.getInputStream()); JBusTimes jBusTimes = parseAgencyJSONBusTimes(jsonString); long beginningOfTodayInMs = getNewBeginningOfTodayCal().getTimeInMillis(); Collection<POIStatus> statuses = parseAgencyJSON(jBusTimes, rts, newLastUpdateInMs, beginningOfTodayInMs); StatusProvider.deleteCachedStatus(this, ArrayUtils.asArrayList(getAgencyRouteStopTargetUUID(rts))); for (POIStatus status : statuses) { StatusProvider.cacheStatusS(this, status); } } catch (Exception e) { MTLog.w(this, e, "Error while posting query!"); } finally { httpUrlConnection.disconnect(); } } catch (UnknownHostException uhe) { if (MTLog.isLoggable(android.util.Log.DEBUG)) { MTLog.w(this, uhe, "No Internet Connection!"); } else { MTLog.w(this, "No Internet Connection!"); } } catch (SocketException se) { MTLog.w(LOG_TAG, se, "No Internet Connection!"); } catch (Exception e) { // Unknown error MTLog.e(LOG_TAG, e, "INTERNAL ERROR: Unknown Exception"); } } private static final String JSON_ENABLED = "1"; private static final String JSON_CLIENT = "Client"; private static final String JSON_GET_STOP_TIMES = "GetStopTimes"; private static final String JSON_GET_STOP_TRIP_INFO = "GetStopTripInfo"; private static final String JSON_RADIUS = "Radius"; private static final String JSON_SUPPRESS_LINES_UNLOAD_ONLY = "SuppressLinesUnloadOnly"; private static final String JSON_LINES_REQUEST = "LinesRequest"; private static final String JSON_CLIENT_MOBILE_WEB = "MobileWeb"; private static final String JSON_GET_STOP_TIMES_ENABLED = JSON_ENABLED; private static final String JSON_GET_STOP_TRIP_INFO_ENABLED = JSON_ENABLED; private static final String JSON_RADIUS_NONE = "0"; private static final String JSON_SUPPRESS_LINES_UNLOAD_ONLY_ENABLED = JSON_ENABLED; private static final String JSON_VERSION = "version"; private static final String JSON_METHOD = "method"; private static final String JSON_PARAMS = "params"; 
private static final String JSON_STOP_ID = "StopId"; private static final String JSON_NUM_STOP_TIMES = "NumStopTimes"; private static final String JSON_VERSION_1_1 = "1.1"; private static final String JSON_METHOD_GET_BUS_TIMES = "GetBusTimes"; private static final String JSON_NUM_STOP_TIMES_COUNT = "200"; private static final String JSON_RESULT = "result"; private static final String JSON_STOP_TIME_RESULT = "StopTimeResult"; private static final String JSON_STOP_TIMES = "StopTimes"; private static final String JSON_TRIP_ID = "TripId"; private static final String JSON_IGNORE_ADHERENCE = "IgnoreAdherence"; private static final String JSON_DESTINATION_SIGN = "DestinationSign"; private static final String JSON_DIRECTION_NAME = "DirectionName"; private static final String JSON_REAL_TIME_RESULTS = "RealTimeResults"; private static final String JSON_REAL_TIME = "RealTime"; private static final String JSON_E_TIME = "ETime"; private static final String JSON_LINES = "Lines"; private static final String JSON_LINE_DIR_ID = "LineDirId"; private static final String JSON_LINE_ABBR = "LineAbbr"; @Nullable private static String getJSONPostParameters(@NonNull RouteTripStop rts) { try { JSONObject json = new JSONObject(); json.put(JSON_VERSION, JSON_VERSION_1_1); json.put(JSON_METHOD, JSON_METHOD_GET_BUS_TIMES); JSONObject jParams = new JSONObject(); JSONObject jLinesRequest = new JSONObject(); jLinesRequest.put(JSON_CLIENT, JSON_CLIENT_MOBILE_WEB); jLinesRequest.put(JSON_GET_STOP_TIMES, JSON_GET_STOP_TIMES_ENABLED); jLinesRequest.put(JSON_GET_STOP_TRIP_INFO, JSON_GET_STOP_TRIP_INFO_ENABLED); jLinesRequest.put(JSON_NUM_STOP_TIMES, JSON_NUM_STOP_TIMES_COUNT); jLinesRequest.put(JSON_RADIUS, JSON_RADIUS_NONE); jLinesRequest.put(JSON_STOP_ID, getAgencyStopId(rts)); jLinesRequest.put(JSON_SUPPRESS_LINES_UNLOAD_ONLY, JSON_SUPPRESS_LINES_UNLOAD_ONLY_ENABLED); jParams.put(JSON_LINES_REQUEST, jLinesRequest); json.put(JSON_PARAMS, jParams); return json.toString(); } catch (Exception e) { 
MTLog.w(LOG_TAG, e, "Error while creating JSON POST parameters for '%s'!", rts); return null; } } @NonNull private JBusTimes parseAgencyJSONBusTimes(@Nullable String jsonString) { List<JBusTimes.JResult> results = new ArrayList<>(); try { JSONObject json = jsonString == null ? null : new JSONObject(jsonString); if (json != null && json.has(JSON_RESULT)) { JSONArray jResults = json.getJSONArray(JSON_RESULT); for (int r = 0; r < jResults.length(); r++) { JSONObject jResult = jResults.getJSONObject(r); results.add(new JBusTimes.JResult( parseAgencyJSONBusTimesRealTimeResults(jResult), parseAgencyJSONBusTimesStopTimesResults(jResult) )); } } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jsonString); } return new JBusTimes(results); } private List<JStopTimeResult> parseAgencyJSONBusTimesStopTimesResults(@Nullable JSONObject jResult) { List<JStopTimeResult> stopTimeResults = new ArrayList<>(); try { if (jResult != null && jResult.has(JSON_STOP_TIME_RESULT)) { JSONArray jStopTimeResults = jResult.getJSONArray(JSON_STOP_TIME_RESULT); for (int str = 0; str < jStopTimeResults.length(); str++) { JSONObject jStopTimeResult = jStopTimeResults.getJSONObject(str); stopTimeResults.add(new JStopTimeResult( parseAgencyJSONBusTimesLines(jStopTimeResult), parseAgencyJSONBusTimesStopTimes(jStopTimeResult) )); } } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jResult); } return stopTimeResults; } @NonNull private List<JStopTimeResult.JLine> parseAgencyJSONBusTimesLines(@Nullable JSONObject jStopTimeResult) { List<JStopTimeResult.JLine> lines = new ArrayList<>(); try { if (jStopTimeResult != null && jStopTimeResult.has(JSON_LINES)) { JSONArray jLines = jStopTimeResult.getJSONArray(JSON_LINES); for (int l = 0; l < jLines.length(); l++) { JSONObject jLine = jLines.getJSONObject(l); JStopTimeResult.JLine line = parseAgencyJSONBusTimesLine(jLine); if (line != null) { lines.add(line); } } } } catch (Exception e) { MTLog.w(this, e, 
"Error while parsing JSON '%s'!", jStopTimeResult); } return lines; } @Nullable private JStopTimeResult.JLine parseAgencyJSONBusTimesLine(@Nullable JSONObject jLine) { try { if (jLine != null) { return new JStopTimeResult.JLine( jLine.getString(JSON_DIRECTION_NAME), jLine.getString(JSON_LINE_ABBR), jLine.getInt(JSON_LINE_DIR_ID), jLine.getInt(JSON_STOP_ID) ); } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jLine); } return null; } @NonNull private List<JStopTime> parseAgencyJSONBusTimesStopTimes(@Nullable JSONObject jStopTimeResult) { List<JStopTime> stopTimes = new ArrayList<>(); try { if (jStopTimeResult != null && jStopTimeResult.has(JSON_STOP_TIMES)) { JSONArray jStopTimes = jStopTimeResult.getJSONArray(JSON_STOP_TIMES); for (int st = 0; st < jStopTimes.length(); st++) { JSONObject jStopTime = jStopTimes.getJSONObject(st); JStopTime stopTime = parseAgencyJSONBusTimesStopTime(jStopTime); if (stopTime != null) { stopTimes.add(stopTime); } } } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jStopTimeResult); } return stopTimes; } @Nullable private JStopTime parseAgencyJSONBusTimesStopTime(@Nullable JSONObject jStopTime) { try { if (jStopTime != null) { return new JStopTime( jStopTime.getString(JSON_DESTINATION_SIGN), jStopTime.getInt(JSON_E_TIME), jStopTime.getInt(JSON_LINE_DIR_ID), jStopTime.getString(JSON_STOP_ID), jStopTime.getInt(JSON_TRIP_ID) ); } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jStopTime); } return null; } @NonNull private List<JRealTimeResult> parseAgencyJSONBusTimesRealTimeResults(@Nullable JSONObject jResult) { List<JRealTimeResult> realTimeResults = new ArrayList<>(); try { if (jResult != null && jResult.has(JSON_REAL_TIME_RESULTS)) { JSONArray jRealTimeResults = jResult.getJSONArray(JSON_REAL_TIME_RESULTS); for (int rt = 0; rt < jRealTimeResults.length(); rt++) { JSONObject jRealTimeResult = jRealTimeResults.getJSONObject(rt); JRealTimeResult 
realTimeResult = parseAgencyJSONBusTimesRealTimeResult(jRealTimeResult); if (realTimeResult != null) { realTimeResults.add(realTimeResult); } } } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jResult); } return realTimeResults; } @Nullable private JRealTimeResult parseAgencyJSONBusTimesRealTimeResult(@Nullable JSONObject jRealTimeResult) { try { if (jRealTimeResult != null) { return new JRealTimeResult( jRealTimeResult.getInt(JSON_E_TIME), jRealTimeResult.getInt(JSON_LINE_DIR_ID), jRealTimeResult.getInt(JSON_REAL_TIME), jRealTimeResult.getInt(JSON_STOP_ID), jRealTimeResult.getInt(JSON_TRIP_ID), jRealTimeResult.optBoolean(JSON_IGNORE_ADHERENCE, true) // true == not real-time ); } } catch (Exception e) { MTLog.w(this, e, "Error while parsing JSON '%s'!", jRealTimeResult); } return null; } private static final TimeZone LONDON_TZ = TimeZone.getTimeZone("America/Toronto"); @NonNull protected Calendar getNewBeginningOfTodayCal() { Calendar beginningOfTodayCal = Calendar.getInstance(LONDON_TZ); beginningOfTodayCal.set(Calendar.HOUR_OF_DAY, 0); beginningOfTodayCal.set(Calendar.MINUTE, 0); beginningOfTodayCal.set(Calendar.SECOND, 0); beginningOfTodayCal.set(Calendar.MILLISECOND, 0); return beginningOfTodayCal; } private static final long PROVIDER_PRECISION_IN_MS = TimeUnit.SECONDS.toMillis(10L); @NonNull protected List<POIStatus> parseAgencyJSON(@NonNull JBusTimes jBusTimes, @NonNull RouteTripStop rts, long newLastUpdateInMs, long beginningOfTodayInMs) { List<POIStatus> result = new ArrayList<>(); try { if (jBusTimes.hasResults()) { List<JBusTimes.JResult> jResults = jBusTimes.getResults(); if (jResults != null && !jResults.isEmpty()) { for (JBusTimes.JResult jResult : jResults) { @SuppressLint("UseSparseArrays") Map<Integer, String> lineDirIdTargetUUIDS = new HashMap<>(); @SuppressLint("UseSparseArrays") Map<Integer, List<JStopTime>> lineDirIdStopTimes = new HashMap<>(); @SuppressLint("UseSparseArrays") Map<Integer, List<JRealTimeResult>> 
lineDirIdRealTimeResults = new HashMap<>(); if (jResult != null && jResult.hasStopTimeResults()) { List<JStopTimeResult> jStopTimesResults = jResult.getStopTimeResults(); if (jStopTimesResults != null && !jStopTimesResults.isEmpty()) { for (JStopTimeResult jStopTimeResult : jStopTimesResults) { // LINES if (jStopTimeResult != null && jStopTimeResult.hasLines()) { List<JStopTimeResult.JLine> jLines = jStopTimeResult.getLines(); if (jLines != null && !jLines.isEmpty()) { for (JStopTimeResult.JLine jLine : jLines) { if (jLine.hasLineDirId()) { lineDirIdTargetUUIDS.put( jLine.getLineDirId(), getAgencyRouteStopTargetUUID( rts.getAuthority(), getRouteShortName(jLine), getTripHeadSign(jLine), jLine.getStopIdS() ) ); } } } } // STOP TIMES (STATIC) if (jStopTimeResult != null && jStopTimeResult.hasStopTimes()) { List<JStopTime> jStopTimes = jStopTimeResult.getStopTimes(); if (jStopTimes != null && !jStopTimes.isEmpty()) { for (JStopTime jStopTime : jStopTimes) { if (jStopTime.hasLineDirId()) { List<JStopTime> lineDirIdStopTime = lineDirIdStopTimes.get(jStopTime.getLineDirId()); if (lineDirIdStopTime == null) { lineDirIdStopTime = new ArrayList<>(); } lineDirIdStopTime.add(jStopTime); lineDirIdStopTimes.put(jStopTime.getLineDirId(), lineDirIdStopTime); } } } } } } } // REAL TIME (SCHEDULE) if (jResult != null && jResult.hasRealTimeResults()) { List<JRealTimeResult> jRealTimeResults = jResult.getRealTimeResults(); if (jRealTimeResults != null && !jRealTimeResults.isEmpty()) { for (JRealTimeResult jRealTimeResult : jRealTimeResults) { if (jRealTimeResult != null && jRealTimeResult.hasRealTime()) { if (jRealTimeResult.hasLineDirId()) { lineDirIdRealTimeResults.put( jRealTimeResult.getLineDirId(), jRealTimeResults ); } } } } } // MERGE for (Map.Entry<Integer, String> lineDirIdTargetUUID : lineDirIdTargetUUIDS.entrySet()) { int lineDirId = lineDirIdTargetUUID.getKey(); String targetUUID = lineDirIdTargetUUID.getValue(); Schedule newSchedule = new Schedule(targetUUID, 
newLastUpdateInMs, getStatusMaxValidityInMs(), newLastUpdateInMs, PROVIDER_PRECISION_IN_MS, false); List<JStopTime> stopTimes = lineDirIdStopTimes.get(lineDirId); List<JRealTimeResult> realTimeResults = lineDirIdRealTimeResults.get(lineDirId); if (stopTimes != null) { for (JStopTime stopTime : stopTimes) { int eTime = stopTime.getETime(); JRealTimeResult realTime = findRealTime(stopTime, realTimeResults); Boolean isRealTime = null; if (realTime != null) { eTime = realTime.getRealTime(); if (realTime.getIgnoreAdherence() != null) { isRealTime = !realTime.getIgnoreAdherence(); } } long t = beginningOfTodayInMs + TimeUnit.SECONDS.toMillis(eTime); Schedule.Timestamp timestamp = new Schedule.Timestamp(TimeUtils.timeToTheTensSecondsMillis(t)); String destinationSign = stopTime.getDestinationSign(); if (!TextUtils.isEmpty(destinationSign)) { destinationSign = cleanTripHeadSign(destinationSign); timestamp.setHeadsign(Trip.HEADSIGN_TYPE_STRING, destinationSign); } if (isRealTime != null) { timestamp.setRealTime(isRealTime); } newSchedule.addTimestampWithoutSort(timestamp); } } newSchedule.sortTimestamps(); result.add(newSchedule); } } } } } catch (Exception e) { MTLog.w(this, e, "Error while parsing '%s'!", jBusTimes); } return result; } @Nullable private JRealTimeResult findRealTime(@NonNull JStopTime stopTime, @Nullable List<JRealTimeResult> realTimeResults) { if (realTimeResults != null) { for (JRealTimeResult realTimeResult : realTimeResults) { if (realTimeResult.getLineDirId() != stopTime.getLineDirId()) { continue; // different line } if (realTimeResult.getTripId() != stopTime.getTripId()) { continue; // different trip } if (realTimeResult.getETime() != stopTime.getETime()) { continue; // different scheduled time } return realTimeResult; } } MTLog.d(this, "No real-time for '%s'", stopTime); return null; } private static final String EASTBOUND = "EASTBOUND"; private static final String WESTBOUND = "WESTBOUND"; private static final String NORTHBOUND = "NORTHBOUND"; 
private static final String SOUTHBOUND = "SOUTHBOUND"; private static final String LTC_CW = "10"; private static final String LTC_CCW = "11"; private static final String LTC_HURON_AND_BARKER = "101"; private static final String LTC_WESTERN = "102"; @NonNull private String getTripHeadSign(@NonNull JStopTimeResult.JLine jLine) { String jDirectionName = jLine.getDirectionName().trim(); if (EASTBOUND.equalsIgnoreCase(jDirectionName)) { return Trip.HEADING_EAST; } else if (WESTBOUND.equalsIgnoreCase(jDirectionName)) { return Trip.HEADING_WEST; } else if (NORTHBOUND.equalsIgnoreCase(jDirectionName)) { return Trip.HEADING_NORTH; } else if (SOUTHBOUND.equalsIgnoreCase(jDirectionName)) { return Trip.HEADING_SOUTH; } if ("CLOCKWISE".equalsIgnoreCase(jDirectionName)) { return LTC_CW; } else if ("COUNTER-CLKWISE".equalsIgnoreCase(jDirectionName)) { return LTC_CCW; } if ("WESTERN".equalsIgnoreCase(jDirectionName)) { return LTC_WESTERN; } else if ("HURON & BARKER".equalsIgnoreCase(jDirectionName)) { return LTC_HURON_AND_BARKER; } MTLog.w(LOG_TAG, "Unsupported agency line direction for '%s'.", jLine); return StringUtils.EMPTY; } @NonNull private String getRouteShortName(@NonNull JStopTimeResult.JLine jLine) { return String.valueOf(Integer.parseInt(jLine.getLineAbbr())); // remove leading 0 } private String cleanTripHeadSign(String tripHeadSign) { try { tripHeadSign = CleanUtils.CLEAN_AT.matcher(tripHeadSign).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT); tripHeadSign = CleanUtils.CLEAN_AND.matcher(tripHeadSign).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT); tripHeadSign = CleanUtils.cleanNumbers(tripHeadSign); tripHeadSign = CleanUtils.cleanStreetTypes(tripHeadSign); tripHeadSign = CleanUtils.removePoints(tripHeadSign); tripHeadSign = CleanUtils.cleanLabel(tripHeadSign); return tripHeadSign; } catch (Exception e) { MTLog.w(this, e, "Error while cleaning trip head sign '%s'!", tripHeadSign); return tripHeadSign; } } @Override public boolean onCreateMT() { ping(); return true; } 
@Override
public void ping() {
    // DO NOTHING
}

// Lazily-created DB helper; re-created when the expected DB version changes.
@Nullable
private CaLTCOnlineDbHelper dbHelper;
// DB version the current dbHelper instance was created for (-1 = none yet).
private static int currentDbVersion = -1;

/**
 * Returns the DB helper, creating it on first use and re-creating it when
 * the expected DB version has changed since it was created.
 */
@NonNull
private CaLTCOnlineDbHelper getDBHelper(@NonNull Context context) {
    if (dbHelper == null) { // initialize
        dbHelper = getNewDbHelper(context);
        currentDbVersion = getCurrentDbVersion();
    } else { // reset
        try {
            if (currentDbVersion != getCurrentDbVersion()) {
                // version changed: close stale helper and recurse to rebuild
                dbHelper.close();
                dbHelper = null;
                return getDBHelper(context);
            }
        } catch (Exception e) { // fail if locked, will try again later
            MTLog.w(this, e, "Can't check DB version!");
        }
    }
    return dbHelper;
}

/**
 * Override if multiple {@link CaLTCOnlineProvider} implementations in same app.
 */
public int getCurrentDbVersion() {
    //noinspection ConstantConditions // TODO requireContext()
    return CaLTCOnlineDbHelper.getDbVersion(getContext());
}

/**
 * Override if multiple {@link CaLTCOnlineProvider} implementations in same app.
 */
@NonNull
public CaLTCOnlineDbHelper getNewDbHelper(@NonNull Context context) {
    return new CaLTCOnlineDbHelper(context.getApplicationContext());
}

@NonNull
@Override
public UriMatcher getURI_MATCHER() {
    //noinspection ConstantConditions // TODO requireContext()
    return getURIMATCHER(getContext());
}

@NonNull
@Override
public Uri getAuthorityUri() {
    //noinspection ConstantConditions // TODO requireContext()
    return getAUTHORITY_URI(getContext());
}

@NonNull
private SQLiteOpenHelper getDBHelper() {
    //noinspection ConstantConditions // TODO requireContext()
    return getDBHelper(getContext());
}

@NonNull
@Override
public SQLiteDatabase getReadDB() {
    return getDBHelper().getReadableDatabase();
}

@NonNull
@Override
public SQLiteDatabase getWriteDB() {
    return getDBHelper().getWritableDatabase();
}

// Delegates status queries; any URI StatusProvider does not handle is rejected.
@Nullable
@Override
public Cursor queryMT(@NonNull Uri uri, @Nullable String[] projection, @Nullable String selection, @Nullable String[] selectionArgs, @Nullable String sortOrder) {
    Cursor cursor = StatusProvider.queryS(this, uri, selection);
    if (cursor != null) {
        return cursor;
    }
    throw new IllegalArgumentException(String.format("Unknown URI (query): '%s'", uri));
}

@Nullable
@Override
public String getTypeMT(@NonNull Uri uri) {
    String type = StatusProvider.getTypeS(this, uri);
    if (type != null) {
        return type;
    }
    throw new IllegalArgumentException(String.format("Unknown URI (type): '%s'", uri));
}

// Read-only provider: delete/update/insert are intentionally not supported.
@Override
public int deleteMT(@NonNull Uri uri, @Nullable String selection, @Nullable String[] selectionArgs) {
    MTLog.w(this, "The delete method is not available.");
    return 0;
}

@Override
public int updateMT(@NonNull Uri uri, @Nullable ContentValues values, @Nullable String selection, @Nullable String[] selectionArgs) {
    MTLog.w(this, "The update method is not available.");
    return 0;
}

@Nullable
@Override
public Uri insertMT(@NonNull Uri uri, @Nullable ContentValues values) {
    MTLog.w(this, "The insert method is not available.");
    return null;
}

/**
 * SQLite helper owning the single status table used by this provider.
 */
public static class CaLTCOnlineDbHelper extends MTSQLiteOpenHelper {

    private static final String LOG_TAG = CaLTCOnlineDbHelper.class.getSimpleName();

    @NonNull
    @Override
    public String getLogTag() {
        return LOG_TAG;
    }

    /**
     * Override if multiple {@link CaLTCOnlineDbHelper} implementations in same app.
     */
    protected static final String DB_NAME = "caltconline.db";

    // Status table name + CREATE/DROP statements, shared with StatusProvider.
    static final String T_WEB_WATCH_STATUS = StatusProvider.StatusDbHelper.T_STATUS;
    private static final String T_WEB_WATCH_STATUS_SQL_CREATE = StatusProvider.StatusDbHelper.getSqlCreateBuilder(T_WEB_WATCH_STATUS).build();
    private static final String T_WEB_WATCH_STATUS_SQL_DROP = SqlUtils.getSQLDropIfExistsQuery(T_WEB_WATCH_STATUS);

    // Cached DB version read from resources (-1 = not read yet).
    private static int dbVersion = -1;

    /**
     * Override if multiple {@link CaLTCOnlineDbHelper} in same app.
     */
    public static int getDbVersion(@NonNull Context context) {
        if (dbVersion < 0) {
            dbVersion = context.getResources().getInteger(R.integer.ca_ltconline_db_version);
        }
        return dbVersion;
    }

    CaLTCOnlineDbHelper(@NonNull Context context) {
        super(context, DB_NAME, null, getDbVersion(context));
    }

    @Override
    public void onCreateMT(@NonNull SQLiteDatabase db) {
        initAllDbTables(db);
    }

    @Override
    public void onUpgradeMT(@NonNull SQLiteDatabase db, int oldVersion, int newVersion) {
        // Status data is a disposable cache: drop & recreate on upgrade.
        db.execSQL(T_WEB_WATCH_STATUS_SQL_DROP);
        initAllDbTables(db);
    }

    public boolean isDbExist(@NonNull Context context) {
        return SqlUtils.isDbExist(context, DB_NAME);
    }

    private void initAllDbTables(@NonNull SQLiteDatabase db) {
        db.execSQL(T_WEB_WATCH_STATUS_SQL_CREATE);
    }
}

/**
 * JSON model: top-level response from the LTC "BusTimes" web service.
 */
protected static class JBusTimes {

    private final List<JResult> results;

    JBusTimes(List<JResult> results) {
        this.results = results;
    }

    public List<JResult> getResults() {
        return results;
    }

    boolean hasResults() {
        return this.results != null && !this.results.isEmpty();
    }

    @NonNull
    @Override
    public String toString() {
        return JBusTimes.class.getSimpleName() + "{" +
                "results=" + results +
                '}';
    }

    /**
     * JSON model: one result holding real-time and static stop-time lists.
     */
    protected static class JResult {

        private final List<JRealTimeResult> realTimeResults;
        private final List<JStopTimeResult> stopTimeResults;

        JResult(List<JRealTimeResult> realTimeResults, List<JStopTimeResult> stopTimeResults) {
            this.realTimeResults = realTimeResults;
            this.stopTimeResults = stopTimeResults;
        }

        List<JStopTimeResult> getStopTimeResults() {
            return stopTimeResults;
        }

        boolean hasStopTimeResults() {
            return this.stopTimeResults != null && !this.stopTimeResults.isEmpty();
        }

        List<JRealTimeResult> getRealTimeResults() {
            return realTimeResults;
        }

        boolean hasRealTimeResults() {
            return this.realTimeResults != null && !this.realTimeResults.isEmpty();
        }

        @NonNull
        @Override
        public String toString() {
            return JResult.class.getSimpleName() + "{" +
                    "realTimeResults=" + realTimeResults +
                    ", stopTimeResults=" + stopTimeResults +
                    '}';
        }

        /**
         * JSON model: one real-time prediction for a trip at a stop.
         */
        protected static class JRealTimeResult {

            private final int eTime;            // scheduled time this prediction refers to
            private final int lineDirId;        // line-direction ID (0/negative = absent)
            private final int realTime;         // predicted time (negative = absent)
            private final int stopId;
            private final int tripId;
            private final Boolean ignoreAdherence; // null = unknown

            JRealTimeResult(int eTime, int lineDirId, int realTime, int stopId, int tripId, Boolean ignoreAdherence) {
                this.eTime = eTime;
                this.lineDirId = lineDirId;
                this.realTime = realTime;
                this.stopId = stopId;
                this.tripId = tripId;
                this.ignoreAdherence = ignoreAdherence;
            }

            int getETime() {
                return eTime;
            }

            boolean hasLineDirId() {
                return this.lineDirId > 0;
            }

            int getLineDirId() {
                return lineDirId;
            }

            int getRealTime() {
                return realTime;
            }

            boolean hasRealTime() {
                return this.realTime >= 0;
            }

            int getTripId() {
                return tripId;
            }

            Boolean getIgnoreAdherence() {
                return ignoreAdherence;
            }

            @NonNull
            @Override
            public String toString() {
                // NOTE(review): stray leading ", " right after "{" — cosmetic only.
                return JRealTimeResult.class.getSimpleName() + "{" +
                        ", eTime=" + eTime +
                        ", lineDirId=" + lineDirId +
                        ", realTime=" + realTime +
                        ", stopId=" + stopId +
                        ", tripId=" + tripId +
                        ", ignoreAdherence=" + ignoreAdherence +
                        '}';
            }
        }

        /**
         * JSON model: static stop-time payload with its line descriptions.
         */
        protected static class JStopTimeResult {

            private final List<JLine> lines;
            private final List<JStopTime> stopTimes;

            JStopTimeResult(List<JLine> lines, List<JStopTime> stopTimes) {
                this.lines = lines;
                this.stopTimes = stopTimes;
            }

            List<JLine> getLines() {
                return lines;
            }

            boolean hasLines() {
                return this.lines != null && !this.lines.isEmpty();
            }

            List<JStopTime> getStopTimes() {
                return stopTimes;
            }

            boolean hasStopTimes() {
                return this.stopTimes != null && !this.stopTimes.isEmpty();
            }

            @NonNull
            @Override
            public String toString() {
                return JStopTimeResult.class.getSimpleName() + "{" +
                        "lines=" + lines +
                        ", stopTimes=" + stopTimes +
                        '}';
            }

            /**
             * JSON model: one line (route + direction) serving a stop.
             */
            protected static class JLine {

                private final String directionName;
                private final String lineAbbr;
                private final int lineDirId; // 0/negative = absent
                private final int stopId;

                JLine(String directionName, String lineAbbr, int lineDirId, int stopId) {
                    this.directionName = directionName;
                    this.lineAbbr = lineAbbr;
                    this.lineDirId = lineDirId;
                    this.stopId = stopId;
                }

                String getDirectionName() {
                    return directionName;
                }

                String getLineAbbr() {
                    return lineAbbr;
                }

                int getLineDirId() {
                    return lineDirId;
                }

                boolean hasLineDirId() {
                    return this.lineDirId > 0;
                }

                // Stop ID as a String (suffix "S" = "String" variant).
                String getStopIdS() {
                    return String.valueOf(this.stopId);
                }

                @NonNull
                @Override
                public String toString() {
                    return JLine.class.getSimpleName() + "{" +
                            "directionName='" + directionName + '\'' +
                            ", lineAbbr='" + lineAbbr + '\'' +
                            ", lineDirId=" + lineDirId +
                            ", stopId=" + stopId +
                            '}';
                }
            }

            /**
             * JSON model: one static (scheduled) stop time.
             */
            protected static class JStopTime {

                private final String destinationSign;
                private final int eTime;     // scheduled time value
                private final int lineDirId; // 0/negative = absent
                private final String stopId;
                private final int tripId;

                JStopTime(String destinationSign, int eTime, int lineDirId, String stopId, int tripId) {
                    this.destinationSign = destinationSign;
                    this.eTime = eTime;
                    this.lineDirId = lineDirId;
                    this.stopId = stopId;
                    this.tripId = tripId;
                }

                String getDestinationSign() {
                    return destinationSign;
                }

                int getETime() {
                    return eTime;
                }

                int getLineDirId() {
                    return lineDirId;
                }

                boolean hasLineDirId() {
                    return this.lineDirId > 0;
                }

                int getTripId() {
                    return tripId;
                }

                @NonNull
                @Override
                public String toString() {
                    // NOTE(review): stray leading ", " right after "{" — cosmetic only.
                    return JStopTime.class.getSimpleName() + "{" +
                            ", destinationSign='" + destinationSign + '\'' +
                            ", eTime=" + eTime +
                            ", lineDirId=" + lineDirId +
                            ", stopId='" + stopId + '\'' +
                            ", tripId=" + tripId +
                            '}';
                }
            }
        }
    }
}
/*
 * Copyright 2000-2014 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.client.ui;

import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.FocusEvent;
import com.google.gwt.event.dom.client.FocusHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyDownEvent;
import com.google.gwt.event.dom.client.KeyDownHandler;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.TextBoxBase;
import com.vaadin.client.ApplicationConnection;
import com.vaadin.client.BrowserInfo;
import com.vaadin.client.WidgetUtil;
import com.vaadin.shared.EventId;
import com.vaadin.shared.ui.textfield.TextFieldConstants;

/**
 * This class represents a basic text input field with one row.
 *
 * @author Vaadin Ltd.
 *
 */
public class VTextField extends TextBoxBase implements Field, ChangeHandler,
        FocusHandler, BlurHandler, KeyDownHandler {

    /**
     * The input node CSS classname.
     */
    public static final String CLASSNAME = "v-textfield";
    /**
     * This CSS classname is added to the input node when the field gains
     * focus and removed on blur (see {@link #onFocus}/{@link #onBlur}).
     */
    public static final String CLASSNAME_FOCUS = "focus";

    /** For internal use only. May be removed or replaced in the future. */
    public String paintableId;

    /** For internal use only. May be removed or replaced in the future. */
    public ApplicationConnection client;

    /** For internal use only. May be removed or replaced in the future. */
    public String valueBeforeEdit = null;

    /**
     * Set to false if a text change event has been sent since the last value
     * change event. This means that {@link #valueBeforeEdit} should not be
     * trusted when determining whether a text change event should be sent.
     */
    private boolean valueBeforeEditIsSynced = true;

    private boolean immediate = false;
    private int maxLength = -1;

    private static final String CLASSNAME_PROMPT = "prompt";
    private static final String TEXTCHANGE_MODE_TIMEOUT = "TIMEOUT";

    private String inputPrompt = null;
    // true while the DOM value is the input prompt, not user text
    private boolean prompting = false;
    private int lastCursorPos = -1;

    // used while checking if FF has set input prompt as value
    private boolean possibleInputError = false;

    public VTextField() {
        this(DOM.createInputText());
    }

    protected VTextField(Element node) {
        super(node);
        setStyleName(CLASSNAME);
        addChangeHandler(this);
        if (BrowserInfo.get().isIE() || BrowserInfo.get().isFirefox()) {
            // IE/FF need key-down handling for enter/esc quirks (see onKeyDown)
            addKeyDownHandler(this);
        }
        addFocusHandler(this);
        addBlurHandler(this);
    }

    /**
     * For internal use only. May be removed or replaced in the future.
     * <p>
     * TODO When GWT adds ONCUT, add it there and remove workaround. See
     * http://code.google.com/p/google-web-toolkit/issues/detail?id=4030
     * <p>
     * Also note that the cut/paste are not totally crossbrowsers compatible.
     * E.g. in Opera mac works via context menu, but on via File->Paste/Cut.
     * Opera might need the polling method for 100% working textchangeevents.
     * Eager polling for a change is bit dum and heavy operation, so I guess we
     * should first try to survive without.
     */
    public static final int TEXTCHANGE_EVENTS = Event.ONPASTE
            | Event.KEYEVENTS | Event.ONMOUSEUP;

    @Override
    public void onBrowserEvent(Event event) {
        super.onBrowserEvent(event);
        if (listenTextChangeEvents
                && (event.getTypeInt() & TEXTCHANGE_EVENTS) == event
                        .getTypeInt()) {
            deferTextChangeEvent();
        }
    }

    /*
     * TODO optimize this so that only changes are sent + make the value change
     * event just a flag that moves the current text to value
     */
    private String lastTextChangeString = null;

    private String getLastCommunicatedString() {
        return lastTextChangeString;
    }

    // Sends the current text to the server if it differs from the last sent one.
    private void communicateTextValueToServer() {
        String text = getText();
        if (prompting) {
            // Input prompt visible, text is actually ""
            text = "";
        }
        if (!text.equals(getLastCommunicatedString())) {
            if (valueBeforeEditIsSynced && text.equals(valueBeforeEdit)) {
                /*
                 * Value change for the current text has been enqueued since the
                 * last text change event was sent, but we can't know that it
                 * has been sent to the server. Ensure that all pending changes
                 * are sent now. Sending a value change without a text change
                 * will simulate a TextChangeEvent on the server.
                 */
                client.sendPendingVariableChanges();
            } else {
                // Default case - just send an immediate text change message
                client.updateVariable(paintableId,
                        TextFieldConstants.VAR_CUR_TEXT, text, true);
                // Shouldn't investigate valueBeforeEdit to avoid duplicate text
                // change events as the states are not in sync any more
                valueBeforeEditIsSynced = false;
            }
            lastTextChangeString = text;
        }
    }

    // Deferred trigger that batches DOM events into one server round-trip.
    private Timer textChangeEventTrigger = new Timer() {
        @Override
        public void run() {
            if (isAttached()) {
                updateCursorPosition();
                communicateTextValueToServer();
                scheduled = false;
            }
        }
    };

    private boolean scheduled = false;

    /** For internal use only. May be removed or replaced in the future. */
    public boolean listenTextChangeEvents;

    /** For internal use only. May be removed or replaced in the future. */
    public String textChangeEventMode;

    public int textChangeEventTimeout;

    // (Re)schedules the text change trigger; in TIMEOUT mode an already
    // scheduled trigger is left untouched so events fire at a fixed rate.
    private void deferTextChangeEvent() {
        if (textChangeEventMode.equals(TEXTCHANGE_MODE_TIMEOUT) && scheduled) {
            return;
        } else {
            textChangeEventTrigger.cancel();
        }
        textChangeEventTrigger.schedule(getTextChangeEventTimeout());
        scheduled = true;
    }

    private int getTextChangeEventTimeout() {
        return textChangeEventTimeout;
    }

    @Override
    public void setReadOnly(boolean readOnly) {
        boolean wasReadOnly = isReadOnly();
        if (readOnly) {
            setTabIndex(-1);
        } else if (wasReadOnly && !readOnly && getTabIndex() == -1) {
            /*
             * Need to manually set tab index to 0 since server will not send
             * the tab index if it is 0.
             */
            setTabIndex(0);
        }
        super.setReadOnly(readOnly);
    }

    /** For internal use only. May be removed or replaced in the future. */
    public void updateFieldContent(final String text) {
        // Show the prompt when a prompt is configured, the field is not
        // focused and the server-sent text is empty.
        setPrompting(inputPrompt != null && focusedTextField != this
                && (text.equals("")));
        String fieldValue;
        if (prompting) {
            fieldValue = isReadOnly() ? "" : inputPrompt;
            addStyleDependentName(CLASSNAME_PROMPT);
        } else {
            fieldValue = text;
            removeStyleDependentName(CLASSNAME_PROMPT);
        }
        setText(fieldValue);
        // Sync local bookkeeping with the server-sent value.
        lastTextChangeString = valueBeforeEdit = text;
        valueBeforeEditIsSynced = true;
    }

    protected void onCut() {
        if (listenTextChangeEvents) {
            deferTextChangeEvent();
        }
    }

    /** For internal use only. May be removed or replaced in the future. */
    // NOTE(review): the JSNI reference below looks mangled by text extraction
    // ("[email protected]…"); the original is most likely
    // "[email protected]::onCut()()" spelled with the full class name
    // (me.@com.vaadin.client.ui.VTextField::onCut()()) — restore before use.
    public native void attachCutEventListener(Element el)
    /*-{
        var me = this;
        el.oncut = $entry(function() {
            [email protected]::onCut()();
        });
    }-*/;

    protected native void detachCutEventListener(Element el)
    /*-{
        el.oncut = null;
    }-*/;

    private void onDrop() {
        if (focusedTextField == this) {
            return;
        }
        updateText(false);
    }

    private void updateText(boolean blurred) {
        String text = getText();
        setPrompting(inputPrompt != null && (text == null || text.isEmpty()));
        if (prompting) {
            setText(isReadOnly() ? "" : inputPrompt);
            if (blurred) {
                addStyleDependentName(CLASSNAME_PROMPT);
            }
        }
        valueChange(blurred);
    }

    // Defer so the drop's DOM mutation has happened before we read the text.
    private void scheduleOnDropEvent() {
        Scheduler.get().scheduleDeferred(new ScheduledCommand() {
            @Override
            public void execute() {
                onDrop();
            }
        });
    }

    // NOTE(review): JSNI reference mangled by extraction — see note above.
    private native void attachDropEventListener(Element el)
    /*-{
        var me = this;
        el.ondrop = $entry(function() {
            [email protected]::scheduleOnDropEvent()();
        });
    }-*/;

    private native void detachDropEventListener(Element el)
    /*-{
        el.ondrop = null;
    }-*/;

    @Override
    protected void onDetach() {
        super.onDetach();
        detachCutEventListener(getElement());
        if (focusedTextField == this) {
            focusedTextField = null;
        }
        if (BrowserInfo.get().isFirefox()) {
            removeOnInputListener(getElement());
            detachDropEventListener(getElement());
        }
    }

    @Override
    protected void onAttach() {
        super.onAttach();
        if (listenTextChangeEvents) {
            // NOTE(review): calls detachCutEventListener on attach — looks
            // like it should be attachCutEventListener; confirm upstream.
            detachCutEventListener(getElement());
        }
        if (BrowserInfo.get().isFirefox()) {
            // Workaround for FF setting input prompt as the value if esc is
            // pressed while the field is focused and empty (#8051).
            addOnInputListener(getElement());
            // Workaround for FF updating component's internal value after
            // having drag-and-dropped text from another element (#14056)
            attachDropEventListener(getElement());
        }
    }

    /** For internal use only. May be removed or replaced in the future. */
    public void setMaxLength(int newMaxLength) {
        if (newMaxLength == maxLength) {
            return;
        }
        maxLength = newMaxLength;
        updateMaxLength(maxLength);
    }

    /**
     * This method is responsible for updating the DOM or otherwise ensuring
     * that the given max length is enforced. Called when the max length for the
     * field has changed.
     *
     * @param maxLength
     *            The new max length
     */
    protected void updateMaxLength(int maxLength) {
        // NOTE(review): this body duplicates setMaxLengthToElement and then
        // calls it too, writing the DOM property twice — harmless but redundant.
        if (maxLength >= 0) {
            getElement().setPropertyInt("maxLength", maxLength);
        } else {
            getElement().removeAttribute("maxLength");
        }
        setMaxLengthToElement(maxLength);
    }

    protected void setMaxLengthToElement(int newMaxLength) {
        if (newMaxLength >= 0) {
            getElement().setPropertyInt("maxLength", newMaxLength);
        } else {
            getElement().removeAttribute("maxLength");
        }
    }

    public int getMaxLength() {
        return maxLength;
    }

    @Override
    public void onChange(ChangeEvent event) {
        valueChange(false);
    }

    /**
     * Called when the field value might have changed and/or the field was
     * blurred. These are combined so the blur event is sent in the same batch
     * as a possible value change event (these are often connected).
     *
     * @param blurred
     *            true if the field was blurred
     */
    public void valueChange(boolean blurred) {
        if (client != null && paintableId != null) {
            boolean sendBlurEvent = false;
            boolean sendValueChange = false;
            if (blurred && client.hasEventListeners(this, EventId.BLUR)) {
                sendBlurEvent = true;
                client.updateVariable(paintableId, EventId.BLUR, "", false);
            }
            String newText = prompting ? "" : getText();
            if (newText != null && !newText.equals(valueBeforeEdit)) {
                sendValueChange = immediate;
                client.updateVariable(paintableId, "text", newText, false);
                valueBeforeEdit = newText;
                valueBeforeEditIsSynced = true;
            }
            /*
             * also send cursor position, no public api yet but for easier
             * extension
             */
            updateCursorPosition();
            if (sendBlurEvent || sendValueChange) {
                /*
                 * Avoid sending text change event as we will simulate it on the
                 * server side before value change events.
                 */
                textChangeEventTrigger.cancel();
                scheduled = false;
                client.sendPendingVariableChanges();
            }
        }
    }

    /**
     * Updates the cursor position variable if it has changed since the last
     * update.
     *
     * @return true iff the value was updated
     */
    protected boolean updateCursorPosition() {
        if (WidgetUtil.isAttachedAndDisplayed(this)) {
            int cursorPos = prompting ? 0 : getCursorPos();
            if (lastCursorPos != cursorPos) {
                client.updateVariable(paintableId,
                        TextFieldConstants.VAR_CURSOR, cursorPos, false);
                lastCursorPos = cursorPos;
                return true;
            }
        }
        return false;
    }

    // The single currently-focused VTextField in this client, if any.
    private static VTextField focusedTextField;

    public static void flushChangesFromFocusedTextField() {
        if (focusedTextField != null) {
            focusedTextField.onChange(null);
        }
    }

    @Override
    public void onFocus(FocusEvent event) {
        addStyleDependentName(CLASSNAME_FOCUS);
        if (prompting) {
            // Clear the prompt so the user edits an empty field.
            setText("");
            removeStyleDependentName(CLASSNAME_PROMPT);
            setPrompting(false);
        }
        focusedTextField = this;
        if (client != null && client.hasEventListeners(this, EventId.FOCUS)) {
            client.updateVariable(paintableId, EventId.FOCUS, "", true);
        }
    }

    @Override
    public void onBlur(BlurEvent event) {
        // this is called twice on Chrome when e.g. changing tab while prompting
        // field focused - do not change settings on the second time
        if (focusedTextField != this) {
            return;
        }
        removeStyleDependentName(CLASSNAME_FOCUS);
        focusedTextField = null;
        updateText(true);
    }

    private void setPrompting(boolean prompting) {
        this.prompting = prompting;
    }

    public void setColumns(int columns) {
        if (columns <= 0) {
            return;
        }
        setWidth(columns + "em");
    }

    @Override
    public void onKeyDown(KeyDownEvent event) {
        if (BrowserInfo.get().isIE()
                && event.getNativeKeyCode() == KeyCodes.KEY_ENTER) {
            // IE does not send change events when pressing enter in a text
            // input so we handle it using a key listener instead
            valueChange(false);
        } else if (BrowserInfo.get().isFirefox()
                && event.getNativeKeyCode() == KeyCodes.KEY_ESCAPE
                && getText().equals("")) {
            // check after onInput event if inputPrompt has appeared as the
            // value of the field
            possibleInputError = true;
        }
    }

    public void setImmediate(boolean immediate) {
        this.immediate = immediate;
    }

    public void setInputPrompt(String inputPrompt) {
        this.inputPrompt = inputPrompt;
    }

    protected boolean isWordwrap() {
        String wrap = getElement().getAttribute("wrap");
        return !"off".equals(wrap);
    }

    // NOTE(review): JSNI reference mangled by extraction — see note above.
    private native void addOnInputListener(Element el)
    /*-{
        var self = this;
        el.oninput = $entry(function() {
            [email protected]::checkForInputError()();
        });
    }-*/;

    private native void removeOnInputListener(Element el)
    /*-{
        el.oninput = null;
    }-*/;

    // FF esc workaround (#8051): if the browser stuffed the prompt into the
    // field after esc, clear it so the prompt is not treated as user text.
    private void checkForInputError() {
        if (possibleInputError && getText().equals(inputPrompt)) {
            setText("");
        }
        possibleInputError = false;
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2019_02_01.implementation;

import com.microsoft.azure.arm.resources.collection.implementation.GroupableResourcesCoreImpl;
import com.microsoft.azure.management.network.v2019_02_01.VirtualNetworkGateways;
import com.microsoft.azure.management.network.v2019_02_01.VirtualNetworkGateway;
import rx.Observable;
import rx.Completable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import com.microsoft.azure.arm.resources.ResourceUtilsCore;
import com.microsoft.azure.arm.utils.RXMapper;
import rx.functions.Func1;
import com.microsoft.azure.PagedList;
import com.microsoft.azure.Page;
import com.microsoft.azure.management.network.v2019_02_01.BgpPeerStatusListResult;
import com.microsoft.azure.management.network.v2019_02_01.GatewayRouteListResult;
import com.microsoft.azure.management.network.v2019_02_01.VpnClientIPsecParameters;
import com.microsoft.azure.management.network.v2019_02_01.VpnClientParameters;
import com.microsoft.azure.management.network.v2019_02_01.VirtualNetworkGatewayConnectionListEntity;
import com.microsoft.azure.management.network.v2019_02_01.VpnDeviceScriptParameters;

/**
 * Implementation of the {@code VirtualNetworkGateways} fluent collection API.
 *
 * <p>Each operation delegates to the auto-generated {@code VirtualNetworkGatewaysInner}
 * client and wraps inner (wire) models into the fluent model types via the
 * {@code wrapModel(...)} / {@code *Impl} constructors. All {@code *Async} methods
 * return cold RxJava 1.x {@link Observable}/{@link Completable} instances.
 */
class VirtualNetworkGatewaysImpl
        extends GroupableResourcesCoreImpl<VirtualNetworkGateway, VirtualNetworkGatewayImpl, VirtualNetworkGatewayInner, VirtualNetworkGatewaysInner, NetworkManager>
        implements VirtualNetworkGateways {

    protected VirtualNetworkGatewaysImpl(NetworkManager manager) {
        // The inner collection client is obtained from the manager; the base class
        // keeps both and exposes them via inner() / manager().
        super(manager.inner().virtualNetworkGateways(), manager);
    }

    /** Fetches the raw inner model of a gateway by resource group and name. */
    @Override
    protected Observable<VirtualNetworkGatewayInner> getInnerAsync(String resourceGroupName, String name) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.getByResourceGroupAsync(resourceGroupName, name);
    }

    /** Deletes a gateway; completes (or errors) without emitting a value. */
    @Override
    protected Completable deleteInnerAsync(String resourceGroupName, String name) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.deleteAsync(resourceGroupName, name).toCompletable();
    }

    /**
     * Deletes the gateways identified by the given ARM resource ids in parallel.
     * Emits each id as its deletion finishes. Uses mergeDelayError so one failed
     * deletion does not cancel the others; errors are surfaced after all complete.
     */
    @Override
    public Observable<String> deleteByIdsAsync(Collection<String> ids) {
        if (ids == null || ids.isEmpty()) {
            // Nothing to delete — complete immediately.
            return Observable.empty();
        }
        Collection<Observable<String>> observables = new ArrayList<>();
        for (String id : ids) {
            final String resourceGroupName = ResourceUtilsCore.groupFromResourceId(id);
            final String name = ResourceUtilsCore.nameFromResourceId(id);
            // RXMapper.map replaces the (void) delete result with the original id.
            Observable<String> o = RXMapper.map(this.inner().deleteAsync(resourceGroupName, name), id);
            observables.add(o);
        }
        return Observable.mergeDelayError(observables);
    }

    /** Varargs convenience overload; delegates to the Collection overload. */
    @Override
    public Observable<String> deleteByIdsAsync(String...ids) {
        return this.deleteByIdsAsync(new ArrayList<String>(Arrays.asList(ids)));
    }

    /** Blocking variant of {@link #deleteByIdsAsync(Collection)}. */
    @Override
    public void deleteByIds(Collection<String> ids) {
        if (ids != null && !ids.isEmpty()) {
            // last() blocks until every merged deletion has completed.
            this.deleteByIdsAsync(ids).toBlocking().last();
        }
    }

    /** Varargs convenience overload; delegates to the Collection overload. */
    @Override
    public void deleteByIds(String...ids) {
        this.deleteByIds(new ArrayList<String>(Arrays.asList(ids)));
    }

    /** Synchronously lists gateways in a resource group as fluent models. */
    @Override
    public PagedList<VirtualNetworkGateway> listByResourceGroup(String resourceGroupName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return this.wrapList(client.listByResourceGroup(resourceGroupName));
    }

    /** Asynchronously lists gateways in a resource group, flattening pages into a stream of fluent models. */
    @Override
    public Observable<VirtualNetworkGateway> listByResourceGroupAsync(String resourceGroupName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.listByResourceGroupAsync(resourceGroupName)
        .flatMapIterable(new Func1<Page<VirtualNetworkGatewayInner>, Iterable<VirtualNetworkGatewayInner>>() {
            @Override
            public Iterable<VirtualNetworkGatewayInner> call(Page<VirtualNetworkGatewayInner> page) {
                return page.items();
            }
        })
        .map(new Func1<VirtualNetworkGatewayInner, VirtualNetworkGateway>() {
            @Override
            public VirtualNetworkGateway call(VirtualNetworkGatewayInner inner) {
                return wrapModel(inner);
            }
        });
    }

    /** Begins a fluent definition of a new gateway with the given name. */
    @Override
    public VirtualNetworkGatewayImpl define(String name) {
        return wrapModel(name);
    }

    /** Resets the gateway and emits the post-reset gateway wrapped as a fluent model. */
    @Override
    public Observable<VirtualNetworkGateway> resetAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.resetAsync(resourceGroupName, virtualNetworkGatewayName)
        .map(new Func1<VirtualNetworkGatewayInner, VirtualNetworkGateway>() {
            @Override
            public VirtualNetworkGateway call(VirtualNetworkGatewayInner inner) {
                return new VirtualNetworkGatewayImpl(inner.name(), inner, manager());
            }
        });
    }

    /** Resets the VPN client shared key; completion-only (no emitted value). */
    @Override
    public Completable resetVpnClientSharedKeyAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.resetVpnClientSharedKeyAsync(resourceGroupName, virtualNetworkGatewayName).toCompletable();
    }

    /** Generates a VPN client package; emits the package download URL as a String. */
    @Override
    public Observable<String> generatevpnclientpackageAsync(String resourceGroupName, String virtualNetworkGatewayName, VpnClientParameters parameters) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.generatevpnclientpackageAsync(resourceGroupName, virtualNetworkGatewayName, parameters);
    }

    /** Generates a VPN profile; emits the profile URL as a String. */
    @Override
    public Observable<String> generateVpnProfileAsync(String resourceGroupName, String virtualNetworkGatewayName, VpnClientParameters parameters) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.generateVpnProfileAsync(resourceGroupName, virtualNetworkGatewayName, parameters);
    }

    /** Retrieves the pre-generated VPN profile package URL. */
    @Override
    public Observable<String> getVpnProfilePackageUrlAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.getVpnProfilePackageUrlAsync(resourceGroupName, virtualNetworkGatewayName);
    }

    /** Gets the BGP peer status, wrapping the inner result into the fluent model. */
    @Override
    public Observable<BgpPeerStatusListResult> getBgpPeerStatusAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.getBgpPeerStatusAsync(resourceGroupName, virtualNetworkGatewayName)
        .map(new Func1<BgpPeerStatusListResultInner, BgpPeerStatusListResult>() {
            @Override
            public BgpPeerStatusListResult call(BgpPeerStatusListResultInner inner) {
                return new BgpPeerStatusListResultImpl(inner, manager());
            }
        });
    }

    /** Lists supported VPN device vendors/models as a raw String payload. */
    @Override
    public Observable<String> supportedVpnDevicesAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.supportedVpnDevicesAsync(resourceGroupName, virtualNetworkGatewayName);
    }

    /** Gets routes the gateway has learned, wrapped as a fluent model. */
    @Override
    public Observable<GatewayRouteListResult> getLearnedRoutesAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.getLearnedRoutesAsync(resourceGroupName, virtualNetworkGatewayName)
        .map(new Func1<GatewayRouteListResultInner, GatewayRouteListResult>() {
            @Override
            public GatewayRouteListResult call(GatewayRouteListResultInner inner) {
                return new GatewayRouteListResultImpl(inner, manager());
            }
        });
    }

    /** Gets routes advertised to the given BGP peer, wrapped as a fluent model. */
    @Override
    public Observable<GatewayRouteListResult> getAdvertisedRoutesAsync(String resourceGroupName, String virtualNetworkGatewayName, String peer) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.getAdvertisedRoutesAsync(resourceGroupName, virtualNetworkGatewayName, peer)
        .map(new Func1<GatewayRouteListResultInner, GatewayRouteListResult>() {
            @Override
            public GatewayRouteListResult call(GatewayRouteListResultInner inner) {
                return new GatewayRouteListResultImpl(inner, manager());
            }
        });
    }

    /** Sets the VPN client IPsec parameters and emits the applied parameters wrapped as a fluent model. */
    @Override
    public Observable<VpnClientIPsecParameters> setVpnclientIpsecParametersAsync(String resourceGroupName, String virtualNetworkGatewayName, VpnClientIPsecParametersInner vpnclientIpsecParams) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.setVpnclientIpsecParametersAsync(resourceGroupName, virtualNetworkGatewayName, vpnclientIpsecParams)
        .map(new Func1<VpnClientIPsecParametersInner, VpnClientIPsecParameters>() {
            @Override
            public VpnClientIPsecParameters call(VpnClientIPsecParametersInner inner) {
                return new VpnClientIPsecParametersImpl(inner, manager());
            }
        });
    }

    /** Gets the current VPN client IPsec parameters wrapped as a fluent model. */
    @Override
    public Observable<VpnClientIPsecParameters> getVpnclientIpsecParametersAsync(String resourceGroupName, String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.getVpnclientIpsecParametersAsync(resourceGroupName, virtualNetworkGatewayName)
        .map(new Func1<VpnClientIPsecParametersInner, VpnClientIPsecParameters>() {
            @Override
            public VpnClientIPsecParameters call(VpnClientIPsecParametersInner inner) {
                return new VpnClientIPsecParametersImpl(inner, manager());
            }
        });
    }

    /** Wraps an existing inner model into the fluent implementation type. */
    @Override
    protected VirtualNetworkGatewayImpl wrapModel(VirtualNetworkGatewayInner inner) {
        return new VirtualNetworkGatewayImpl(inner.name(), inner, manager());
    }

    /** Creates a fluent model over a brand-new (empty) inner model, used by define(). */
    @Override
    protected VirtualNetworkGatewayImpl wrapModel(String name) {
        return new VirtualNetworkGatewayImpl(name, new VirtualNetworkGatewayInner(), this.manager());
    }

    /** Helper to wrap a connection-list-entry inner model into its fluent type. */
    private VirtualNetworkGatewayConnectionListEntityImpl wrapVirtualNetworkGatewayConnectionListEntityModel(VirtualNetworkGatewayConnectionListEntityInner inner) {
        return new VirtualNetworkGatewayConnectionListEntityImpl(inner, manager());
    }

    /** Lists the connections of a gateway, flattening pages into a stream of fluent models. */
    @Override
    public Observable<VirtualNetworkGatewayConnectionListEntity> listConnectionsAsync(final String resourceGroupName, final String virtualNetworkGatewayName) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.listConnectionsAsync(resourceGroupName, virtualNetworkGatewayName)
        .flatMapIterable(new Func1<Page<VirtualNetworkGatewayConnectionListEntityInner>, Iterable<VirtualNetworkGatewayConnectionListEntityInner>>() {
            @Override
            public Iterable<VirtualNetworkGatewayConnectionListEntityInner> call(Page<VirtualNetworkGatewayConnectionListEntityInner> page) {
                return page.items();
            }
        })
        .map(new Func1<VirtualNetworkGatewayConnectionListEntityInner, VirtualNetworkGatewayConnectionListEntity>() {
            @Override
            public VirtualNetworkGatewayConnectionListEntity call(VirtualNetworkGatewayConnectionListEntityInner inner) {
                return wrapVirtualNetworkGatewayConnectionListEntityModel(inner);
            }
        });
    }

    /** Generates a device-specific VPN configuration script; emits the script text as a String. */
    @Override
    public Observable<String> vpnDeviceConfigurationScriptAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName, VpnDeviceScriptParameters parameters) {
        VirtualNetworkGatewaysInner client = this.inner();
        return client.vpnDeviceConfigurationScriptAsync(resourceGroupName, virtualNetworkGatewayConnectionName, parameters);
    }

}
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.wso2.carbon.apimgt.jms.listener.utils;

import org.apache.axis2.client.Options;

/**
 * Constants used by the JMS transport listener: default values, caching levels,
 * destination types, services.xml parameter names, and JMS message-context /
 * transport-header property names.
 */
public class JMSConstants {

    /**
     * The prefix indicating an Axis JMS URL
     */
    public static final String JMS_PREFIX = "jms:/";

    //------------------------------------ defaults / constants ------------------------------------
    /**
     * The local (Axis2) JMS connection factory name of the default connection
     * factory to be used, if a service does not explicitly state the connection
     * factory it should be using by a Parameter named JMSConstants.CONFAC_PARAM
     */
    public static final String DEFAULT_CONFAC_NAME = "default";

    /**
     * The default JMS timeout (in milliseconds) to wait for a reply
     */
    public static final long DEFAULT_JMS_TIMEOUT = Options.DEFAULT_TIMEOUT_MILLISECONDS;

    /**
     * Value indicating that a Queue is used
     */
    public static final String DESTINATION_TYPE_QUEUE = "queue";
    /**
     * Value indicating that a Topic is used
     */
    public static final String DESTINATION_TYPE_TOPIC = "topic";
    /**
     * Value indicating a JMS 1.1 Generic Destination
     */
    public static final String DESTINATION_TYPE_GENERIC = "generic";

    /**
     * Do not cache any JMS resources between tasks (when sending) or JMS CF's (when sending)
     */
    public static final int CACHE_NONE = 0;
    /**
     * Cache only the JMS connection between tasks (when receiving), or JMS CF's (when sending)
     */
    public static final int CACHE_CONNECTION = 1;
    /**
     * Cache only the JMS connection and Session between tasks (receiving), or JMS CF's (sending)
     */
    public static final int CACHE_SESSION = 2;
    /**
     * Cache the JMS connection, Session and Consumer between tasks when receiving
     */
    public static final int CACHE_CONSUMER = 3;
    /**
     * Cache the JMS connection, Session and Producer within a JMSConnectionFactory when sending
     */
    public static final int CACHE_PRODUCER = 4;
    /**
     * Automatic choice of an appropriate caching level (depending on the transaction strategy)
     */
    public static final int CACHE_AUTO = 5;

    /**
     * A JMS 1.1 Generic Destination type or ConnectionFactory
     */
    public static final int GENERIC = 0;
    /**
     * A Queue Destination type or ConnectionFactory
     */
    public static final int QUEUE = 1;
    /**
     * A Topic Destination type or ConnectionFactory
     */
    public static final int TOPIC = 2;

    /**
     * The EPR parameter name indicating the name of the message level property that indicates the content type.
     */
    public static final String CONTENT_TYPE_PROPERTY_PARAM = "transport.jms.ContentTypeProperty";

    //---------------------------------- services.xml parameters -----------------------------------
    /**
     * The Service level Parameter name indicating the JMS destination for requests of a service
     */
    public static final String PARAM_DESTINATION = "transport.jms.Destination";
    /**
     * The Service level Parameter name indicating the destination type for requests.
     */
    public static final String PARAM_DEST_TYPE = "transport.jms.DestinationType";
    /**
     * The Service level Parameter name indicating the [default] response destination of a service
     */
    public static final String PARAM_REPLY_DESTINATION = "transport.jms.ReplyDestination";
    /**
     * The Service level Parameter name indicating the response destination type
     */
    public static final String PARAM_REPLY_DEST_TYPE = "transport.jms.ReplyDestinationType";
    /**
     * The Parameter name of an Axis2 service, indicating the JMS connection
     * factory which should be used to listen for messages for it. This is
     * the local (Axis2) name of the connection factory and not the JNDI name
     */
    public static final String PARAM_JMS_CONFAC = "transport.jms.ConnectionFactory";
    /**
     * Connection factory type if using JMS 1.0, either DESTINATION_TYPE_QUEUE or DESTINATION_TYPE_TOPIC
     */
    public static final String PARAM_CONFAC_TYPE = "transport.jms.ConnectionFactoryType";
    /**
     * The Parameter name indicating the JMS connection factory JNDI name
     */
    public static final String PARAM_CONFAC_JNDI_NAME = "transport.jms.ConnectionFactoryJNDIName";
    /**
     * The Parameter indicating the expected content type for messages received by the service.
     */
    public static final String CONTENT_TYPE_PARAM = "transport.jms.ContentType";
    /**
     * The Parameter indicating a final EPR as a String, to be published on the WSDL of a service.
     * Could occur more than once, and could provide additional connection properties or a subset
     * of the properties auto computed. Also could replace IP addresses with hostnames, and expose
     * public credentials to clients. If a user specified this parameter, the auto generated EPR will
     * not be exposed - unless an instance of this parameter is added with the string "legacy".
     * This parameter could be used to expose EPR's conforming to the proposed SOAP/JMS spec
     * until such time full support is implemented for it.
     */
    public static final String PARAM_PUBLISH_EPR = "transport.jms.PublishEPR";
    /**
     * The parameter indicating the JMS API specification to be used - if this is "1.1" the JMS
     * 1.1 API would be used, else the JMS 1.0.2B
     */
    public static final String PARAM_JMS_SPEC_VER = "transport.jms.JMSSpecVersion";

    /**
     * The Parameter indicating whether the JMS Session should be transacted for the service.
     * Specified as a "true" or "false"
     */
    public static final String PARAM_SESSION_TRANSACTED = "transport.jms.SessionTransacted";
    /**
     * The Parameter indicating the Session acknowledgement for the service. Must be one of the
     * following Strings, or the appropriate Integer used by the JMS API:
     * "AUTO_ACKNOWLEDGE", "CLIENT_ACKNOWLEDGE", "DUPS_OK_ACKNOWLEDGE" or "SESSION_TRANSACTED"
     */
    public static final String PARAM_SESSION_ACK = "transport.jms.SessionAcknowledgement";
    /**
     * A message selector to be used when messages are sought for this service
     */
    public static final String PARAM_MSG_SELECTOR = "transport.jms.MessageSelector";
    /**
     * Is the Subscription durable? - "true" or "false"
     */
    public static final String PARAM_SUB_DURABLE = "transport.jms.SubscriptionDurable";
    /**
     * The name for the durable subscription
     */
    public static final String PARAM_DURABLE_SUB_NAME = "transport.jms.DurableSubscriberName";
    /**
     * JMS resource cacheable level to be used for the service; one of the CACHE_* levels above.
     */
    public static final String PARAM_CACHE_LEVEL = "transport.jms.CacheLevel";
    /**
     * Should a pub-sub connection receive messages published by itself?
     */
    public static final String PARAM_PUBSUB_NO_LOCAL = "transport.jms.PubSubNoLocal";
    /**
     * The number of milliseconds to wait for a message on a consumer.receive() call:
     * negative number - wait forever;
     * 0 - do not wait at all;
     * positive number - the number of milliseconds to wait
     */
    public static final String PARAM_RCV_TIMEOUT = "transport.jms.ReceiveTimeout";
    /**
     * The number of concurrent consumers to be created to poll for messages for this service.
     * For Topics, this should be ONE, to prevent receipt of multiple copies of the same message
     */
    public static final String PARAM_CONCURRENT_CONSUMERS = "transport.jms.ConcurrentConsumers";
    /**
     * The maximum number of concurrent consumers for the service
     */
    public static final String PARAM_MAX_CONSUMERS = "transport.jms.MaxConcurrentConsumers";
    /**
     * The number of idle (i.e. message-less) polling attempts before a worker task commits suicide,
     * to scale down resources, as load decreases
     */
    public static final String PARAM_IDLE_TASK_LIMIT = "transport.jms.IdleTaskLimit";
    /**
     * The maximum number of messages a polling worker task should process before suicide - to
     * prevent many longer running threads - default is unlimited (i.e. a worker task will live forever)
     */
    public static final String PARAM_MAX_MSGS_PER_TASK = "transport.jms.MaxMessagesPerTask";
    /**
     * Number of milliseconds before the first reconnection attempt is tried, on detection of an
     * error. Subsequent retries follow a geometric series, where
     * duration = previous duration * factor
     */
    public static final String PARAM_RECON_INIT_DURATION = "transport.jms.InitialReconnectDuration";
    // Multiplicative factor applied to the reconnect delay after each failed attempt.
    public static final String PARAM_RECON_FACTOR = "transport.jms.ReconnectProgressFactor";
    // Upper bound on the reconnect delay produced by the geometric back-off.
    public static final String PARAM_RECON_MAX_DURATION = "transport.jms.MaxReconnectDuration";

    /**
     * The username to use when obtaining a JMS Connection
     */
    public static final String PARAM_JMS_USERNAME = "transport.jms.UserName";
    /**
     * The password to use when obtaining a JMS Connection
     */
    public static final String PARAM_JMS_PASSWORD = "transport.jms.Password";

    // Client ID to use for the durable subscriber's JMS connection.
    public static final String PARAM_DURABLE_SUB_CLIENT_ID = "transport.jms.DurableSubscriberClientID";

    //-------------- message context / transport header properties and client options --------------
    /**
     * A MessageContext property or client Option indicating the JMS message type
     */
    public static final String JMS_MESSAGE_TYPE = "JMS_MESSAGE_TYPE";
    /**
     * The message type indicating a BytesMessage.
     */
    public static final String JMS_BYTE_MESSAGE = "JMS_BYTE_MESSAGE";
    /**
     * The message type indicating a TextMessage.
     */
    public static final String JMS_TEXT_MESSAGE = "JMS_TEXT_MESSAGE";
    /**
     * The message type indicating a MapMessage.
     */
    public static final String JMS_MAP_MESSAGE = "JMS_MAP_MESSAGE";
    /**
     * A MessageContext property or client Option indicating the time to wait for a response JMS message
     */
    public static final String JMS_WAIT_REPLY = "JMS_WAIT_REPLY";
    /**
     * A MessageContext property or client Option indicating the JMS correlation id.
     * NOTE: constant name misspells "CORRELATION"; kept as-is for backward compatibility.
     */
    public static final String JMS_COORELATION_ID = "JMS_COORELATION_ID";
    /**
     * A MessageContext property or client Option indicating the JMS message id
     */
    public static final String JMS_MESSAGE_ID = "JMS_MESSAGE_ID";
    /**
     * A MessageContext property or client Option indicating the JMS delivery mode as an Integer or String:
     * Value 1 - javax.jms.DeliveryMode.NON_PERSISTENT;
     * Value 2 - javax.jms.DeliveryMode.PERSISTENT
     */
    public static final String JMS_DELIVERY_MODE = "JMS_DELIVERY_MODE";
    /**
     * A MessageContext property or client Option indicating the JMS destination to use on a Send
     */
    public static final String JMS_DESTINATION = "JMS_DESTINATION";
    /**
     * A MessageContext property or client Option indicating the JMS message expiration - a Long value
     * specified as a String
     */
    public static final String JMS_EXPIRATION = "JMS_EXPIRATION";
    /**
     * A MessageContext property indicating if the message is a redelivery (Boolean as a String)
     */
    public static final String JMS_REDELIVERED = "JMS_REDELIVERED";
    /**
     * A MessageContext property or client Option indicating the JMS replyTo Destination
     */
    public static final String JMS_REPLY_TO = "JMS_REPLY_TO";
    /**
     * A MessageContext property or client Option indicating the JMS replyTo Destination type
     */
    public static final String JMS_REPLY_TO_TYPE = "JMS_REPLY_TO_TYPE";
    /**
     * A MessageContext property or client Option indicating the JMS timestamp (Long specified as String)
     */
    public static final String JMS_TIMESTAMP = "JMS_TIMESTAMP";
    /**
     * A MessageContext property indicating the JMS type String of the message
     */
    public static final String JMS_TYPE = "JMS_TYPE";
    /**
     * A MessageContext property or client Option indicating the JMS priority
     */
    public static final String JMS_PRIORITY = "JMS_PRIORITY";
    /**
     * A MessageContext property or client Option indicating the JMS time to live for the message sent
     */
    public static final String JMS_TIME_TO_LIVE = "JMS_TIME_TO_LIVE";

    /**
     * The prefix that denotes JMSX properties
     */
    public static final String JMSX_PREFIX = "JMSX";
    /**
     * The JMSXGroupID property
     */
    public static final String JMSX_GROUP_ID = "JMSXGroupID";
    /**
     * The JMSXGroupSeq property
     */
    public static final String JMSX_GROUP_SEQ = "JMSXGroupSeq";

    /**
     * Parameter for JNDI security credentials in JMS configs of axis2.xml
     */
    public static final String PARAM_NAMING_SECURITY_CREDENTIALS = "java.naming.security.credentials";

    // JNDI connection-factory entry names used when building the JMS configuration.
    public static final String CONNECTION_STRING_QUEUE = "connectionfactory.QueueConnectionFactory";
    public static final String CONNECTION_STRING_TOPIC = "connectionfactory.TopicConnectionFactory";

    //APIM default topic names
    public static final String TOPIC_THROTTLE_DATA = "throttleData";
    public static final String TOPIC_TOKEN_REVOCATION = "tokenRevocation";
}
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.

package org.jetbrains.kotlin.idea.caches.resolve;

import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.jetbrains.kotlin.test.TestRoot;
import org.junit.runner.RunWith;

/**
 * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}.
 * DO NOT MODIFY MANUALLY.
 *
 * <p>Each test method delegates to {@link #runTest(String)} with a test-data directory under
 * {@code testData/multiplatform}; the actual check is performed by
 * {@code AbstractMultiplatformAnalysisTest#doTest}. Method/directory names (including the
 * existing "hierarcicalActualization" misspelling) mirror the on-disk test-data layout and
 * must stay in sync with it.
 */
@SuppressWarnings("all")
@TestRoot("idea/tests")
@TestDataPath("$CONTENT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/multiplatform")
public class MultiplatformAnalysisTestGenerated extends AbstractMultiplatformAnalysisTest {
    // Common dispatch point: binds doTest to the given test-data path via KotlinTestUtils.
    private void runTest(String testDataFilePath) throws Exception {
        KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
    }

    @TestMetadata("aliasesTypeMismatch")
    public void testAliasesTypeMismatch() throws Exception {
        runTest("testData/multiplatform/aliasesTypeMismatch/");
    }

    @TestMetadata("builtinsAndStdlib")
    public void testBuiltinsAndStdlib() throws Exception {
        runTest("testData/multiplatform/builtinsAndStdlib/");
    }

    @TestMetadata("callableReferences")
    public void testCallableReferences() throws Exception {
        runTest("testData/multiplatform/callableReferences/");
    }

    @TestMetadata("constructorsOfExpect")
    public void testConstructorsOfExpect() throws Exception {
        runTest("testData/multiplatform/constructorsOfExpect/");
    }

    @TestMetadata("correctOverloadResolutionAmbiguity")
    public void testCorrectOverloadResolutionAmbiguity() throws Exception {
        runTest("testData/multiplatform/correctOverloadResolutionAmbiguity/");
    }

    @TestMetadata("diamondActualInBottom")
    public void testDiamondActualInBottom() throws Exception {
        runTest("testData/multiplatform/diamondActualInBottom/");
    }

    @TestMetadata("diamondActualOnOnePath")
    public void testDiamondActualOnOnePath() throws Exception {
        runTest("testData/multiplatform/diamondActualOnOnePath/");
    }

    @TestMetadata("diamondDuplicateActuals")
    public void testDiamondDuplicateActuals() throws Exception {
        runTest("testData/multiplatform/diamondDuplicateActuals/");
    }

    @TestMetadata("diamondModuleDependency1")
    public void testDiamondModuleDependency1() throws Exception {
        runTest("testData/multiplatform/diamondModuleDependency1/");
    }

    @TestMetadata("diamondModuleDependency2")
    public void testDiamondModuleDependency2() throws Exception {
        runTest("testData/multiplatform/diamondModuleDependency2/");
    }

    @TestMetadata("diamondSeesTwoActuals")
    public void testDiamondSeesTwoActuals() throws Exception {
        runTest("testData/multiplatform/diamondSeesTwoActuals/");
    }

    @TestMetadata("differentKindsOfDependencies")
    public void testDifferentKindsOfDependencies() throws Exception {
        runTest("testData/multiplatform/differentKindsOfDependencies/");
    }

    @TestMetadata("duplicateActualsExplicit")
    public void testDuplicateActualsExplicit() throws Exception {
        runTest("testData/multiplatform/duplicateActualsExplicit/");
    }

    @TestMetadata("duplicateActualsImplicit")
    public void testDuplicateActualsImplicit() throws Exception {
        runTest("testData/multiplatform/duplicateActualsImplicit/");
    }

    @TestMetadata("duplicateActualsOneWeaklyIncompatible")
    public void testDuplicateActualsOneWeaklyIncompatible() throws Exception {
        runTest("testData/multiplatform/duplicateActualsOneWeaklyIncompatible/");
    }

    @TestMetadata("duplicateActualsOneWithStrongIncompatibility")
    public void testDuplicateActualsOneWithStrongIncompatibility() throws Exception {
        runTest("testData/multiplatform/duplicateActualsOneWithStrongIncompatibility/");
    }

    @TestMetadata("duplicateExpectsExplicit")
    public void testDuplicateExpectsExplicit() throws Exception {
        runTest("testData/multiplatform/duplicateExpectsExplicit/");
    }

    @TestMetadata("duplicateExpectsImplicit")
    public void testDuplicateExpectsImplicit() throws Exception {
        runTest("testData/multiplatform/duplicateExpectsImplicit/");
    }

    @TestMetadata("duplicateExpectsWithStrongIncompatibility")
    public void testDuplicateExpectsWithStrongIncompatibility() throws Exception {
        runTest("testData/multiplatform/duplicateExpectsWithStrongIncompatibility/");
    }

    @TestMetadata("expectActualLineMarkers")
    public void testExpectActualLineMarkers() throws Exception {
        runTest("testData/multiplatform/expectActualLineMarkers/");
    }

    @TestMetadata("extensionOnExpect")
    public void testExtensionOnExpect() throws Exception {
        runTest("testData/multiplatform/extensionOnExpect/");
    }

    @TestMetadata("hierarcicalActualization")
    public void testHierarcicalActualization() throws Exception {
        runTest("testData/multiplatform/hierarcicalActualization/");
    }

    @TestMetadata("incompleteActualization")
    public void testIncompleteActualization() throws Exception {
        runTest("testData/multiplatform/incompleteActualization/");
    }

    @TestMetadata("internalFromDependsOn")
    public void testInternalFromDependsOn() throws Exception {
        runTest("testData/multiplatform/internalFromDependsOn/");
    }

    @TestMetadata("internalFromDependsOnOfProduction")
    public void testInternalFromDependsOnOfProduction() throws Exception {
        runTest("testData/multiplatform/internalFromDependsOnOfProduction/");
    }

    @TestMetadata("internalFromProduction")
    public void testInternalFromProduction() throws Exception {
        runTest("testData/multiplatform/internalFromProduction/");
    }

    @TestMetadata("jsNameClash")
    public void testJsNameClash() throws Exception {
        runTest("testData/multiplatform/jsNameClash/");
    }

    @TestMetadata("jvmDefaultNonMpp")
    public void testJvmDefaultNonMpp() throws Exception {
        runTest("testData/multiplatform/jvmDefaultNonMpp/");
    }

    @TestMetadata("kt41218")
    public void testKt41218() throws Exception {
        runTest("testData/multiplatform/kt41218/");
    }

    @TestMetadata("kt44898")
    public void testKt44898() throws Exception {
        runTest("testData/multiplatform/kt44898/");
    }

    @TestMetadata("lambdas")
    public void testLambdas() throws Exception {
        runTest("testData/multiplatform/lambdas/");
    }

    @TestMetadata("languageConstructions")
    public void testLanguageConstructions() throws Exception {
        runTest("testData/multiplatform/languageConstructions/");
    }

    @TestMetadata("multilevelParents")
    public void testMultilevelParents() throws Exception {
        runTest("testData/multiplatform/multilevelParents/");
    }

    @TestMetadata("multiplatformLibrary")
    public void testMultiplatformLibrary() throws Exception {
        runTest("testData/multiplatform/multiplatformLibrary/");
    }

    @TestMetadata("overrideExpect")
    public void testOverrideExpect() throws Exception {
        runTest("testData/multiplatform/overrideExpect/");
    }

    @TestMetadata("overrideExpectWithCompositeType")
    public void testOverrideExpectWithCompositeType() throws Exception {
        runTest("testData/multiplatform/overrideExpectWithCompositeType/");
    }

    @TestMetadata("platformSpecificChecksInCommon")
    public void testPlatformSpecificChecksInCommon() throws Exception {
        runTest("testData/multiplatform/platformSpecificChecksInCommon/");
    }

    @TestMetadata("qualifiedReceiver")
    public void testQualifiedReceiver() throws Exception {
        runTest("testData/multiplatform/qualifiedReceiver/");
    }

    @TestMetadata("recursiveTypes")
    public void testRecursiveTypes() throws Exception {
        runTest("testData/multiplatform/recursiveTypes/");
    }

    @TestMetadata("simple")
    public void testSimple() throws Exception {
        runTest("testData/multiplatform/simple/");
    }

    @TestMetadata("smartCastOnPropertyFromDependentModule")
    public void testSmartCastOnPropertyFromDependentModule() throws Exception {
        runTest("testData/multiplatform/smartCastOnPropertyFromDependentModule/");
    }

    @TestMetadata("supertypes")
    public void testSupertypes() throws Exception {
        runTest("testData/multiplatform/supertypes/");
    }

    @TestMetadata("transitiveDependencyOnCommonSourceSets")
    public void testTransitiveDependencyOnCommonSourceSets() throws Exception {
        runTest("testData/multiplatform/transitiveDependencyOnCommonSourceSets/");
    }

    @TestMetadata("typeAliasToExpectClassExplicitReference")
    public void testTypeAliasToExpectClassExplicitReference() throws Exception {
        runTest("testData/multiplatform/typeAliasToExpectClassExplicitReference/");
    }

    @TestMetadata("typeAliases")
    public void testTypeAliases() throws Exception {
        runTest("testData/multiplatform/typeAliases/");
    }

    @TestMetadata("typeParameters")
    public void testTypeParameters() throws Exception {
        runTest("testData/multiplatform/typeParameters/");
    }

    @TestMetadata("unresolvedInMultiplatformLibrary")
    public void testUnresolvedInMultiplatformLibrary() throws Exception {
        runTest("testData/multiplatform/unresolvedInMultiplatformLibrary/");
    }

    @TestMetadata("useCorrectBuiltIns")
    public void testUseCorrectBuiltIns() throws Exception {
        runTest("testData/multiplatform/useCorrectBuiltIns/");
    }

    @TestMetadata("weaklyIncompatibleActualInIntermediateModule")
    public void testWeaklyIncompatibleActualInIntermediateModule() throws Exception {
        runTest("testData/multiplatform/weaklyIncompatibleActualInIntermediateModule/");
    }

    @TestMetadata("whenExhaustivenessForSealed")
    public void testWhenExhaustivenessForSealed() throws Exception {
        runTest("testData/multiplatform/whenExhaustivenessForSealed/");
    }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * DeleteSpotDatafeedSubscriptionType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.1  Built on : Oct 19, 2009 (10:59:34 EDT)
 */

package com.amazon.ec2;

/**
 * DeleteSpotDatafeedSubscriptionType bean class.
 *
 * <p>NOTE(review): Axis2 ADB-generated bean for an empty-bodied request element
 * (the schema type has no child elements or attributes, hence the empty
 * element/attribute lists in {@link #getPullParser}). Do not hand-edit the
 * serialization logic; regenerate from the WSDL instead.
 */
public class DeleteSpotDatafeedSubscriptionType implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
       name = DeleteSpotDatafeedSubscriptionType
       Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
       Namespace Prefix = ns1
     */

    /**
     * Returns the canonical "ns1" prefix for the EC2 2010-11-15 namespace,
     * or a freshly generated unique prefix for any other namespace.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // The reader implementation does not recognize the property; treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily serialized OMElement.
     *
     * @param parentQName qualified name of the element this bean is serialized as
     * @param factory OM factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException {

        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, parentQName) {
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        DeleteSpotDatafeedSubscriptionType.this.serialize(parentQName, factory, xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName, factory, dataSource);
    }

    // Convenience overload: serialize without writing an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    /**
     * Writes this (empty) element to the stream. When {@code serializeType} is true,
     * an xsi:type attribute naming this schema type is also written.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                // Namespace already bound on the writer; reuse its prefix.
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            // No namespace: write an unqualified element.
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType) {
            java.lang.String namespacePrefix = registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2010-11-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        namespacePrefix + ":DeleteSpotDatafeedSubscriptionType",
                        xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        "DeleteSpotDatafeedSubscriptionType",
                        xmlWriter);
            }
        }

        // The type has no child content, so the element closes immediately.
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute, binding the QName's
     * namespace to a prefix first so the value is resolvable.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e. this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as character data, binding any
    // unbound namespaces first (namespace declarations cannot follow character data).
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the candidate prefix is not already bound to some URI.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     * (empty element/attribute lists because the schema type has no content).
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DeleteSpotDatafeedSubscriptionType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            DeleteSpotDatafeedSubscriptionType object = new DeleteSpotDatafeedSubscriptionType();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {

                // Advance to the start (or end) element that frames this object.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;

                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"DeleteSpotDatafeedSubscriptionType".equals(type)) {
                            // xsi:type names a subtype; delegate to the mapper for that type.
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (DeleteSpotDatafeedSubscriptionType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri, type, reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }//end of factory class

}
/*! * Copyright 2010 - 2018 Hitachi Vantara. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.pentaho.osgi.impl; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import org.apache.commons.lang.StringUtils; import org.apache.karaf.bundle.core.BundleService; import org.apache.karaf.bundle.core.BundleState; import org.apache.karaf.bundle.core.BundleStateService; import org.apache.karaf.features.BundleInfo; import org.apache.karaf.features.Dependency; import org.apache.karaf.features.Feature; import org.apache.karaf.features.FeaturesService; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.ServiceReference; import org.osgi.framework.wiring.BundleRequirement; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; import org.osgi.util.tracker.ServiceTracker; import org.pentaho.capabilities.api.ICapability; import org.pentaho.capabilities.api.ICapabilityManager; import org.pentaho.capabilities.impl.DefaultCapabilityManager; import org.pentaho.osgi.api.IKarafFeatureWatcher; import org.pentaho.platform.engine.core.system.PentahoSystem; import org.pentaho.platform.servicecoordination.api.IServiceBarrier; import org.pentaho.platform.servicecoordination.api.IServiceBarrierManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** 
* Created by nbaker on 2/19/15. */ public class KarafFeatureWatcherImpl implements IKarafFeatureWatcher { private BundleContext bundleContext; private long timeout; private Logger logger = LoggerFactory.getLogger( getClass() ); private static final String KARAF_TIMEOUT_PROPERTY = "karafWaitForBoot"; public KarafFeatureWatcherImpl( BundleContext bundleContext ) { this.bundleContext = bundleContext; // Default timeout of 2 minutes can be overridden in server.properties timeout = PentahoSystem.getApplicationContext().getProperty( KARAF_TIMEOUT_PROPERTY ) == null ? 2 * 60 * 1000L : Long .valueOf( PentahoSystem.getApplicationContext().getProperty( KARAF_TIMEOUT_PROPERTY ) ); } @Override public void waitForFeatures() throws FeatureWatcherException { // Start the serviceTracker timer ServiceTracker serviceTracker = new ServiceTracker( bundleContext, FeaturesService.class.getName(), null ); serviceTracker.open(); try { serviceTracker.waitForService( timeout ); } catch ( InterruptedException e ) { logger.debug( "FeaturesService " + FeaturesService.class.getName() + " ServiceTracker Interrupted" ); } ServiceReference<FeaturesService> serviceReference = bundleContext.getServiceReference( FeaturesService.class ); if ( serviceReference != null ) { FeaturesService featuresService = bundleContext.getService( serviceReference ); ServiceReference<ConfigurationAdmin> serviceReference1 = bundleContext.getServiceReference( ConfigurationAdmin.class ); ConfigurationAdmin configurationAdmin = bundleContext.getService( serviceReference1 ); try { List<String> requiredFeatures = new ArrayList<String>(); Configuration configuration = configurationAdmin.getConfiguration( "org.apache.karaf.features" ); String featuresBoot = (String) configuration.getProperties().get( "featuresBoot" ); String[] fs = featuresBoot.split( "," ); requiredFeatures.addAll( Arrays.asList( fs ) ); waitForFeatures( requiredFeatures, featuresService ); List<String> extraFeatures = new ArrayList<String>(); // Install 
extra features configuration = configurationAdmin.getConfiguration( "org.pentaho.features" ); if ( configuration != null && configuration.getProperties() != null ) { String extraFeaturesStr = (String) configuration.getProperties().get( "runtimeFeatures" ); if ( extraFeaturesStr != null ) { fs = extraFeaturesStr.split( "," ); extraFeatures.addAll( Arrays.asList( fs ) ); } ICapabilityManager manager = DefaultCapabilityManager.getInstance(); if ( manager != null ) { for ( String extraFeature : extraFeatures ) { ICapability capability = manager.getCapabilityById( extraFeature ); if ( capability != null && !capability.isInstalled() ) { capability.install(); } } } } waitForFeatures( extraFeatures, featuresService ); } catch ( IOException e ) { throw new FeatureWatcherException( "Error accessing ConfigurationAdmin", e ); } catch ( Exception e ) { throw new FeatureWatcherException( "Unknown error in KarafWatcher", e ); } finally { serviceTracker.close(); } } } private void waitForFeatures( List<String> requiredFeatures, FeaturesService featuresService ) throws Exception { long entryTime = System.currentTimeMillis(); // Loop through to see if features are all installed while ( true ) { List<String> uninstalledFeatures = new ArrayList<String>(); for ( String requiredFeature : requiredFeatures ) { requiredFeature = requiredFeature.trim(); Feature feature = featuresService.getFeature( requiredFeature ); if ( feature != null && featuresService.isInstalled( feature ) == false ) { uninstalledFeatures.add( requiredFeature ); } } if ( uninstalledFeatures.size() > 0 ) { if ( System.currentTimeMillis() - timeout > entryTime ) { IServiceBarrier serviceBarrier = IServiceBarrierManager.LOCATOR.getManager().getServiceBarrier( "KarafFeatureWatcherBarrier" ); if ( serviceBarrier == null || serviceBarrier.isAvailable() ) { logger.debug( getFeaturesReport( featuresService, uninstalledFeatures ) ); throw new FeatureWatcherException( "Timed out waiting for Karaf features to install: " + 
StringUtils .join( uninstalledFeatures, "," ) ); } else { entryTime = System.currentTimeMillis(); } } logger.debug( "KarafFeatureWatcher is waiting for the following features to install: " + StringUtils.join( uninstalledFeatures, "," ) ); Thread.sleep( 100 ); continue; } break; } } // All features report private String getFeaturesReport( FeaturesService featuresService, List<String> uninstalledFeatures ) throws Exception { ServiceReference<BundleService> serviceReferenceBundleService = bundleContext.getServiceReference( BundleService.class ); BundleService bundleService = bundleContext.getService( serviceReferenceBundleService ); List<BundleStateService> bundleStateServices = getBundleStateServices(); String featuresReport = System.lineSeparator() + "--------- Karaf Feature Watcher Report Begin ---------"; for ( String uninstalledFeature : uninstalledFeatures ) { Feature feature = featuresService.getFeature( uninstalledFeature ); featuresReport += System.lineSeparator() + getFeatureReport( featuresService, bundleService, bundleStateServices, feature ); } return featuresReport + System.lineSeparator() + "--------- Karaf Feature Watcher Report End ---------"; } // Single feature report private String getFeatureReport( FeaturesService featuresService, BundleService bundleService, List<BundleStateService> bundleStateServices, Feature feature ) throws Exception { String featureReport = ""; if ( feature.hasVersion() ) { featureReport += "Feature '" + feature.getName() + "' with version " + feature.getVersion() + " did not install."; } else { featureReport += "Feature '" + feature.getName() + "' did not install."; } // For this feature, we list its non active bundles with additional information if ( feature.getBundles() != null ) { boolean first = true; for ( BundleInfo bundleInfo : feature.getBundles() ) { Bundle bundle = bundleContext.getBundle( bundleInfo.getLocation() ); if ( bundleService.getInfo( bundle ).getState() != BundleState.Active ) { if ( first == true ) { 
featureReport += System.lineSeparator() + "The following bundle(s) are not active and they are contained in feature '" + feature.getName() + "'"; first = false; } featureReport += System.lineSeparator() + "\t" + getBundleReport( bundleService, bundleStateServices, bundle ).replaceAll( "\n", "\n\t" ); } } } // For this feature, we list its non installed features in a higher indent level if ( feature.getDependencies() != null ) { boolean first = true; for ( Dependency dependency : feature.getDependencies() ) { String dependencyName = dependency.getName(); String dependencyVersion = dependency.getVersion(); Feature dependencyFeature; if ( dependencyVersion != null && dependencyVersion.isEmpty() == false ) { dependencyFeature = featuresService.getFeature( dependencyName, dependencyVersion ); } else { dependencyFeature = featuresService.getFeature( dependencyName ); } if ( dependencyFeature != null && featuresService.isInstalled( dependencyFeature ) == false ) { if ( first == true ) { featureReport += System.lineSeparator() + "The following feature(s) are not active and they are contained in feature '" + feature.getName() + "'"; first = false; } featureReport += System.lineSeparator() + "\t" + getFeatureReport( featuresService, bundleService, bundleStateServices, dependencyFeature ).replaceAll( "\n", "\n\t" ); } first = false; } } return featureReport; } private List<BundleStateService> getBundleStateServices() throws InvalidSyntaxException { List<BundleStateService> bundleStateServices = new ArrayList<BundleStateService>(); Collection<ServiceReference<BundleStateService>> serviceReferenceBundleStateService = bundleContext.getServiceReferences( BundleStateService.class, null ); for ( ServiceReference<BundleStateService> bundleStateService : serviceReferenceBundleStateService ) { bundleStateServices.add( bundleContext.getService( bundleStateService ) ); } return bundleStateServices; } // Single bundle report private String getBundleReport( BundleService bundleService, 
List<BundleStateService> bundleStateServices, Bundle bundle ) { BundleState bundleState = bundleService.getInfo( bundle ).getState(); long bundleId = bundle.getBundleId(); String bundleName = bundle.getSymbolicName(); String bundleReport = "Bundle '" + bundleName + "':" + System.lineSeparator() + "\t Bundle State: " + bundleState + System .lineSeparator() + "\t Bundle ID: " + bundleId; // We loop through the available Bundle State Services and gather diagnostic information, if it exists. Usually, // there are two Bundle State Services, the BlueprintStateService and the SpringStateService. for ( BundleStateService bundleStateService : bundleStateServices ) { String part = bundleStateService.getDiag( bundle ); if ( part != null ) { bundleReport += bundleStateService.getName() + "\n"; bundleReport += part.replaceAll( "\n", "\n\t" ); } } // Unsatisfied Requirements for this bundle, includes optional requirements List<BundleRequirement> missingDependencies = bundleService.getUnsatisfiedRquirements( bundle, null ); if ( missingDependencies != null && missingDependencies.isEmpty() == false ) { bundleReport += System.lineSeparator() + "\t Unsatisfied Requirements:"; for ( BundleRequirement missDependency : missingDependencies ) { bundleReport += System.lineSeparator() + "\t\t" + missDependency; } } return bundleReport; } }
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.bridge;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

import org.apache.batik.gvt.font.GVTFontFamily;
import org.apache.batik.gvt.font.GVTFontFace;
import org.apache.batik.gvt.font.UnresolvedFontFamily;
import org.apache.batik.util.SVGConstants;
import org.apache.batik.anim.dom.SVGOMDocument;
import org.apache.batik.css.engine.CSSEngine;
import org.apache.batik.css.engine.FontFaceRule;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

/**
 * Utility class for SVG fonts.
 *
 * @author <a href="mailto:[email protected]">Bella Robinson</a>
 * @version $Id$
 */
public abstract class SVGFontUtilities implements SVGConstants {

    /**
     * Collects the font faces available to {@code doc}: every SVG
     * &lt;font-face&gt; element plus the CSS @font-face rules known to the
     * document's CSS engine.
     *
     * <p>NOTE(review): the result is NOT cached here; if the map already holds
     * a list for {@code doc} it is returned, but a newly built list is only
     * stored by the caller ({@link #getFontFamily}) — confirm intentional.
     */
    public static List getFontFaces(Document doc,
                                    BridgeContext ctx) {
        // check fontFamilyMap to see if we have already created an
        // FontFamily that matches
        Map fontFamilyMap = ctx.getFontFamilyMap();
        List ret = (List)fontFamilyMap.get(doc);
        if (ret != null)
            return ret;

        ret = new LinkedList();

        NodeList fontFaceElements = doc.getElementsByTagNameNS
            (SVG_NAMESPACE_URI, SVG_FONT_FACE_TAG);

        SVGFontFaceElementBridge fontFaceBridge;
        fontFaceBridge = (SVGFontFaceElementBridge)ctx.getBridge
            (SVG_NAMESPACE_URI, SVG_FONT_FACE_TAG);

        // One FontFace per <font-face> element in the document.
        for (int i = 0; i < fontFaceElements.getLength(); i++) {
            Element fontFaceElement = (Element)fontFaceElements.item(i);
            ret.add(fontFaceBridge.createFontFace
                    (ctx, fontFaceElement));
        }

        // Plus the CSS @font-face rules registered with the CSS engine.
        CSSEngine engine = ((SVGOMDocument)doc).getCSSEngine();
        List sms = engine.getFontFaces();
        for (Object sm : sms) {
            FontFaceRule ffr = (FontFaceRule) sm;
            ret.add(CSSFontFace.createCSSFontFace(engine, ffr));
        }
        return ret;
    }

    /**
     * Given a font family name tries to find a matching SVG font
     * object. If finds one, returns an SVGFontFamily otherwise
     * returns an UnresolvedFontFamily.
     *
     * @param textElement The text element that the font family will
     *                    be attached to.
     * @param ctx The bridge context, used to search for a matching
     *            SVG font element.
     * @param fontFamilyName The name of the font family to search
     *                       for.
     * @param fontWeight The weight of the font to use when trying to
     *                   match an SVG font family.
     * @param fontStyle The style of the font to use when trying to
     *                  match as SVG font family.
     *
     * @return A GVTFontFamily for the specified font attributes. This
     *         will be unresolved unless a matching SVG font was found.
     */
    public static GVTFontFamily getFontFamily(Element textElement,
                                             BridgeContext ctx,
                                             String fontFamilyName,
                                             String fontWeight,
                                             String fontStyle) {

        // TODO: should match against font-variant as well
        // Cache key combines family, weight and style.
        String fontKeyName = fontFamilyName.toLowerCase() + " " + // todo locale??
            fontWeight + " " + fontStyle;

        // check fontFamilyMap to see if we have already created an
        // FontFamily that matches
        Map fontFamilyMap = ctx.getFontFamilyMap();
        GVTFontFamily fontFamily =
            (GVTFontFamily)fontFamilyMap.get(fontKeyName);
        if (fontFamily != null) {
            return fontFamily;
        }

        // try to find a matching SVGFontFace element
        Document doc = textElement.getOwnerDocument();

        List fontFaces = (List)fontFamilyMap.get(doc);

        if (fontFaces == null) {
            fontFaces = getFontFaces(doc, ctx);
            // Cache the per-document font-face list (same map holds both
            // document-keyed lists and key-string-keyed families).
            fontFamilyMap.put(doc, fontFaces);
        }

        // Collect every font family whose name matches and whose style
        // is "all" or contains the requested style.
        Iterator iter = fontFaces.iterator();
        List svgFontFamilies = new LinkedList();
        while (iter.hasNext()) {
            FontFace fontFace = (FontFace)iter.next();

            if (!fontFace.hasFamilyName(fontFamilyName)) {
                continue;
            }

            String fontFaceStyle = fontFace.getFontStyle();
            if (fontFaceStyle.equals(SVG_ALL_VALUE) ||
                fontFaceStyle.indexOf(fontStyle) != -1) {
                GVTFontFamily ffam = fontFace.getFontFamily(ctx);
                if (ffam != null)
                    svgFontFamilies.add(ffam);
            }
        }

        if (svgFontFamilies.size() == 1) {
            // only found one matching svg font family
            fontFamilyMap.put(fontKeyName, svgFontFamilies.get(0));
            return (GVTFontFamily)svgFontFamilies.get(0);

        } else if (svgFontFamilies.size() > 1) {
            // need to find font face that matches the font-weight closest
            String fontWeightNumber = getFontWeightNumberString(fontWeight);

            // create lists of font weight numbers for each font family
            List fontFamilyWeights = new ArrayList(svgFontFamilies.size());
            for (Object svgFontFamily : svgFontFamilies) {
                GVTFontFace fontFace;
                fontFace = ((GVTFontFamily) svgFontFamily).getFontFace();
                String fontFaceWeight = fontFace.getFontWeight();
                fontFaceWeight = getFontWeightNumberString(fontFaceWeight);
                fontFamilyWeights.add(fontFaceWeight);
            }

            // make sure that each possible font-weight has been
            // assigned to a font-face, if not then need to "fill the
            // holes": each unclaimed weight 100..900 is appended to the
            // family whose declared weights are numerically closest.
            List newFontFamilyWeights = new ArrayList(fontFamilyWeights);
            for (int i = 100; i <= 900; i += 100) {
                String weightString = String.valueOf(i);
                boolean matched = false;
                int minDifference = 1000;
                int minDifferenceIndex = 0;
                // NOTE(review): closeness is computed against the ORIGINAL weight
                // lists (fontFamilyWeights) while additions go into the copy
                // (newFontFamilyWeights), so earlier fills don't affect later ones.
                for (int j = 0; j < fontFamilyWeights.size(); j++) {
                    String fontFamilyWeight = (String)fontFamilyWeights.get(j);
                    if (fontFamilyWeight.indexOf(weightString) > -1) {
                        matched = true;
                        break;
                    }
                    StringTokenizer st =
                        new StringTokenizer(fontFamilyWeight, " ,");
                    while (st.hasMoreTokens()) {
                        int weightNum = Integer.parseInt(st.nextToken());
                        int difference = Math.abs(weightNum - i);
                        if (difference < minDifference) {
                            minDifference = difference;
                            minDifferenceIndex = j;
                        }
                    }
                }
                if (!matched) {
                    String newFontFamilyWeight =
                        newFontFamilyWeights.get(minDifferenceIndex) +
                        ", " + weightString;
                    newFontFamilyWeights.set(minDifferenceIndex,
                                             newFontFamilyWeight);
                }
            }

            // now find matching font weight
            for (int i = 0; i < svgFontFamilies.size(); i++) {
                String fontFaceWeight = (String)newFontFamilyWeights.get(i);
                if (fontFaceWeight.indexOf(fontWeightNumber) > -1) {
                    fontFamilyMap.put(fontKeyName, svgFontFamilies.get(i));
                    return (GVTFontFamily)svgFontFamilies.get(i);
                }
            }

            // should not get here, just return the first svg font family
            fontFamilyMap.put(fontKeyName, svgFontFamilies.get(0));
            return (GVTFontFamily) svgFontFamilies.get(0);

        } else {
            // couldn't find one so return an UnresolvedFontFamily object
            GVTFontFamily gvtFontFamily =
                new UnresolvedFontFamily(fontFamilyName);
            fontFamilyMap.put(fontKeyName, gvtFontFamily);
            return gvtFontFamily;
        }
    }

    /**
     * Returns a string that contains all of the font weight numbers for the
     * specified font weight attribute value.
     *
     * @param fontWeight The font-weight attribute value.
     *
     * @return The font weight expressed as font weight numbers.
     *         e.g. "normal" becomes "400". Unrecognized values (including
     *         already-numeric weights) are returned unchanged.
     */
    protected static String getFontWeightNumberString(String fontWeight) {
        if (fontWeight.equals(SVG_NORMAL_VALUE)) {
            return SVG_400_VALUE;
        } else if (fontWeight.equals(SVG_BOLD_VALUE)) {
            return SVG_700_VALUE;
        } else if (fontWeight.equals(SVG_ALL_VALUE)) {
            return "100, 200, 300, 400, 500, 600, 700, 800, 900";
        }
        return fontWeight;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.operator;

import com.google.common.collect.ImmutableList;
import io.trino.block.BlockAssertions;
import io.trino.metadata.TestingFunctionResolution;
import io.trino.operator.AggregationOperator.AggregationOperatorFactory;
import io.trino.operator.aggregation.AccumulatorFactory;
import io.trino.operator.aggregation.InternalAggregationFunction;
import io.trino.spi.Page;
import io.trino.spi.block.Block;
import io.trino.spi.block.ByteArrayBlock;
import io.trino.spi.block.RunLengthEncodedBlock;
import io.trino.spi.type.TypeOperators;
import io.trino.sql.gen.JoinCompiler;
import io.trino.sql.planner.plan.AggregationNode.Step;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.tree.QualifiedName;
import io.trino.testing.MaterializedResult;
import io.trino.type.BlockTypeOperators;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;

import static com.google.common.collect.Iterables.getOnlyElement;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.trino.RowPagesBuilder.rowPagesBuilder;
import static io.trino.SessionTestUtils.TEST_SESSION;
import static io.trino.operator.OperatorAssertion.assertOperatorEquals;
import static io.trino.operator.OperatorAssertion.toPages;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
import static io.trino.testing.MaterializedResult.resultBuilder;
import static io.trino.testing.TestingTaskContext.createTaskContext;
import static java.util.Collections.emptyIterator;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

/**
 * Tests for {@code AggregationOperator} (ungrouped/global aggregation).
 *
 * <p>Covers: mask columns containing "dirty" nulls (a null position whose
 * backing byte is non-zero), DISTINCT aggregation with all-null masks,
 * a multi-function single-step aggregation, and memory accounting against
 * either the user or the system memory pool.
 *
 * <p>{@code singleThreaded = true} because the executors are recreated per
 * test method and shared through instance fields.
 */
@Test(singleThreaded = true)
public class TestAggregationOperator
{
    private static final TestingFunctionResolution FUNCTION_RESOLUTION = new TestingFunctionResolution();

    // Resolved aggregation implementations reused across test methods.
    private static final InternalAggregationFunction LONG_AVERAGE = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("avg"), fromTypes(BIGINT));
    private static final InternalAggregationFunction DOUBLE_SUM = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("sum"), fromTypes(DOUBLE));
    private static final InternalAggregationFunction LONG_SUM = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("sum"), fromTypes(BIGINT));
    private static final InternalAggregationFunction REAL_SUM = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("sum"), fromTypes(REAL));
    private static final InternalAggregationFunction COUNT = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("count"), ImmutableList.of());

    private ExecutorService executor;
    private ScheduledExecutorService scheduledExecutor;

    /** Creates fresh driver/scheduler executors before each test method. */
    @BeforeMethod
    public void setUp()
    {
        executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
        scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
    }

    /** Tears the executors down even if the test failed ({@code alwaysRun}). */
    @AfterMethod(alwaysRun = true)
    public void tearDown()
    {
        executor.shutdownNow();
        scheduledExecutor.shutdownNow();
    }

    /**
     * A null mask position may carry an arbitrary ("dirty") byte value; it must
     * still be treated as null, so only the single true non-null position counts.
     */
    @Test
    public void testMaskWithDirtyNulls()
    {
        List<Page> input = ImmutableList.of(new Page(
                4,
                BlockAssertions.createLongsBlock(1, 2, 3, 4),
                new ByteArrayBlock(
                        4,
                        Optional.of(new boolean[] {true, true, false, false}),
                        new byte[] {0, 27 /* dirty null */, 0, 75 /* non-zero value is true */})));

        OperatorFactory operatorFactory = new AggregationOperatorFactory(
                0,
                new PlanNodeId("test"),
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.of(1))),
                false);

        DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
                .addPipelineContext(0, true, true, false)
                .addDriverContext();

        // Only row 3 has a non-null, non-zero (true) mask byte.
        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT)
                .row(1L)
                .build();

        assertOperatorEquals(operatorFactory, driverContext, input, expected);
    }

    /**
     * DISTINCT count with a mask column that is entirely null (once as a plain
     * block, once run-length encoded) must filter out every row.
     */
    @Test
    public void testDistinctMaskWithNulls()
    {
        TypeOperators typeOperators = new TypeOperators();
        AccumulatorFactory distinctFactory = COUNT.bind(
                ImmutableList.of(0),
                Optional.of(1),
                ImmutableList.of(BIGINT, BOOLEAN),
                ImmutableList.of(),
                ImmutableList.of(),
                null,
                true, // distinct
                new JoinCompiler(typeOperators),
                new BlockTypeOperators(typeOperators),
                ImmutableList.of(),
                TEST_SESSION);

        DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
                .addPipelineContext(0, true, true, false)
                .addDriverContext();

        OperatorFactory operatorFactory = new AggregationOperatorFactory(
                0,
                new PlanNodeId("test"),
                Step.SINGLE,
                ImmutableList.of(distinctFactory),
                false);

        ByteArrayBlock trueMaskAllNull = new ByteArrayBlock(
                4,
                Optional.of(new boolean[] {true, true, true, true}), /* all positions are null */
                new byte[] {1, 1, 1, 1}); /* non-zero value is true, all masks are true */
        Block trueNullRleMask = new RunLengthEncodedBlock(trueMaskAllNull.getSingleValueBlock(0), 4);

        List<Page> nullTrueMaskInput = ImmutableList.of(
                new Page(4, BlockAssertions.createLongsBlock(1, 2, 3, 4), trueMaskAllNull),
                new Page(4, BlockAssertions.createLongsBlock(5, 6, 7, 8), trueNullRleMask));

        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT)
                .row(0L) // all rows should be filtered by nulls
                .build();

        assertOperatorEquals(operatorFactory, driverContext, nullTrueMaskInput, expected);
    }

    /**
     * Runs nine aggregations over one 100-row sequence page and checks the
     * single output row, then verifies all operator memory was released.
     */
    @Test
    public void testAggregation()
    {
        InternalAggregationFunction countVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("count"), fromTypes(VARCHAR));
        InternalAggregationFunction maxVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunctionImplementation(QualifiedName.of("max"), fromTypes(VARCHAR));
        List<Page> input = rowPagesBuilder(VARCHAR, BIGINT, VARCHAR, BIGINT, REAL, DOUBLE, VARCHAR)
                .addSequencePage(100, 0, 0, 300, 500, 400, 500, 500)
                .build();

        OperatorFactory operatorFactory = new AggregationOperatorFactory(
                0,
                new PlanNodeId("test"),
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty()),
                        LONG_SUM.bind(ImmutableList.of(1), Optional.empty()),
                        LONG_AVERAGE.bind(ImmutableList.of(1), Optional.empty()),
                        maxVarcharColumn.bind(ImmutableList.of(2), Optional.empty()),
                        countVarcharColumn.bind(ImmutableList.of(0), Optional.empty()),
                        LONG_SUM.bind(ImmutableList.of(3), Optional.empty()),
                        REAL_SUM.bind(ImmutableList.of(4), Optional.empty()),
                        DOUBLE_SUM.bind(ImmutableList.of(5), Optional.empty()),
                        maxVarcharColumn.bind(ImmutableList.of(6), Optional.empty())),
                false);

        DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
                .addPipelineContext(0, true, true, false)
                .addDriverContext();

        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT, DOUBLE, VARCHAR, BIGINT, BIGINT, REAL, DOUBLE, VARCHAR)
                .row(100L, 4950L, 49.5, "399", 100L, 54950L, 44950.0f, 54950.0, "599")
                .build();

        assertOperatorEquals(operatorFactory, driverContext, input, expected);
        assertEquals(driverContext.getSystemMemoryUsage(), 0);
        assertEquals(driverContext.getMemoryUsage(), 0);
    }

    /** Exercises memory tracking against both the user and system memory pools. */
    @Test
    public void testMemoryTracking()
            throws Exception
    {
        testMemoryTracking(false);
        testMemoryTracking(true);
    }

    /**
     * Verifies that while input is buffered only the selected pool (system vs.
     * user, per {@code useSystemMemory}) reports usage, and that both pools are
     * back to zero once the operator is drained and closed.
     */
    private void testMemoryTracking(boolean useSystemMemory)
            throws Exception
    {
        Page input = getOnlyElement(rowPagesBuilder(BIGINT).addSequencePage(100, 0).build());

        OperatorFactory operatorFactory = new AggregationOperatorFactory(
                0,
                new PlanNodeId("test"),
                Step.SINGLE,
                ImmutableList.of(LONG_SUM.bind(ImmutableList.of(0), Optional.empty())),
                useSystemMemory);

        DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
                .addPipelineContext(0, true, true, false)
                .addDriverContext();

        try (Operator operator = operatorFactory.createOperator(driverContext)) {
            assertTrue(operator.needsInput());
            operator.addInput(input);

            if (useSystemMemory) {
                assertThat(driverContext.getSystemMemoryUsage()).isGreaterThan(0);
                assertEquals(driverContext.getMemoryUsage(), 0);
            }
            else {
                assertEquals(driverContext.getSystemMemoryUsage(), 0);
                assertThat(driverContext.getMemoryUsage()).isGreaterThan(0);
            }
            toPages(operator, emptyIterator());
        }

        assertEquals(driverContext.getSystemMemoryUsage(), 0);
        assertEquals(driverContext.getMemoryUsage(), 0);
    }
}
package android.graphics;

/*
 * #%L
 * Matos
 * $Id:$
 * $HeadURL:$
 * %%
 * Copyright (C) 2010 - 2014 Orange SA
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * Stub of {@code android.graphics.Canvas} for the Matos analysis tool.
 *
 * <p>All method bodies are empty or return a default value ({@code 0},
 * {@code false}, {@code null}); only the API surface (signatures, fields,
 * constants) is preserved. NOTE(review): apparently generated — presumably
 * consumed for static analysis rather than executed; confirm before adding
 * behavior here.
 */
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public class Canvas {
  // Classes

  /** Stub of Canvas.EdgeType (quick-reject edge handling selector). */
  public static enum EdgeType {
    // Enum Constants
    BW(0), AA(0);
    // Fields
    public final int nativeInt = 0;
    // Constructors
    private EdgeType(int arg1) {
    }
    // Methods
  }

  /** Stub of Canvas.VertexMode (topology selector for drawVertices). */
  public static enum VertexMode {
    // Enum Constants
    TRIANGLES(0), TRIANGLE_STRIP(0), TRIANGLE_FAN(0);
    // Fields
    public final int nativeInt = 0;
    // Constructors
    private VertexMode(int arg1) {
    }
    // Methods
  }

  // Fields
  protected int mDensity;
  protected int mScreenDensity;
  public static final int DIRECTION_LTR = 0;
  public static final int DIRECTION_RTL = 1;
  // save() flag constants; ALL_SAVE_FLAG is the bitwise OR of the five flags.
  public static final int MATRIX_SAVE_FLAG = 1;
  public static final int CLIP_SAVE_FLAG = 2;
  public static final int HAS_ALPHA_LAYER_SAVE_FLAG = 4;
  public static final int FULL_COLOR_LAYER_SAVE_FLAG = 8;
  public static final int CLIP_TO_LAYER_SAVE_FLAG = 16;
  public static final int ALL_SAVE_FLAG = 31;

  // Constructors
  public Canvas(){ }
  public Canvas(Bitmap arg1){ }
  Canvas(int arg1){ }

  // Methods (all no-op stubs returning defaults)
  public void concat(Matrix arg1){ }
  public int save(){ return 0; }
  public int save(int arg1){ return 0; }
  public void rotate(float arg1){ }
  public final void rotate(float arg1, float arg2, float arg3){ }
  public void scale(float arg1, float arg2){ }
  public final void scale(float arg1, float arg2, float arg3, float arg4){ }
  public boolean isOpaque(){ return false; }
  protected static void checkRange(int arg1, int arg2, int arg3){ }
  public int getWidth(){ return 0; }
  public boolean isHardwareAccelerated(){ return false; }
  public int getHeight(){ return 0; }
  public void getMatrix(Matrix arg1){ }
  public final Matrix getMatrix(){ return (Matrix) null; }
  public void restore(){ }
  public boolean quickReject(RectF arg1, Canvas.EdgeType arg2){ return false; }
  public boolean quickReject(Path arg1, Canvas.EdgeType arg2){ return false; }
  public boolean quickReject(float arg1, float arg2, float arg3, float arg4, Canvas.EdgeType arg5){ return false; }
  public void translate(float arg1, float arg2){ }
  public void setBitmap(Bitmap arg1){ }
  public boolean clipRect(RectF arg1, Region.Op arg2){ return false; }
  public boolean clipRect(Rect arg1, Region.Op arg2){ return false; }
  public boolean clipRect(RectF arg1){ return false; }
  public boolean clipRect(Rect arg1){ return false; }
  public boolean clipRect(float arg1, float arg2, float arg3, float arg4, Region.Op arg5){ return false; }
  public boolean clipRect(float arg1, float arg2, float arg3, float arg4){ return false; }
  public boolean clipRect(int arg1, int arg2, int arg3, int arg4){ return false; }
  public void restoreToCount(int arg1){ }
  public int saveLayerAlpha(RectF arg1, int arg2, int arg3){ return 0; }
  public int saveLayerAlpha(float arg1, float arg2, float arg3, float arg4, int arg5, int arg6){ return 0; }
  public int saveLayer(RectF arg1, Paint arg2, int arg3){ return 0; }
  public int saveLayer(float arg1, float arg2, float arg3, float arg4, Paint arg5, int arg6){ return 0; }
  public void drawBitmap(Bitmap arg1, float arg2, float arg3, Paint arg4){ }
  public void drawBitmap(Bitmap arg1, Rect arg2, RectF arg3, Paint arg4){ }
  public void drawBitmap(Bitmap arg1, Rect arg2, Rect arg3, Paint arg4){ }
  public void drawBitmap(int [] arg1, int arg2, int arg3, float arg4, float arg5, int arg6, int arg7, boolean arg8, Paint arg9){ }
  public void drawBitmap(int [] arg1, int arg2, int arg3, int arg4, int arg5, int arg6, int arg7, boolean arg8, Paint arg9){ }
  public void drawBitmap(Bitmap arg1, Matrix arg2, Paint arg3){ }
  public void setViewport(int arg1, int arg2){ }
  public void setDensity(int arg1){ }
  public int getSaveCount(){ return 0; }
  public void drawRect(RectF arg1, Paint arg2){ }
  public void drawRect(Rect arg1, Paint arg2){ }
  public void drawRect(float arg1, float arg2, float arg3, float arg4, Paint arg5){ }
  public void drawColor(int arg1){ }
  public void drawColor(int arg1, PorterDuff.Mode arg2){ }
  public void drawPaint(Paint arg1){ }
  public void drawLine(float arg1, float arg2, float arg3, float arg4, Paint arg5){ }
  public void drawPicture(Picture arg1){ }
  public void drawPicture(Picture arg1, RectF arg2){ }
  public void drawPicture(Picture arg1, Rect arg2){ }
  public void setDrawFilter(DrawFilter arg1){ }
  public void drawText(char [] arg1, int arg2, int arg3, float arg4, float arg5, Paint arg6){ }
  public void drawText(java.lang.String arg1, float arg2, float arg3, Paint arg4){ }
  public void drawText(java.lang.String arg1, int arg2, int arg3, float arg4, float arg5, Paint arg6){ }
  public void drawText(java.lang.CharSequence arg1, int arg2, int arg3, float arg4, float arg5, Paint arg6){ }
  public void drawPath(Path arg1, Paint arg2){ }
  public int getDensity(){ return 0; }
  public void drawPatch(Bitmap arg1, byte [] arg2, RectF arg3, Paint arg4){ }
  public void drawBitmapMesh(Bitmap arg1, int arg2, int arg3, float [] arg4, int arg5, int [] arg6, int arg7, Paint arg8){ }
  public void drawCircle(float arg1, float arg2, float arg3, Paint arg4){ }
  public void drawLines(float [] arg1, int arg2, int arg3, Paint arg4){ }
  public void drawLines(float [] arg1, Paint arg2){ }
  public void drawOval(RectF arg1, Paint arg2){ }
  public void drawPoint(float arg1, float arg2, Paint arg3){ }
  public void drawPoints(float [] arg1, int arg2, int arg3, Paint arg4){ }
  public void drawPoints(float [] arg1, Paint arg2){ }
  public void drawPosText(char [] arg1, int arg2, int arg3, float [] arg4, Paint arg5){ }
  public void drawPosText(java.lang.String arg1, float [] arg2, Paint arg3){ }
  public void drawRoundRect(RectF arg1, float arg2, float arg3, Paint arg4){ }
  public void drawTextOnPath(char [] arg1, int arg2, int arg3, Path arg4, float arg5, float arg6, Paint arg7){ }
  public void drawTextOnPath(java.lang.String arg1, Path arg2, float arg3, float arg4, Paint arg5){ }
  public void drawTextRun(char [] arg1, int arg2, int arg3, int arg4, int arg5, float arg6, float arg7, int arg8, Paint arg9){ }
  public void drawTextRun(java.lang.CharSequence arg1, int arg2, int arg3, int arg4, int arg5, float arg6, float arg7, int arg8, Paint arg9){ }
  public void drawVertices(Canvas.VertexMode arg1, int arg2, float [] arg3, int arg4, float [] arg5, int arg6, int [] arg7, int arg8, short [] arg9, int arg10, int arg11, Paint arg12){ }
  public int getMaximumBitmapWidth(){ return 0; }
  public int getMaximumBitmapHeight(){ return 0; }
  public boolean clipPath(Path arg1, Region.Op arg2){ return false; }
  public boolean clipPath(Path arg1){ return false; }
  public boolean clipRegion(Region arg1, Region.Op arg2){ return false; }
  public boolean clipRegion(Region arg1){ return false; }
  public boolean getClipBounds(Rect arg1){ return false; }
  public final Rect getClipBounds(){ return (Rect) null; }
  public void skew(float arg1, float arg2){ }
  public void setMatrix(Matrix arg1){ }
  public DrawFilter getDrawFilter(){ return (DrawFilter) null; }
  public void drawArc(RectF arg1, float arg2, float arg3, boolean arg4, Paint arg5){ }
  public void drawARGB(int arg1, int arg2, int arg3, int arg4){ }
  public void drawRGB(int arg1, int arg2, int arg3){ }
  @java.lang.Deprecated
  protected javax.microedition.khronos.opengles.GL getGL(){ return (javax.microedition.khronos.opengles.GL) null; }
  public void setScreenDensity(int arg1){ }
  public static void freeCaches(){ }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.buildtool; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Range; import com.google.devtools.build.lib.actions.ActionCacheChecker; import com.google.devtools.build.lib.actions.ActionExecutionException; import com.google.devtools.build.lib.actions.ActionExecutionStatusReporter; import com.google.devtools.build.lib.actions.ActionInputFileCache; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.BuildFailedException; import com.google.devtools.build.lib.actions.Executor; import com.google.devtools.build.lib.actions.MissingInputFileException; import com.google.devtools.build.lib.actions.ResourceManager; import com.google.devtools.build.lib.actions.TestExecException; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.buildtool.buildevent.ExecutionProgressReceiverAvailableEvent; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.Reporter; import com.google.devtools.build.lib.packages.BuildFileNotFoundException; import com.google.devtools.build.lib.rules.test.TestProvider; import 
com.google.devtools.build.lib.skyframe.ActionExecutionInactivityWatchdog; import com.google.devtools.build.lib.skyframe.AspectValue; import com.google.devtools.build.lib.skyframe.Builder; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; import com.google.devtools.build.lib.util.AbruptExitException; import com.google.devtools.build.lib.util.ExitCode; import com.google.devtools.build.lib.util.LoggingUtil; import com.google.devtools.build.lib.util.Preconditions; import com.google.devtools.build.lib.vfs.ModifiedFileSet; import com.google.devtools.build.skyframe.CycleInfo; import com.google.devtools.build.skyframe.ErrorInfo; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.SkyKey; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; import javax.annotation.Nullable; /** * A {@link Builder} implementation driven by Skyframe. 
 */
@VisibleForTesting
public class SkyframeBuilder implements Builder {

  private final SkyframeExecutor skyframeExecutor;
  private final boolean keepGoing;
  private final int numJobs;
  private final boolean finalizeActionsToOutputService;
  private final ModifiedFileSet modifiedOutputFiles;
  private final ActionInputFileCache fileCache;
  private final ActionCacheChecker actionCacheChecker;
  private final int progressReportInterval;

  /**
   * Creates a builder that drives action execution through the given
   * {@link SkyframeExecutor}.
   *
   * @param skyframeExecutor evaluator used to build artifacts and run tests
   * @param actionCacheChecker decides which actions can be skipped as cached
   * @param keepGoing whether to continue building after the first failure
   * @param numJobs maximum number of concurrent jobs
   * @param modifiedOutputFiles output files known to have been modified since
   *     the last build
   * @param finalizeActionsToOutputService passed through to artifact builds
   * @param fileCache input-file metadata cache installed on the executor
   * @param progressReportInterval interval used by the inactivity watchdog
   */
  @VisibleForTesting
  public SkyframeBuilder(SkyframeExecutor skyframeExecutor, ActionCacheChecker actionCacheChecker,
      boolean keepGoing, int numJobs, ModifiedFileSet modifiedOutputFiles,
      boolean finalizeActionsToOutputService, ActionInputFileCache fileCache,
      int progressReportInterval) {
    this.skyframeExecutor = skyframeExecutor;
    this.actionCacheChecker = actionCacheChecker;
    this.keepGoing = keepGoing;
    this.numJobs = numJobs;
    this.finalizeActionsToOutputService = finalizeActionsToOutputService;
    this.modifiedOutputFiles = modifiedOutputFiles;
    this.fileCache = fileCache;
    this.progressReportInterval = progressReportInterval;
  }

  /**
   * Builds the requested artifacts and runs tests: first one Skyframe
   * evaluation for all artifacts plus parallel tests, then one evaluation per
   * exclusive test. Failure exit codes from each phase are collected and, if
   * any, rethrown as a {@link BuildFailedException} at the end (under
   * keepGoing, the highest-priority exit code wins).
   */
  @Override
  public void buildArtifacts(
      Reporter reporter,
      Set<Artifact> artifacts,
      Set<ConfiguredTarget> parallelTests,
      Set<ConfiguredTarget> exclusiveTests,
      Collection<ConfiguredTarget> targetsToBuild,
      Collection<AspectValue> aspects,
      Executor executor,
      Set<ConfiguredTarget> builtTargets,
      boolean explain,
      @Nullable Range<Long> lastExecutionTimeRange)
      throws BuildFailedException, AbruptExitException, TestExecException, InterruptedException {
    skyframeExecutor.prepareExecution(modifiedOutputFiles, lastExecutionTimeRange);
    skyframeExecutor.setFileCache(fileCache);
    // Note that executionProgressReceiver accesses builtTargets concurrently (after wrapping in a
    // synchronized collection), so unsynchronized access to this variable is unsafe while it runs.
    ExecutionProgressReceiver executionProgressReceiver =
        new ExecutionProgressReceiver(Preconditions.checkNotNull(builtTargets),
            countTestActions(exclusiveTests), skyframeExecutor.getEventBus());
    skyframeExecutor
        .getEventBus()
        .post(new ExecutionProgressReceiverAvailableEvent(executionProgressReceiver));

    ResourceManager.instance().setEventBus(skyframeExecutor.getEventBus());

    List<ExitCode> exitCodes = new LinkedList<>();
    EvaluationResult<?> result;

    ActionExecutionStatusReporter statusReporter = ActionExecutionStatusReporter.create(
        reporter, executor, skyframeExecutor.getEventBus());

    AtomicBoolean isBuildingExclusiveArtifacts = new AtomicBoolean(false);
    ActionExecutionInactivityWatchdog watchdog =
        new ActionExecutionInactivityWatchdog(
            executionProgressReceiver.createInactivityMonitor(statusReporter),
            executionProgressReceiver.createInactivityReporter(statusReporter,
                isBuildingExclusiveArtifacts), progressReportInterval);

    skyframeExecutor.setActionExecutionProgressReportingObjects(executionProgressReceiver,
        executionProgressReceiver, statusReporter);
    watchdog.start();

    try {
      result =
          skyframeExecutor.buildArtifacts(
              reporter,
              executor,
              artifacts,
              targetsToBuild,
              aspects,
              parallelTests,
              /*exclusiveTesting=*/ false,
              keepGoing,
              explain,
              finalizeActionsToOutputService,
              numJobs,
              actionCacheChecker,
              executionProgressReceiver);
      // progressReceiver is finished, so unsynchronized access to builtTargets is now safe.
      Optional<ExitCode> exitCode = processResult(reporter, result, keepGoing, skyframeExecutor);

      Preconditions.checkState(
          exitCode != null
              || result.keyNames().size()
                  == (artifacts.size()
                      + targetsToBuild.size()
                      + aspects.size()
                      + parallelTests.size()),
          "Build reported as successful but not all artifacts and targets built: %s, %s",
          result,
          artifacts);

      if (exitCode != null) {
        exitCodes.add(exitCode.orNull());
      }

      // Run exclusive tests: either tagged as "exclusive" or is run in an invocation with
      // --test_output=streamed.
      isBuildingExclusiveArtifacts.set(true);
      for (ConfiguredTarget exclusiveTest : exclusiveTests) {
        // Since only one artifact is being built at a time, we don't worry about an artifact being
        // built and then the build being interrupted.
        result =
            skyframeExecutor.buildArtifacts(
                reporter,
                executor,
                ImmutableSet.<Artifact>of(),
                targetsToBuild,
                aspects,
                ImmutableSet.of(exclusiveTest),
                /*exclusiveTesting=*/ true,
                keepGoing,
                explain,
                finalizeActionsToOutputService,
                numJobs,
                actionCacheChecker,
                null);
        exitCode = processResult(reporter, result, keepGoing, skyframeExecutor);
        Preconditions.checkState(
            exitCode != null || !result.keyNames().isEmpty(),
            "Build reported as successful but test %s not executed: %s",
            exclusiveTest,
            result);

        if (exitCode != null) {
          exitCodes.add(exitCode.orNull());
        }
      }
    } finally {
      // Always unwind the reporting/event-bus wiring set up above, even on failure.
      watchdog.stop();
      ResourceManager.instance().unsetEventBus();
      skyframeExecutor.setActionExecutionProgressReportingObjects(null, null, null);
      statusReporter.unregisterFromEventBus();
    }

    if (!exitCodes.isEmpty()) {
      if (keepGoing) {
        // Use the exit code with the highest priority.
        throw new BuildFailedException(
            null, Collections.max(exitCodes, ExitCodeComparator.INSTANCE));
      } else {
        throw new BuildFailedException();
      }
    }
  }

  /**
   * Process the Skyframe update, taking into account the keepGoing setting.
   *
   * <p> Returns optional {@link ExitCode} based on following conditions:
   * 1. null, if result had no errors.
   * 2. Optional.absent(), if result had errors but none of the errors specified an exit code.
   * 3. Optional.of(e), if result had errors and one of them specified exit code 'e'.
   * Throws on fail-fast failures.
   */
  @Nullable
  private static Optional<ExitCode> processResult(
      EventHandler eventHandler,
      EvaluationResult<?> result,
      boolean keepGoing,
      SkyframeExecutor skyframeExecutor)
      throws BuildFailedException, TestExecException {
    if (result.hasError()) {
      // Report all cycles before deciding how to fail.
      for (Map.Entry<SkyKey, ErrorInfo> entry : result.errorMap().entrySet()) {
        Iterable<CycleInfo> cycles = entry.getValue().getCycleInfo();
        skyframeExecutor.reportCycles(eventHandler, cycles, entry.getKey());
      }

      if (result.getCatastrophe() != null) {
        rethrow(result.getCatastrophe());
      }
      if (keepGoing) {
        // If build fails and keepGoing is true, an exit code is assigned using reported errors
        // in the following order:
        //   1. First infrastructure error with non-null exit code
        //   2. First non-infrastructure error with non-null exit code
        //   3. Null (later default to 1)
        ExitCode exitCode = null;
        for (Map.Entry<SkyKey, ErrorInfo> error : result.errorMap().entrySet()) {
          Throwable cause = error.getValue().getException();
          if (cause instanceof ActionExecutionException) {
            ActionExecutionException actionExecutionCause = (ActionExecutionException) cause;
            ExitCode code = actionExecutionCause.getExitCode();
            // Update global exit code when current exit code is not null and global exit code has
            // a lower 'reporting' priority.
            if (ExitCodeComparator.INSTANCE.compare(code, exitCode) > 0) {
              exitCode = code;
            }
          }
        }
        return Optional.fromNullable(exitCode);
      }
      ErrorInfo errorInfo = Preconditions.checkNotNull(result.getError(), result);
      Exception exception = errorInfo.getException();
      if (exception == null) {
        Preconditions.checkState(!Iterables.isEmpty(errorInfo.getCycleInfo()), errorInfo);
        // If a keepGoing=false build found a cycle, that means there were no other errors thrown
        // during evaluation (otherwise, it wouldn't have bothered to find a cycle). So the best
        // we can do is throw a generic build failure exception, since we've already reported the
        // cycles above.
        throw new BuildFailedException(null, /*hasCatastrophe=*/ false);
      } else {
        rethrow(exception);
      }
    }
    return null;
  }

  /** Figure out why an action's execution failed and rethrow the right kind of exception. */
  @VisibleForTesting
  public static void rethrow(Throwable cause) throws BuildFailedException, TestExecException {
    Throwable innerCause = cause.getCause();
    if (innerCause instanceof TestExecException) {
      throw (TestExecException) innerCause;
    }
    if (cause instanceof ActionExecutionException) {
      ActionExecutionException actionExecutionCause = (ActionExecutionException) cause;
      // Sometimes ActionExecutionExceptions are caused by Actions with no owner.
      String message =
          (actionExecutionCause.getLocation() != null)
              ? (actionExecutionCause.getLocation().print() + " " + cause.getMessage())
              : cause.getMessage();
      throw new BuildFailedException(
          message,
          actionExecutionCause.isCatastrophe(),
          actionExecutionCause.getAction(),
          actionExecutionCause.getRootCauses(),
          /*errorAlreadyShown=*/ !actionExecutionCause.showError(),
          actionExecutionCause.getExitCode());
    } else if (cause instanceof MissingInputFileException) {
      throw new BuildFailedException(cause.getMessage());
    } else if (cause instanceof BuildFileNotFoundException) {
      // Sadly, this can happen because we may load new packages during input discovery. Any
      // failures reading those packages shouldn't terminate the build, but in Skyframe they do.
      LoggingUtil.logToRemote(Level.WARNING, "undesirable loading exception", cause);
      throw new BuildFailedException(cause.getMessage());
    } else if (cause instanceof RuntimeException) {
      throw (RuntimeException) cause;
    } else if (cause instanceof Error) {
      throw (Error) cause;
    } else {
      // We encountered an exception we don't think we should have encountered. This can indicate
      // a bug in our code, such as lower level exceptions not being properly handled, or in our
      // expectations in this method.
      throw new IllegalArgumentException(
          "action terminated with " + "unexpected exception: " + cause.getMessage(), cause);
    }
  }

  /** Returns the total number of test status artifacts across the given test targets. */
  private static int countTestActions(Iterable<ConfiguredTarget> testTargets) {
    int count = 0;
    for (ConfiguredTarget testTarget : testTargets) {
      count += TestProvider.getTestStatusArtifacts(testTarget).size();
    }
    return count;
  }

  /**
   * A comparator to determine the reporting priority of {@link ExitCode}.
   *
   * <p> Priority: infrastructure exit codes > non-infrastructure exit codes > null exit codes.
   */
  private static class ExitCodeComparator implements Comparator<ExitCode> {
    private static final ExitCodeComparator INSTANCE = new ExitCodeComparator();

    @Override
    public int compare(ExitCode c1, ExitCode c2) {
      // returns POSITIVE result when the priority of c1 is HIGHER than the priority of c2
      return getPriority(c1) - getPriority(c2);
    }

    // Priorities are bounded in [0, 2], so plain subtraction above cannot overflow.
    private int getPriority(ExitCode code) {
      if (code == null) {
        return 0;
      } else {
        return code.isInfrastructureFailure() ? 2 : 1;
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.web.dao.impl;

import static org.apache.nifi.util.StringUtils.isEmpty;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;

import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.connectable.Position;
import org.apache.nifi.controller.FlowController;
import org.apache.nifi.controller.exception.ValidationException;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.groups.RemoteProcessGroup;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.remote.RemoteGroupPort;
import org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol;
import org.apache.nifi.util.FormatUtils;
import org.apache.nifi.web.ResourceNotFoundException;
import org.apache.nifi.web.api.dto.BatchSettingsDTO;
import org.apache.nifi.web.api.dto.DtoFactory;
import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO;
import org.apache.nifi.web.api.dto.RemoteProcessGroupPortDTO;
import org.apache.nifi.web.dao.RemoteProcessGroupDAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * DAO for creating, reading, updating, validating and deleting
 * {@link RemoteProcessGroup}s and their remote input/output ports.
 * All lookups are resolved from the flow's root process group.
 */
public class StandardRemoteProcessGroupDAO extends ComponentDAO implements RemoteProcessGroupDAO {

    private static final Logger logger = LoggerFactory.getLogger(StandardRemoteProcessGroupDAO.class);

    private FlowController flowController;

    /**
     * Resolves the remote process group with the given id anywhere under the root group.
     *
     * @throws ResourceNotFoundException if no such remote process group exists
     */
    private RemoteProcessGroup locateRemoteProcessGroup(final String remoteProcessGroupId) {
        final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
        final RemoteProcessGroup remoteProcessGroup = rootGroup.findRemoteProcessGroup(remoteProcessGroupId);

        if (remoteProcessGroup == null) {
            throw new ResourceNotFoundException(String.format("Unable to find remote process group with id '%s'.", remoteProcessGroupId));
        } else {
            return remoteProcessGroup;
        }
    }

    @Override
    public boolean hasRemoteProcessGroup(String remoteProcessGroupId) {
        final ProcessGroup rootGroup = flowController.getGroup(flowController.getRootGroupId());
        return rootGroup.findRemoteProcessGroup(remoteProcessGroupId) != null;
    }

    /**
     * Creates a remote process group reference.
     *
     * @param remoteProcessGroupDTO The remote process group
     * @return The remote process group
     */
    @Override
    public RemoteProcessGroup createRemoteProcessGroup(String groupId, RemoteProcessGroupDTO remoteProcessGroupDTO) {
        ProcessGroup group = locateProcessGroup(flowController, groupId);

        if (remoteProcessGroupDTO.getParentGroupId() != null && !flowController.areGroupsSame(groupId, remoteProcessGroupDTO.getParentGroupId())) {
            throw new IllegalArgumentException("Cannot specify a different Parent Group ID than the Group to which the Remote Process Group is being added.");
        }

        final String targetUris = remoteProcessGroupDTO.getTargetUris();
        if (targetUris == null || targetUris.length() == 0) {
            throw new IllegalArgumentException("Cannot add a Remote Process Group without specifying the Target URI(s)");
        }

        // create the remote process group
        RemoteProcessGroup remoteProcessGroup = flowController.createRemoteProcessGroup(remoteProcessGroupDTO.getId(), targetUris);
        remoteProcessGroup.initialize();

        // set other properties
        updateRemoteProcessGroup(remoteProcessGroup, remoteProcessGroupDTO);

        // get the group to add the remote process group to
        group.addRemoteProcessGroup(remoteProcessGroup);

        return remoteProcessGroup;
    }

    /**
     * Gets the specified remote process group.
     *
     * @param remoteProcessGroupId The remote process group id
     * @return The remote process group
     */
    @Override
    public RemoteProcessGroup getRemoteProcessGroup(String remoteProcessGroupId) {
        final RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        return remoteProcessGroup;
    }

    /**
     * Gets all of the remote process groups.
     *
     * @return The remote process groups
     */
    @Override
    public Set<RemoteProcessGroup> getRemoteProcessGroups(String groupId) {
        final ProcessGroup group = locateProcessGroup(flowController, groupId);
        final Set<RemoteProcessGroup> remoteProcessGroups = group.getRemoteProcessGroups();
        return remoteProcessGroups;
    }

    @Override
    public void verifyUpdate(RemoteProcessGroupDTO remoteProcessGroup) {
        verifyUpdate(locateRemoteProcessGroup(remoteProcessGroup.getId()), remoteProcessGroup);
    }

    /**
     * Verifies the specified remote group can be updated, if necessary.
     */
    private void verifyUpdate(RemoteProcessGroup remoteProcessGroup, RemoteProcessGroupDTO remoteProcessGroupDto) {
        // see if the remote process group can start/stop transmitting
        if (isNotNull(remoteProcessGroupDto.isTransmitting())) {
            if (!remoteProcessGroup.isTransmitting() && remoteProcessGroupDto.isTransmitting()) {
                remoteProcessGroup.verifyCanStartTransmitting();
            } else if (remoteProcessGroup.isTransmitting() && !remoteProcessGroupDto.isTransmitting()) {
                remoteProcessGroup.verifyCanStopTransmitting();
            }
        }

        // validate the proposed configuration
        final List<String> requestValidation = validateProposedRemoteProcessGroupConfiguration(remoteProcessGroupDto);

        // ensure there was no validation errors
        if (!requestValidation.isEmpty()) {
            throw new ValidationException(requestValidation);
        }

        // if any remote group properties are changing, verify update
        if (isAnyNotNull(remoteProcessGroupDto.getYieldDuration(),
                remoteProcessGroupDto.getLocalNetworkInterface(),
                remoteProcessGroupDto.getCommunicationsTimeout(),
                remoteProcessGroupDto.getProxyHost(),
                remoteProcessGroupDto.getProxyPort(),
                remoteProcessGroupDto.getProxyUser(),
                remoteProcessGroupDto.getProxyPassword())) {
            remoteProcessGroup.verifyCanUpdate();
        }
    }

    @Override
    public void verifyUpdateInputPort(String remoteProcessGroupId, RemoteProcessGroupPortDTO remoteProcessGroupPortDto) {
        final RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        final RemoteGroupPort port = remoteProcessGroup.getInputPort(remoteProcessGroupPortDto.getId());

        if (port == null) {
            throw new ResourceNotFoundException(
                    String.format("Unable to find remote process group input port with id '%s'.", remoteProcessGroupPortDto.getId()));
        }

        verifyUpdatePort(port, remoteProcessGroupPortDto);
    }

    @Override
    public void verifyUpdateOutputPort(String remoteProcessGroupId, RemoteProcessGroupPortDTO remoteProcessGroupPortDto) {
        final RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        final RemoteGroupPort port = remoteProcessGroup.getOutputPort(remoteProcessGroupPortDto.getId());

        if (port == null) {
            throw new ResourceNotFoundException(
                    String.format("Unable to find remote process group output port with id '%s'.", remoteProcessGroupPortDto.getId()));
        }

        verifyUpdatePort(port, remoteProcessGroupPortDto);
    }

    /**
     * Verified the specified remote port can be updated, if necessary.
     */
    private void verifyUpdatePort(RemoteGroupPort port, RemoteProcessGroupPortDTO remoteProcessGroupPortDto) {
        // see if the remote process group can start/stop transmitting
        if (isNotNull(remoteProcessGroupPortDto.isTransmitting())) {
            if (!port.isRunning() && remoteProcessGroupPortDto.isTransmitting()) {
                port.verifyCanStart();
            } else if (port.isRunning() && !remoteProcessGroupPortDto.isTransmitting()) {
                port.verifyCanStop();
            }
        }

        // validate the proposed configuration
        final List<String> requestValidation = validateProposedRemoteProcessGroupPortConfiguration(port, remoteProcessGroupPortDto);

        // ensure there was no validation errors
        if (!requestValidation.isEmpty()) {
            throw new ValidationException(requestValidation);
        }

        // verify update when appropriate
        if (isAnyNotNull(remoteProcessGroupPortDto.getConcurrentlySchedulableTaskCount(),
                remoteProcessGroupPortDto.getUseCompression(),
                remoteProcessGroupPortDto.getBatchSettings())) {
            port.verifyCanUpdate();
        }
    }

    /**
     * Validates the proposed configuration for the specified remote port.
     */
    private List<String> validateProposedRemoteProcessGroupPortConfiguration(RemoteGroupPort remoteGroupPort, RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
        final List<String> validationErrors = new ArrayList<>();

        // ensure the proposed port configuration is valid
        if (isNotNull(remoteProcessGroupPortDTO.getConcurrentlySchedulableTaskCount()) && remoteProcessGroupPortDTO.getConcurrentlySchedulableTaskCount() <= 0) {
            validationErrors.add(String.format("Concurrent tasks for port '%s' must be a positive integer.", remoteGroupPort.getName()));
        }

        final BatchSettingsDTO batchSettingsDTO = remoteProcessGroupPortDTO.getBatchSettings();
        if (batchSettingsDTO != null) {
            final Integer batchCount = batchSettingsDTO.getCount();
            if (isNotNull(batchCount) && batchCount < 0) {
                validationErrors.add(String.format("Batch count for port '%s' must be a positive integer.", remoteGroupPort.getName()));
            }

            final String batchSize = batchSettingsDTO.getSize();
            if (isNotNull(batchSize) && batchSize.length() > 0
                    && !DataUnit.DATA_SIZE_PATTERN.matcher(batchSize.trim().toUpperCase()).matches()) {
                validationErrors.add(String.format("Batch size for port '%s' must be of format <Data Size> <Data Unit>"
                        + " where <Data Size> is a non-negative integer and <Data Unit> is a supported Data"
                        + " Unit, such as: B, KB, MB, GB, TB", remoteGroupPort.getName()));
            }

            final String batchDuration = batchSettingsDTO.getDuration();
            if (isNotNull(batchDuration) && batchDuration.length() > 0
                    && !FormatUtils.TIME_DURATION_PATTERN.matcher(batchDuration.trim().toLowerCase()).matches()) {
                validationErrors.add(String.format("Batch duration for port '%s' must be of format <duration> <TimeUnit>"
                        + " where <duration> is a non-negative integer and TimeUnit is a supported Time Unit, such "
                        + "as: nanos, millis, secs, mins, hrs, days", remoteGroupPort.getName()));
            }
        }

        return validationErrors;
    }

    /**
     * Validates the proposed configuration for the specified remote group.
     */
    private List<String> validateProposedRemoteProcessGroupConfiguration(RemoteProcessGroupDTO remoteProcessGroupDTO) {
        final List<String> validationErrors = new ArrayList<>();

        if (isNotNull(remoteProcessGroupDTO.getCommunicationsTimeout())) {
            Matcher yieldMatcher = FormatUtils.TIME_DURATION_PATTERN.matcher(remoteProcessGroupDTO.getCommunicationsTimeout());
            if (!yieldMatcher.matches()) {
                validationErrors.add("Communications timeout is not a valid time duration (ie 30 sec, 5 min)");
            }
        }
        if (isNotNull(remoteProcessGroupDTO.getYieldDuration())) {
            Matcher yieldMatcher = FormatUtils.TIME_DURATION_PATTERN.matcher(remoteProcessGroupDTO.getYieldDuration());
            if (!yieldMatcher.matches()) {
                validationErrors.add("Yield duration is not a valid time duration (ie 30 sec, 5 min)");
            }
        }

        String proxyPassword = remoteProcessGroupDTO.getProxyPassword();
        String proxyUser = remoteProcessGroupDTO.getProxyUser();
        String proxyHost = remoteProcessGroupDTO.getProxyHost();
        if (isNotNull(remoteProcessGroupDTO.getProxyPort())) {
            if (isEmpty(proxyHost)) {
                validationErrors.add("Proxy port was specified, but proxy host was empty.");
            }
        }
        if (!isEmpty(proxyUser)) {
            if (isEmpty(proxyHost)) {
                validationErrors.add("Proxy user name was specified, but proxy host was empty.");
            }
            if (isEmpty(proxyPassword)) {
                validationErrors.add("User password should be specified if Proxy server needs user authentication.");
            }
        }
        if (!isEmpty(proxyPassword)) {
            if (isEmpty(proxyHost)) {
                validationErrors.add("Proxy user password was specified, but proxy host was empty.");
            }
            // BUGFIX: this previously re-checked isEmpty(proxyPassword), which is always false
            // inside this branch, so a password supplied without a user name was never flagged.
            // The error message refers to the user name, so check proxyUser here.
            if (isEmpty(proxyUser)) {
                validationErrors.add("User name should be specified if Proxy server needs user authentication.");
            }
        }

        return validationErrors;
    }

    @Override
    public RemoteGroupPort updateRemoteProcessGroupInputPort(String remoteProcessGroupId, RemoteProcessGroupPortDTO remoteProcessGroupPortDto) {
        final RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        final RemoteGroupPort port = remoteProcessGroup.getInputPort(remoteProcessGroupPortDto.getId());

        if (port == null) {
            throw new ResourceNotFoundException(
                    String.format("Unable to find remote process group input port with id '%s'.", remoteProcessGroupPortDto.getId()));
        }

        // verify the update
        verifyUpdatePort(port, remoteProcessGroupPortDto);

        // perform the update
        updatePort(port, remoteProcessGroupPortDto, remoteProcessGroup);
        remoteProcessGroup.getProcessGroup().onComponentModified();

        return port;
    }

    @Override
    public RemoteGroupPort updateRemoteProcessGroupOutputPort(String remoteProcessGroupId, RemoteProcessGroupPortDTO remoteProcessGroupPortDto) {
        final RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        final RemoteGroupPort port = remoteProcessGroup.getOutputPort(remoteProcessGroupPortDto.getId());

        if (port == null) {
            // consistency fix: report the missing PORT id (not the group id), matching the
            // other three port-lookup sites in this class
            throw new ResourceNotFoundException(
                    String.format("Unable to find remote process group output port with id '%s'.", remoteProcessGroupPortDto.getId()));
        }

        // verify the update
        verifyUpdatePort(port, remoteProcessGroupPortDto);

        // perform the update
        updatePort(port, remoteProcessGroupPortDto, remoteProcessGroup);
        remoteProcessGroup.getProcessGroup().onComponentModified();

        return port;
    }

    /**
     * Applies port settings from the DTO, then starts or stops the port if requested.
     *
     * @param port Port instance to be updated.
     * @param remoteProcessGroupPortDto DTO containing updated remote process group port settings.
     * @param remoteProcessGroup If remoteProcessGroupPortDto has updated isTransmitting input,
     *                           this method will start or stop the port in this remoteProcessGroup as necessary.
     */
    private void updatePort(RemoteGroupPort port, RemoteProcessGroupPortDTO remoteProcessGroupPortDto, RemoteProcessGroup remoteProcessGroup) {
        if (isNotNull(remoteProcessGroupPortDto.getConcurrentlySchedulableTaskCount())) {
            port.setMaxConcurrentTasks(remoteProcessGroupPortDto.getConcurrentlySchedulableTaskCount());
        }

        if (isNotNull(remoteProcessGroupPortDto.getUseCompression())) {
            port.setUseCompression(remoteProcessGroupPortDto.getUseCompression());
        }

        final BatchSettingsDTO batchSettingsDTO = remoteProcessGroupPortDto.getBatchSettings();
        if (isNotNull(batchSettingsDTO)) {
            port.setBatchCount(batchSettingsDTO.getCount());
            port.setBatchSize(batchSettingsDTO.getSize());
            port.setBatchDuration(batchSettingsDTO.getDuration());
        }

        final Boolean isTransmitting = remoteProcessGroupPortDto.isTransmitting();
        if (isNotNull(isTransmitting)) {
            // start or stop as necessary
            if (!port.isRunning() && isTransmitting) {
                remoteProcessGroup.startTransmitting(port);
            } else if (port.isRunning() && !isTransmitting) {
                remoteProcessGroup.stopTransmitting(port);
            }
        }
    }

    @Override
    public RemoteProcessGroup updateRemoteProcessGroup(RemoteProcessGroupDTO remoteProcessGroupDTO) {
        RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupDTO.getId());
        return updateRemoteProcessGroup(remoteProcessGroup, remoteProcessGroupDTO);
    }

    /**
     * Verifies and applies the DTO's settings to the given remote process group,
     * then notifies the parent group of the modification.
     */
    private RemoteProcessGroup updateRemoteProcessGroup(RemoteProcessGroup remoteProcessGroup, RemoteProcessGroupDTO remoteProcessGroupDTO) {
        // verify the update request
        verifyUpdate(remoteProcessGroup, remoteProcessGroupDTO);

        // configure the remote process group
        final String targetUris = remoteProcessGroupDTO.getTargetUris();
        final String name = remoteProcessGroupDTO.getName();
        final String comments = remoteProcessGroupDTO.getComments();
        final String communicationsTimeout = remoteProcessGroupDTO.getCommunicationsTimeout();
        final String yieldDuration = remoteProcessGroupDTO.getYieldDuration();
        final String proxyHost = remoteProcessGroupDTO.getProxyHost();
        final Integer proxyPort = remoteProcessGroupDTO.getProxyPort();
        final String proxyUser = remoteProcessGroupDTO.getProxyUser();
        final String proxyPassword = remoteProcessGroupDTO.getProxyPassword();
        final String transportProtocol = remoteProcessGroupDTO.getTransportProtocol();
        final String localNetworkInterface = remoteProcessGroupDTO.getLocalNetworkInterface();

        if (isNotNull(targetUris)) {
            remoteProcessGroup.setTargetUris(targetUris);
        }
        if (isNotNull(name)) {
            remoteProcessGroup.setName(name);
        }
        if (isNotNull(comments)) {
            remoteProcessGroup.setComments(comments);
        }
        if (isNotNull(communicationsTimeout)) {
            remoteProcessGroup.setCommunicationsTimeout(communicationsTimeout);
        }
        if (isNotNull(yieldDuration)) {
            remoteProcessGroup.setYieldDuration(yieldDuration);
        }
        if (isNotNull(remoteProcessGroupDTO.getPosition())) {
            remoteProcessGroup.setPosition(new Position(remoteProcessGroupDTO.getPosition().getX(), remoteProcessGroupDTO.getPosition().getY()));
        }
        if (isNotNull(transportProtocol)) {
            remoteProcessGroup.setTransportProtocol(SiteToSiteTransportProtocol.valueOf(transportProtocol.toUpperCase()));
            // No null check because these proxy settings have to be clear if not specified.
            // But when user Enable/Disable transmission, only isTransmitting is sent.
            // To prevent clearing these values in that case, set these only if transportProtocol is sent,
            // assuming UI sends transportProtocol always for update.
            remoteProcessGroup.setProxyHost(proxyHost);
            remoteProcessGroup.setProxyPort(proxyPort);
            remoteProcessGroup.setProxyUser(proxyUser);
            // Keep using current password when null or "********" was sent.
            // Passing other values updates the password,
            // specify empty String to clear password.
            if (isNotNull(proxyPassword) && !DtoFactory.SENSITIVE_VALUE_MASK.equals(proxyPassword)) {
                remoteProcessGroup.setProxyPassword(proxyPassword);
            }
        }
        if (localNetworkInterface != null) {
            if (StringUtils.isBlank(localNetworkInterface)) {
                remoteProcessGroup.setNetworkInterface(null);
            } else {
                remoteProcessGroup.setNetworkInterface(localNetworkInterface);
            }
        }

        final Boolean isTransmitting = remoteProcessGroupDTO.isTransmitting();
        if (isNotNull(isTransmitting)) {
            // start or stop as necessary
            if (!remoteProcessGroup.isTransmitting() && isTransmitting) {
                remoteProcessGroup.startTransmitting();
            } else if (remoteProcessGroup.isTransmitting() && !isTransmitting) {
                remoteProcessGroup.stopTransmitting();
            }
        }

        final ProcessGroup group = remoteProcessGroup.getProcessGroup();
        if (group != null) {
            group.onComponentModified();
        }

        return remoteProcessGroup;
    }

    @Override
    public void verifyDelete(String remoteProcessGroupId) {
        RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        remoteProcessGroup.verifyCanDelete();
    }

    @Override
    public void deleteRemoteProcessGroup(String remoteProcessGroupId) {
        RemoteProcessGroup remoteProcessGroup = locateRemoteProcessGroup(remoteProcessGroupId);
        remoteProcessGroup.getProcessGroup().removeRemoteProcessGroup(remoteProcessGroup);
    }

    public void setFlowController(FlowController flowController) {
        this.flowController = flowController;
    }
}
package com.sleepycat.je.rep;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import com.sleepycat.je.Database;
import com.sleepycat.je.Durability;
import com.sleepycat.je.Transaction;
import com.sleepycat.je.Durability.ReplicaAckPolicy;
import com.sleepycat.je.Durability.SyncPolicy;
import com.sleepycat.je.rep.impl.RepParams;
import com.sleepycat.je.rep.impl.RepTestBase;
import com.sleepycat.je.rep.utilint.RepTestUtils.RepEnvInfo;

/**
 * Tests the electable-group-size override: a minority of replication nodes can
 * be forced to hold elections and commit writes by pretending the group is
 * smaller than it really is. Each test restores the normal (override = 0)
 * configuration before finishing so the group is left in working order.
 */
public class ElectableGroupSizeOverrideTest extends RepTestBase {

    /*
     * Verify that elections can be held, and writes performed by a minority
     * with an override in place.
     */
    public void testBasic() throws InterruptedException {
        createGroup();

        /* Shutdown the entire group. */
        closeNodes(repEnvInfo);

        /* Verify that the node cannot come up on its own. */
        RepEnvInfo ri0 = repEnvInfo[0];
        // Short timeout so the expected failure is detected quickly.
        setEnvSetupTimeout("2 s");
        try {
            ri0.openEnv();
            fail("Unknow master exception expected.");
        } catch (UnknownMasterException ume) {
            /* Expected. */
        }
        /* Restore the timeout. */
        setEnvSetupTimeout(RepParams.ENV_SETUP_TIMEOUT.getDefault());

        // With an override of 1, a single node constitutes a "majority".
        startGroupWithOverride(1);

        /* Resume normal operations, eliminate override */
        setElectableGroupSize(0, repEnvInfo);
        RepEnvInfo mi = restartNodes(repEnvInfo);
        assertNotNull(mi);
        closeNodes(repEnvInfo);
    }

    /**
     * tests a 5 node group, with the master failing as part of the majority
     * of the nodes being lost.
     *
     * 1) Shutdown nodes n1-n3, including n1 the master
     * 2) Verify no master amongst the remaining
     * 3) Set override
     * 4) Verify master emerges and write transactions can be committed.
     * 5) Remove override
     * 6) Bring up down nodes n1-n3 -- group is in normal working order
     */
    public void testMasterDownOverride() throws InterruptedException {
        createGroup();
        assertTrue(repEnvInfo[0].getEnv().getState().isMaster());

        /* Shutdown a simple majority, including the Master. */
        final int simpleMajority = (repEnvInfo.length + 1) / 2;
        RepEnvInfo[] downNodes = copyArray(repEnvInfo, 0, simpleMajority);
        RepEnvInfo[] activeNodes =
            copyArray(repEnvInfo, simpleMajority,
                      repEnvInfo.length - simpleMajority);
        closeNodes(downNodes);

        // The latch fires when one of the surviving nodes becomes master.
        final CountDownLatch masterLatch = new CountDownLatch(1);
        for (RepEnvInfo ri : activeNodes) {
            /* No master amongst the remaining nodes. */
            assertTrue(!ri.getEnv().getState().isMaster());
            ri.getEnv().
                setStateChangeListener(new MasterListener(masterLatch));
        }

        setMutableElectableGroupSize(simpleMajority-1, activeNodes);

        /* They should now be able to conclude an election. */
        boolean done = masterLatch.await(10, TimeUnit.SECONDS);
        assertTrue(done);

        /* Write should succeed without exceptions with the override */
        tryWrite(findMaster(activeNodes).getEnv(), "dbok");

        /* Bring up a down node, restoring a simple majority of active nodes */
        ReplicatedEnvironment renv0 = downNodes[0].openEnv();
        assertTrue(renv0.getState().isReplica());

        /* Restore normal functioning. */
        setMutableElectableGroupSize(0, activeNodes);

        /* Bring up the rest of the nodes. */
        restartNodes(copyArray(downNodes, 1, downNodes.length - 1));
    }

    /* Copy a part of an array to a new array. */
    private RepEnvInfo[] copyArray(RepEnvInfo[] nodes,
                                   int srcStart,
                                   int copyLength) {
        RepEnvInfo[] newNodes = new RepEnvInfo[copyLength];
        System.arraycopy(nodes, srcStart, newNodes, 0, copyLength);
        return newNodes;
    }

    /**
     * tests a 5 node group, with the master being retained when the majority
     * of the nodes is lost.
     *
     * 1) Shutdown nodes n3-n5, n1 is the master and is alive.
     * 2) Verify that master can no longer commit transactions
     * 3) Set override
     * 4) Verify that write transactions can be committed
     * 5) Remove override
     * 6) Bring up down nodes n3-n5 -- group is in normal working order
     */
    public void testMasterUpOverride() throws InterruptedException {
        createGroup();
        assertTrue(repEnvInfo[0].getEnv().getState().isMaster());

        /* Shutdown a simple majority, excluding the Master. */
        final int simpleMajority = (repEnvInfo.length + 1) / 2;
        RepEnvInfo[] downNodes =
            copyArray(repEnvInfo, simpleMajority - 1,
                      repEnvInfo.length - simpleMajority + 1);
        RepEnvInfo[] activeNodes =
            copyArray(repEnvInfo, 0, simpleMajority - 1);
        closeNodes(downNodes);

        assertTrue(repEnvInfo[0].getEnv().getState().isMaster());

        /* Write should fail without the override */
        try {
            tryWrite(findMaster(activeNodes).getEnv(), "dbfail");
            fail("Exception expected");
        } catch (InsufficientAcksException iae) {
            // ok
        } catch (InsufficientReplicasException ire) {
            // ok
        }

        setMutableElectableGroupSize(simpleMajority-1, activeNodes);

        /* Write should succeed without exceptions with the override */
        tryWrite(findMaster(activeNodes).getEnv(), "dbok");

        /* Bring up a down node, restoring a simple majority of active nodes */
        ReplicatedEnvironment renv = downNodes[0].openEnv();
        assertTrue(renv.getState().isReplica());

        /* Restore normal functioning. */
        setMutableElectableGroupSize(0, activeNodes);

        /* Bring up the rest of the nodes. */
        restartNodes(copyArray(downNodes, 1, downNodes.length - 1));
    }

    /**
     * Starts only the first {@code override} nodes with the override in
     * place, verifies a master emerges and accepts writes, then brings up the
     * remaining nodes as replicas and closes the whole group.
     */
    private void startGroupWithOverride(int override)
        throws InterruptedException {

        /* Now Try bringing up just one node using override */
        setElectableGroupSize(override, repEnvInfo);
        RepEnvInfo[] activeNodes = copyArray(repEnvInfo, 0, override);
        RepEnvInfo mi = restartNodes(activeNodes);
        assertNotNull(mi);
        ReplicatedEnvironment menv = mi.getEnv();

        /* Write must succeed without exceptions. */
        tryWrite(menv, "dbok" + override);

        /*
         * It should be possible for the other nodes to find the master
         * and join.
         */
        for (int i=override; i < repEnvInfo.length; i++) {
            repEnvInfo[i].openEnv();
            assertTrue(repEnvInfo[i].getEnv().getState().isReplica());
        }

        /* The master should be unchanged */
        assertTrue(menv.getState().isMaster());

        closeNodes(repEnvInfo);
    }

    /*
     * Attempt write operation by creating a database. Caller knows whether or
     * not to expect an exception.
     */
    private void tryWrite(ReplicatedEnvironment repEnv, String dbName) {
        Transaction txn = repEnv.beginTransaction(null, null);
        Database db = repEnv.openDatabase(txn, dbName, dbconfig);
        // SIMPLE_MAJORITY ack policy: the commit only succeeds if a majority
        // (as currently configured/overridden) of electable nodes acknowledge.
        txn.commit(new Durability(SyncPolicy.SYNC,
                                  SyncPolicy.SYNC,
                                  ReplicaAckPolicy.SIMPLE_MAJORITY));
        db.close();
    }

    /**
     * Sets the electable group size in the configuration associated with each
     * of the nodes.
     *
     * @param override the override value
     * @param nodes the configs where the override is to be applied
     */
    void setElectableGroupSize(int override, RepEnvInfo... nodes) {
        for (RepEnvInfo ri : nodes) {
            ri.getRepConfig().setElectableGroupSizeOverride(override);
        }
    }

    /**
     * Sets the electable group size mutable associated with an open
     * environment handle.
     *
     * @param override the override value
     * @param nodes the nodes where the override is to be applied
     */
    void setMutableElectableGroupSize(int override, RepEnvInfo... nodes) {
        for (RepEnvInfo ri : nodes) {
            ReplicationConfig mutableConfig = ri.getEnv().getRepConfig();
            mutableConfig.setElectableGroupSizeOverride(override);
            ri.getEnv().setRepMutableConfig(mutableConfig);
        }
    }

    /**
     * Sets the setup timeout associated with all the nodes in the test.
     *
     * @param duration the amount of time to wait
     */
    private void setEnvSetupTimeout(String duration) {
        for (RepEnvInfo ri : repEnvInfo) {
            ri.getRepConfig().setConfigParam
                (RepParams.ENV_SETUP_TIMEOUT.getName(), duration);
        }
    }
}
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.query.impl.extractor.specification;

import com.hazelcast.config.Config;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.internal.util.UuidUtil;
import com.hazelcast.query.Predicates;
import com.hazelcast.query.impl.extractor.AbstractExtractionTest;
import com.hazelcast.test.HazelcastParametrizedRunner;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.ArrayList;
import java.util.Collection;

import static com.hazelcast.config.InMemoryFormat.BINARY;
import static com.hazelcast.config.InMemoryFormat.OBJECT;
import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Index.NO_INDEX;
import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Index.ORDERED;
import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Index.UNORDERED;
import static com.hazelcast.query.impl.extractor.AbstractExtractionSpecification.Multivalue.PORTABLE;
import static com.hazelcast.query.impl.extractor.specification.ComplexTestDataStructure.Finger;
import static com.hazelcast.query.impl.extractor.specification.ComplexTestDataStructure.Person;
import static com.hazelcast.query.impl.extractor.specification.ComplexTestDataStructure.finger;
import static com.hazelcast.query.impl.extractor.specification.ComplexTestDataStructure.limb;
import static com.hazelcast.query.impl.extractor.specification.ComplexTestDataStructure.person;
import static com.hazelcast.query.impl.extractor.specification.ComplexTestDataStructure.tattoos;
import static java.util.Arrays.asList;

/**
 * Specification test that verifies the behavior of corner-cases extraction in single-value attributes.
 * It's a detailed test especially for portables, since the extraction is much more complex there.
 * <p>
 * Extraction mechanism: IN-BUILT REFLECTION EXTRACTION
 * <p>
 * This test is parametrised on two axes (see the parametrisationData() method):
 * - in memory format
 * - indexing
 * <p>
 * NOTE(review): some tests call a bare {@code equal(...)} while most use
 * {@code Predicates.equal(...)} — presumably a helper inherited from
 * {@link AbstractExtractionTest}; verify it is equivalent, or unify on
 * {@code Predicates.equal} for consistency.
 */
@RunWith(HazelcastParametrizedRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class ExtractionInPortableSpecTest extends AbstractExtractionTest {

    // Fixture: fully-populated person with two limbs/fingers/tattoos.
    private static final Person BOND = person("Bond",
            limb("left-hand", tattoos(), finger("thumb"), finger(null)),
            limb("right-hand", tattoos("knife"), finger("middle"), finger("index"))
    );

    // Fixture: second fully-populated person with distinct attribute values.
    private static final Person KRUEGER = person("Krueger",
            limb("linke-hand", tattoos("bratwurst"), finger("Zeigefinger"), finger("Mittelfinger")),
            limb("rechte-hand", tattoos(), finger("Ringfinger"), finger("Daumen"))
    );

    // Fixture: person with null name/limb-name and empty collections, to exercise null handling.
    private static final Person HUNT_WITH_NULLS = person(null,
            limb(null, new ArrayList<String>(), new Finger[]{})
    );

    public ExtractionInPortableSpecTest(InMemoryFormat inMemoryFormat, Index index, Multivalue multivalue) {
        super(inMemoryFormat, index, multivalue);
    }

    // Registers the portable factory so Person/Limb/Finger portables can be deserialized.
    protected Configurator getInstanceConfigurator() {
        return new Configurator() {
            @Override
            public void doWithConfig(Config config, Multivalue mv) {
                config.getSerializationConfig().addPortableFactory(ComplexTestDataStructure.PersonPortableFactory.ID, new ComplexTestDataStructure.PersonPortableFactory());
            }
        };
    }

    @Override
    protected void doWithMap() {
        // init fully populated object to handle nulls properly
        if (mv == PORTABLE) {
            String key = UuidUtil.newUnsecureUuidString();
            map.put(key, KRUEGER.getPortable());
            map.remove(key);
        }
    }

    // --- non-existing attribute names ---

    @Test
    public void wrong_attribute_name_atLeaf() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("name12312", "Bond"), mv),
                Expected.empty());
    }

    @Test
    public void wrong_attribute_name_atLeaf_comparedToNull() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("name12312", null), mv),
                Expected.of(BOND, KRUEGER, HUNT_WITH_NULLS));
    }

    @Test
    public void nested_wrong_attribute_name_atLeaf() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(Predicates.equal("firstLimb.name12312", "left-hand"), mv),
                Expected.empty());
    }

    @Test
    public void nested_wrong_attribute_name_atLeaf_comparedToNull() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(Predicates.equal("firstLimb.name12312", null), mv),
                Expected.of(BOND, KRUEGER));
    }

    @Test
    public void nested_wrong_attribute_notAtLeaf() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(Predicates.equal("firstLimb.notExisting.notExistingToo", "left-hand"), mv),
                Expected.empty());
    }

    @Test
    public void nested_wrong_attribute_notAtLeaf_comparedToNull() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(Predicates.equal("firstLimb.notExisting.notExistingToo", null), mv),
                Expected.of(BOND, KRUEGER));
    }

    // --- out-of-bound array indexes combined with non-existing attributes ---

    @Test
    public void indexOutOfBoundFirst_notExistingProperty_notAtLeaf() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(equal("limbs_[100].notExisting.notExistingToo", "knife"), mv),
                Expected.empty());
    }

    @Test
    public void indexOutOfBoundFirst_notExistingProperty_notAtLeaf_comparedToNull() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(equal("limbs_[100].notExisting.notExistingToo", null), mv),
                Expected.of(BOND, KRUEGER));
    }

    @Test
    public void indexOutOfBound_notExistingProperty() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(equal("limbs_[100].sdafasdf", "knife"), mv),
                Expected.empty());
    }

    @Test
    public void indexOutOfBound_atLeaf_notExistingProperty() {
        execute(Input.of(BOND, KRUEGER),
                Query.of(equal("limbs_[0].fingers_[100].asdfas", "knife"), mv),
                Expected.empty());
    }

    // --- null attribute values ---

    @Test
    public void wrong_attribute_name_compared_to_null() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("name12312", null), mv),
                Expected.of(BOND, KRUEGER, HUNT_WITH_NULLS));
    }

    @Test
    public void primitiveNull_comparedToNull_matching() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("name", null), mv),
                Expected.of(HUNT_WITH_NULLS));
    }

    @Test
    public void primitiveNull_comparedToNotNull_notMatching() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("name", "Non-null-value"), mv),
                Expected.empty());
    }

    @Test
    public void nestedAttribute_firstIsNull_comparedToNotNull() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("secondLimb.name", "Non-null-value"), mv),
                Expected.empty());
    }

    @Test
    public void nestedAttribute_firstIsNull_comparedToNull() {
        execute(Input.of(BOND, KRUEGER, HUNT_WITH_NULLS),
                Query.of(Predicates.equal("secondLimb.name", null), mv),
                Expected.of(HUNT_WITH_NULLS));
    }

    // --- positive extraction cases ---

    @Test
    public void correct_attribute_name() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("name", "Bond"), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_nestedAttribute_name() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("firstLimb.name", "left-hand"), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_portableAttribute() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("firstLimb", BOND.firstLimb.getPortable()), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_portableArrayInTheMiddle_matching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("limbs_[0].name", "left-hand"), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_portableArrayInTheMiddle_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("limbs_[0].name", "dasdfasdfasdf"), mv),
                Expected.empty());
    }

    @Test
    public void correct_portableInTheMiddle_portableAtTheEnd_matching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("firstLimb.fingers_[0]", (BOND.firstLimb.fingers_array[0]).getPortable()), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_portableInTheMiddle_portableAtTheEnd_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("firstLimb.fingers_[0]", (BOND.firstLimb.fingers_array[1]).getPortable()), mv),
                Expected.empty());
    }

    @Test
    public void correct_portableInTheMiddle_portableArrayAtTheEnd_primitiveAttribute_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("firstLimb.fingers_[0].name", "thumb123"), mv),
                Expected.empty());
    }

    @Test
    public void correct_portableArrayInTheMiddle_portableAtTheEnd_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("limbs_[0].fingers_[0]", (BOND.firstLimb.fingers_array[1]).getPortable()), mv),
                Expected.empty());
    }

    @Test
    public void correct_portableArrayInTheMiddle_portableArrayAtTheEnd_primitiveAttribute_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("limbs_[0].fingers_[0].name", "thumb123"), mv),
                Expected.empty());
    }

    @Test
    public void correct_portableArrayInTheMiddle_portableArrayAtTheEnd_primitiveAttribute_matching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("firstLimb.fingers_[0].name", "thumb"), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_portableArrayAtTheEnd_matching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("limbs_[0]", BOND.limbs_array[0].getPortable()), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_portableArrayAtTheEnd_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("limbs_[1]", BOND.limbs_array[0].getPortable()), mv),
                Expected.empty());
    }

    @Test
    public void correct_primitiveArrayAtTheEnd_matching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("secondLimb.tattoos_[0]", "knife"), mv),
                Expected.of(BOND));
    }

    @Test
    public void correct_primitiveArrayAtTheEnd_notMatching() {
        execute(Input.of(BOND),
                Query.of(Predicates.equal("secondLimb.tattoos_[0]", "knife123"), mv),
                Expected.empty());
    }

    // Cartesian product of the parametrisation axes: in-memory format x index type,
    // always with PORTABLE multivalue handling.
    @Parameterized.Parameters(name = "{index}: {0}, {1}, {2}")
    public static Collection<Object[]> parametrisationData() {
        return axes(
                asList(BINARY, OBJECT),
                asList(NO_INDEX, UNORDERED, ORDERED),
                asList(PORTABLE)
        );
    }
}
/*
 * Copyright (c) 2004-2010, Kohsuke Kawaguchi
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are permitted provided
 * that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright notice, this list of
 *       conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright notice, this list of
 *       conditions and the following disclaimer in the documentation and/or other materials
 *       provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
 * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
 * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.kohsuke.stapler.export;

import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
import com.google.common.base.Predicate;
import org.kohsuke.stapler.export.TreePruner.ByDepth;

/**
 * Writes all the property of one {@link ExportedBean} to {@link DataWriter}.
 *
 * <p>A {@code Model} is the reflective description of a single exported class:
 * it collects every field and zero-argument method annotated with
 * {@link Exported} that is declared directly on {@code type} (inherited
 * members are covered by {@link #superModel}).
 *
 * @author Kohsuke Kawaguchi
 */
public class Model<T> {
    /**
     * The class being modeled.
     */
    public final Class<T> type;

    /**
     * {@link Model} for the super class.
     * Non-null only when the direct superclass itself carries {@link ExportedBean}.
     */
    public final Model<? super T> superModel;

    // Exported properties declared directly on {@link #type}, sorted (Property
    // is sorted via Arrays.sort in the constructor, so it is presumably Comparable).
    private final Property[] properties;

    /*package*/ final ModelBuilder parent;
    // Default visibility taken from the @ExportedBean annotation on this type.
    /*package*/ final int defaultVisibility;

    /**
     * Lazily loaded "*.javadoc" file for this model.
     */
    private volatile Properties javadoc;

    // Names of the properties declared directly on this class (not ancestors).
    private final Set<String> propertyNames = new HashSet<String>();

    /**
     * Builds the model for {@code type} by scanning its declared
     * {@link Exported} fields and methods, then registers itself in
     * {@code parent.models}.
     *
     * @param propertyOwner if non-null, the class whose property referenced this
     *        type; used only to enrich the {@link NotExportableException}.
     * @throws NotExportableException if {@code type} lacks {@link ExportedBean}.
     */
    /*package*/ Model(ModelBuilder parent, Class<T> type, @CheckForNull Class<?> propertyOwner, @Nullable String property) throws NotExportableException {
        this.parent = parent;
        this.type = type;
        ExportedBean eb = type.getAnnotation(ExportedBean.class);
        if (eb == null) {
            // Give a more helpful message when we know who referenced this type.
            throw propertyOwner != null ? new NotExportableException(type, propertyOwner, property) : new NotExportableException(type);
        }
        this.defaultVisibility = eb.defaultVisibility();

        // Chain up only if the superclass is itself an @ExportedBean;
        // parent.get(sc) may recursively build (and cache) its model.
        Class<? super T> sc = type.getSuperclass();
        if(sc!=null && sc.getAnnotation(ExportedBean.class)!=null)
            superModel = parent.get(sc);
        else
            superModel = null;

        List<Property> properties = new ArrayList<Property>();

        // Use reflection to find out what properties are exposed.
        for( Field f : type.getFields() ) {
            if(f.getDeclaringClass()!=type) continue; // inherited fields belong to superModel
            Exported exported = f.getAnnotation(Exported.class);
            if(exported !=null)
                properties.add(new FieldProperty(this,f, exported));
        }
        for( Method m : type.getMethods() ) {
            if(m.getDeclaringClass()!=type) continue; // inherited methods belong to superModel
            // Skip compiler-generated bridge methods so a property isn't picked up twice.
            if(m.isSynthetic() && m.isBridge()) continue;
            Exported exported = m.getAnnotation(Exported.class);
            if(exported !=null) {
                if (m.getParameterTypes().length > 0) {
                    // @Exported only makes sense on zero-argument getters; warn and skip.
                    LOGGER.log(Level.WARNING, "Method " + m.getName() + " of " + type.getName() + " is annotated @Exported but requires arguments");
                } else {
                    properties.add(new MethodProperty(this,m, exported));
                }
            }
        }
        this.properties = properties.toArray(new Property[properties.size()]);
        Arrays.sort(this.properties);
        for (Property p : properties)
            this.propertyNames.add(p.name);

        // NOTE(review): publishing 'this' into the shared map happens at the very
        // end of construction; callers presumably rely on parent.get() for caching.
        parent.models.put(type,this);
    }

    /**
     * Gets all the exported properties.
     *
     * @return an unmodifiable view over the sorted property array.
     */
    public List<Property> getProperties() {
        return Collections.unmodifiableList(Arrays.asList(properties));
    }

    /**
     * Does a property exist strictly in this class?
     */
    /*package*/ final Predicate<String> HAS_PROPERTY_NAME = new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String name) {
            return propertyNames.contains(name);
        }
    };

    /**
     * Does a property exist strictly in this class or its ancestors?
     */
    /*package*/ final Predicate<String> HAS_PROPERTY_NAME_IN_ANCESTORY = new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String name) {
            // Walk the superModel chain until a class declaring the name is found.
            for (Model m=Model.this; m!=null; m=m.superModel)
                if (m.propertyNames.contains(name))
                    return true;
            return false;
        }
    };

    /**
     * Loads the javadoc list and returns it as {@link Properties}.
     *
     * <p>Uses double-checked locking on the volatile {@link #javadoc} field so
     * the resource is read at most once.
     *
     * @return always non-null.
     */
    /*package*/ Properties getJavadoc() {
        if(javadoc!=null)    return javadoc;
        synchronized(this) {
            if(javadoc!=null)    return javadoc;

            // load
            Properties p = new Properties();
            // NOTE(review): '$' is mapped to '/', so Outer$Inner resolves to
            // "Outer/Inner.javadoc" — presumably matching how the *.javadoc
            // resources are generated; confirm against the build tooling.
            InputStream is = type.getClassLoader().getResourceAsStream(type.getName().replace('$', '/').replace('.', '/') + ".javadoc");
            if(is!=null) {
                try {
                    try {
                        p.load(is);
                    } finally {
                        is.close();
                    }
                } catch (IOException e) {
                    throw new RuntimeException("Unable to load javadoc for "+type,e);
                }
            }
            javadoc = p;
            return javadoc;
        }
    }

    /**
     * Writes the property values of the given object to the writer.
     * Equivalent to {@code writeTo(object, 0, writer)} (neutral visibility bias).
     */
    public void writeTo(T object, DataWriter writer) throws IOException {
        writeTo(object, 0, writer);
    }

    /**
     * Writes the property values of the given object to the writer.
     *
     * @param pruner
     *      Controls which portion of the object graph will be sent to the writer.
     */
    public void writeTo(T object, TreePruner pruner, DataWriter writer) throws IOException {
        try {
            writer.type(null,object.getClass());
        } catch (AbstractMethodError _) {
            // legacy client that doesn't understand this
            // NOTE(review): '_' is a reserved identifier since Java 9; this file
            // will not compile on modern JDKs without renaming the variable.
        }
        writer.startObject();
        writeNestedObjectTo(object, pruner, writer);
        writer.endObject();
    }

    /**
     * Writes the property values of the given object to the writer.
     *
     * @param baseVisibility
     *      This parameters controls how much data we'd be writing,
     *      by adding bias to the sub tree cutting.
     *      A property with {@link Exported#visibility() visibility} X will be written
     *      if the current depth Y and baseVisibility Z satisfies {@code X + Z > Y}.
     *
     *      0 is the normal value. Positive value means writing bigger tree,
     *      and negative value means writing smaller trees.
     *
     * @deprecated as of 1.139
     */
    @Deprecated
    public void writeTo(T object, int baseVisibility, DataWriter writer) throws IOException {
        writeTo(object,new ByDepth(1-baseVisibility),writer);
    }

    // Writes this model's properties (and, first, the ancestors') without the
    // surrounding startObject/endObject markers. Ancestor properties that are
    // shadowed by this class are filtered out via HAS_PROPERTY_NAME.
    void writeNestedObjectTo(T object, TreePruner pruner, DataWriter writer) throws IOException {
        if (superModel != null) {
            superModel.writeNestedObjectTo(object, new FilteringTreePruner(HAS_PROPERTY_NAME,pruner), writer);
        }

        for (Property p : properties) {
            p.writeTo(object, pruner, writer);
        }
    }

    private static final Logger LOGGER = Logger.getLogger(Model.class.getName());
}
package com.gmail.nuclearcat1337.horse_stats.gui;

import com.gmail.nuclearcat1337.horse_stats.HorseStats;
import com.gmail.nuclearcat1337.horse_stats.Threshold;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.*;

import java.text.DecimalFormat;

/**
 * Created by Mr_Little_Kitty on 1/10/2017.
 *
 * Settings screen for the HorseStats mod. Lays out three columns of
 * threshold sliders (Jump, Health, Speed), plus render-distance and
 * decimal-places sliders, an overlay on/off toggle, and a Done button.
 */
public class GuiHorseStats extends GuiScreen {
    // Each column of controls is three small buttons wide.
    private static final int BUTTON_WIDTH = GuiConstants.SMALL_BUTTON_WIDTH * 3;

    private static final String HEALTH_STRING = "Horse Health";
    private static final String JUMP_STRING = "Horse Jump";
    private static final String SPEED_STRING = "Horse Speed";

    // Pixel widths of the three column labels, measured once in the constructor
    // so drawScreen can center them without re-measuring every frame.
    private final int healthWidth, jumpWidth, speedWidth;
    private final HorseStats horseStats;

    public GuiHorseStats(HorseStats stats) {
        this.horseStats = stats;
        Minecraft mc = Minecraft.getMinecraft();
        healthWidth = mc.fontRenderer.getStringWidth(HEALTH_STRING);
        jumpWidth = mc.fontRenderer.getStringWidth(JUMP_STRING);
        speedWidth = mc.fontRenderer.getStringWidth(SPEED_STRING);
    }

    /**
     * Builds the widget list. Columns left-to-right: Jump, Health, Speed.
     * The same xPos/yPos arithmetic is repeated in drawScreen, so the two
     * methods must stay in sync.
     */
    @Override
    public void initGui() {
        this.buttonList.clear();

        // Left column: one full column plus separation to the left of center.
        int xPos = (this.width / 2) - (BUTTON_WIDTH / 2) - (GuiConstants.STANDARD_SEPARATION_DISTANCE * 2) - BUTTON_WIDTH;
        int yPos = (this.height / 2) - (GuiConstants.STANDARD_BUTTON_HEIGHT / 2) - GuiConstants.STANDARD_SEPARATION_DISTANCE - GuiConstants.STANDARD_BUTTON_HEIGHT * 2; //the last *2 is so that the buttons are higher up

        // Two extra rows below the threshold sliders.
        int buttonYPos1 = yPos + GuiConstants.STANDARD_BUTTON_HEIGHT * 3 + GuiConstants.STANDARD_SEPARATION_DISTANCE * 3;
        int buttonYPos2 = buttonYPos1 + GuiConstants.STANDARD_BUTTON_HEIGHT + GuiConstants.STANDARD_SEPARATION_DISTANCE;

        // Column 1: jump thresholds (range 1..6) + render distance + overlay toggle (id 0).
        layoutThresholdButtons(xPos, yPos, horseStats.getJumpThreshold(), "Jump", 1, 6);
        buttonList.add(new GuiSlider(renderDistanceResponder, 10, xPos, buttonYPos1, "Render Distance", 2, 30, horseStats.getRenderDistance(), renderDistanceFormatter));
        buttonList.add(new GuiButton(0, xPos, buttonYPos2, BUTTON_WIDTH, GuiConstants.STANDARD_BUTTON_HEIGHT, "Overlay Render: " + (horseStats.shouldRenderStats() ? "On" : "Off")));

        xPos += BUTTON_WIDTH + GuiConstants.STANDARD_SEPARATION_DISTANCE * 2;

        // Column 2: health thresholds (range 18..32) + decimal places + Done button (id 1).
        layoutThresholdButtons(xPos, yPos, horseStats.getHealthThreshold(), "Health", 18, 32);
        buttonList.add(new GuiSlider(decimalPlacesResponder, 11, xPos, buttonYPos1, "Decimal Places", 1, 10, horseStats.getDecimalPlaces(), decimalPlacesFormatter));
        buttonList.add(new GuiButton(1, xPos, buttonYPos2, BUTTON_WIDTH, GuiConstants.STANDARD_BUTTON_HEIGHT, "Done"));

        xPos += BUTTON_WIDTH + GuiConstants.STANDARD_SEPARATION_DISTANCE * 2;

        // Column 3: speed thresholds (range 8..15).
        layoutThresholdButtons(xPos, yPos, horseStats.getSpeedThreshold(), "Speed", 8, 15);

        super.initGui();
    }

    /**
     * Draws the three centered column headers above the slider columns.
     * 16777215 is 0xFFFFFF (white).
     */
    @Override
    public void drawScreen(int mouseX, int mouseY, float partialTicks) {
        super.drawScreen(mouseX, mouseY, partialTicks);

        // Same origin computation as initGui so labels line up with the columns.
        int xPos = (this.width / 2) - (BUTTON_WIDTH / 2) - (GuiConstants.STANDARD_SEPARATION_DISTANCE * 2) - BUTTON_WIDTH;
        int yPos = (this.height / 2) - (GuiConstants.STANDARD_BUTTON_HEIGHT / 2) - GuiConstants.STANDARD_SEPARATION_DISTANCE - GuiConstants.STANDARD_BUTTON_HEIGHT * 2; //the last *2 is so that the buttons are higher up

        mc.fontRenderer.drawString(JUMP_STRING, xPos + BUTTON_WIDTH / 2 - jumpWidth / 2, yPos - GuiConstants.STANDARD_BUTTON_HEIGHT / 2 - mc.fontRenderer.FONT_HEIGHT / 2, 16777215);

        xPos += BUTTON_WIDTH + GuiConstants.STANDARD_SEPARATION_DISTANCE * 2;

        mc.fontRenderer.drawString(HEALTH_STRING, xPos + BUTTON_WIDTH / 2 - healthWidth / 2, yPos - GuiConstants.STANDARD_BUTTON_HEIGHT / 2 - mc.fontRenderer.FONT_HEIGHT / 2, 16777215);

        xPos += BUTTON_WIDTH + GuiConstants.STANDARD_SEPARATION_DISTANCE * 2;

        mc.fontRenderer.drawString(SPEED_STRING, xPos + BUTTON_WIDTH / 2 - speedWidth / 2, yPos - GuiConstants.STANDARD_BUTTON_HEIGHT / 2 - mc.fontRenderer.FONT_HEIGHT / 2, 16777215);
    }

    /**
     * Handles the two plain buttons: id 0 toggles the overlay,
     * id 1 saves and closes the screen. Sliders report through their
     * GuiResponder callbacks instead.
     */
    @Override
    public void actionPerformed(GuiButton button) {
        if (!button.enabled)
            return;
        switch (button.id) {
            case 0:
                boolean nextState = !horseStats.shouldRenderStats();
                horseStats.getSettings().shouldRender = nextState;
                button.displayString = "Overlay Render: " + (nextState ? "On" : "Off");
                horseStats.saveSettings();
                break;
            case 1:
                horseStats.saveSettings();
                Minecraft.getMinecraft().displayGuiScreen(null);
                break;
        }
    }

    @Override
    public boolean doesGuiPauseGame() {
        return false;
    }

    // Persist settings however the screen is dismissed (Done button or Esc).
    @Override
    public void onGuiClosed() {
        horseStats.saveSettings();
    }

    // Responder for the decimal-places slider (id 11): stores the value and
    // refreshes the cached decimal formatting.
    private final GuiPageButtonList.GuiResponder decimalPlacesResponder = new GuiPageButtonList.GuiResponder() {
        @Override
        public void setEntryValue(int id, boolean value) {
        }

        @Override
        public void setEntryValue(int id, float value) {
            horseStats.getSettings().decimalPlaces = (int) value;
            horseStats.updateDecimalPlaces();
        }

        @Override
        public void setEntryValue(int id, String value) {
        }
    };

    // Responder for the render-distance slider (id 10).
    private final GuiPageButtonList.GuiResponder renderDistanceResponder = new GuiPageButtonList.GuiResponder() {
        @Override
        public void setEntryValue(int id, boolean value) {
        }

        @Override
        public void setEntryValue(int id, float value) {
            horseStats.getSettings().renderDistance = value;
        }

        @Override
        public void setEntryValue(int id, String value) {
        }
    };

    // Label formatter: "Decimal Places: N decimals".
    private final GuiSlider.FormatHelper decimalPlacesFormatter = new GuiSlider.FormatHelper() {
        @Override
        public String getText(int id, String name, float value) {
            return name + ": " + (int) value + " decimals";
        }
    };

    // Label formatter: "Render Distance: N blocks".
    private final GuiSlider.FormatHelper renderDistanceFormatter = new GuiSlider.FormatHelper() {
        @Override
        public String getText(int id, String name, float value) {
            return name + ": " + (int) (value) + " blocks";
        }
    };

    // Shared one-decimal-place formatter for the threshold sliders.
    private static final DecimalFormat format = new DecimalFormat("#.0");

    private static final GuiSlider.FormatHelper formatHelper = new GuiSlider.FormatHelper() {
        @Override
        public String getText(int id, String name, float value) {
            return name + ": " + format.format(value);
        }
    };

    /**
     * Adds the "Great" and "Avg" sliders for one threshold column, stacked
     * vertically starting at (xPos, yPos).
     *
     * NOTE(review): every column reuses slider ids 7 and 8, so ids are not
     * unique across the screen; harmless here because actionPerformed only
     * dispatches on ids 0 and 1, but worth confirming against GuiSlider's use
     * of the id.
     */
    private void layoutThresholdButtons(int xPos, int yPos, Threshold threshold, String name, float min, float max) {
        GuiSlider greatSlider = new GuiSlider(new ThresholdRunnable(threshold, true), 7, xPos, yPos, name + " Great", min, max, threshold.getGreat(), formatHelper);
        greatSlider.width = BUTTON_WIDTH;

        yPos += greatSlider.height + GuiConstants.STANDARD_SEPARATION_DISTANCE;

        GuiSlider averageSlider = new GuiSlider(new ThresholdRunnable(threshold, false), 8, xPos, yPos, name + " Avg", min, max, threshold.getAverage(), formatHelper);
        averageSlider.width = BUTTON_WIDTH;

        buttonList.add(greatSlider);
        buttonList.add(averageSlider);
    }

    /**
     * Slider responder that routes a float value into either the "great" or
     * "average" bound of a Threshold, chosen at construction time.
     */
    private static class ThresholdRunnable implements GuiPageButtonList.GuiResponder {
        private final Threshold threshold;
        private final boolean great; // true -> setGreat, false -> setAverage

        public ThresholdRunnable(Threshold threshold, boolean great) {
            this.threshold = threshold;
            this.great = great;
        }

        @Override
        public void setEntryValue(int id, boolean value) {
        }

        @Override
        public void setEntryValue(int id, float value) {
            if (this.great)
                threshold.setGreat(value);
            else
                threshold.setAverage(value);
        }

        @Override
        public void setEntryValue(int id, String value) {
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.ws.security.processor;

import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.crypto.SecretKey;

import org.apache.ws.security.WSConstants;
import org.apache.ws.security.WSDataRef;
import org.apache.ws.security.WSDerivedKeyTokenPrincipal;
import org.apache.ws.security.WSDocInfo;
import org.apache.ws.security.WSSecurityEngineResult;
import org.apache.ws.security.WSSecurityException;
import org.apache.ws.security.components.crypto.AlgorithmSuite;
import org.apache.ws.security.components.crypto.AlgorithmSuiteValidator;
import org.apache.ws.security.handler.RequestData;
import org.apache.ws.security.message.CallbackLookup;
import org.apache.ws.security.message.DOMCallbackLookup;
import org.apache.ws.security.message.token.SecurityTokenReference;
import org.apache.ws.security.str.STRParser;
import org.apache.ws.security.str.SecurityTokenRefSTRParser;
import org.apache.ws.security.util.WSSecurityUtil;
import org.apache.xml.security.encryption.XMLCipher;
import org.apache.xml.security.encryption.XMLEncryptionException;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

/**
 * Processor for an {@code xenc:ReferenceList} element: dereferences each
 * {@code DataReference}, decrypts the referenced {@code EncryptedData}
 * in place in the DOM, and records the decrypted elements as
 * {@link WSDataRef}s in a {@link WSSecurityEngineResult}.
 */
public class ReferenceListProcessor implements Processor {
    private static org.apache.commons.logging.Log log =
        org.apache.commons.logging.LogFactory.getLog(ReferenceListProcessor.class);

    /**
     * Entry point called by the security engine for a ReferenceList element.
     * Decrypts all referenced data and returns a single ENCR-tagged result.
     */
    public List<WSSecurityEngineResult> handleToken(
        Element elem,
        RequestData data,
        WSDocInfo wsDocInfo
    ) throws WSSecurityException {
        if (log.isDebugEnabled()) {
            log.debug("Found reference list element");
        }
        List<WSDataRef> dataRefs = handleReferenceList(elem, data, wsDocInfo);
        WSSecurityEngineResult result =
            new WSSecurityEngineResult(WSConstants.ENCR, dataRefs);
        result.put(WSSecurityEngineResult.TAG_ID, elem.getAttributeNS(null, "Id"));
        wsDocInfo.addTokenElement(elem);
        wsDocInfo.addResult(result);
        return java.util.Collections.singletonList(result);
    }

    /**
     * Dereferences and decodes encrypted data elements.
     *
     * @param elem contains the <code>ReferenceList</code> to the encrypted
     *             data elements
     * @param cb the callback handler to get the key for a key name stored if
     *           <code>KeyInfo</code> inside the encrypted data elements
     */
    private List<WSDataRef> handleReferenceList(
        Element elem,
        RequestData data,
        WSDocInfo wsDocInfo
    ) throws WSSecurityException {
        List<WSDataRef> dataRefs = new ArrayList<WSDataRef>();

        //find out if there's an EncryptedKey in the doc (AsymmetricBinding)
        Element wsseHeaderElement = wsDocInfo.getSecurityHeader();
        boolean asymBinding = WSSecurityUtil.getDirectChildElement(
            wsseHeaderElement, WSConstants.ENC_KEY_LN, WSConstants.ENC_NS) != null;

        // Walk the direct children, picking out xenc:DataReference elements.
        for (Node node = elem.getFirstChild();
            node != null;
            node = node.getNextSibling()
        ) {
            if (Node.ELEMENT_NODE == node.getNodeType()
                && WSConstants.ENC_NS.equals(node.getNamespaceURI())
                && "DataReference".equals(node.getLocalName())) {
                String dataRefURI = ((Element) node).getAttribute("URI");
                // Strip the leading '#' of a same-document reference.
                // NOTE(review): an empty URI attribute would throw
                // StringIndexOutOfBoundsException here — confirm schema validation
                // upstream guarantees a non-empty URI.
                if (dataRefURI.charAt(0) == '#') {
                    dataRefURI = dataRefURI.substring(1);
                }

                // Skip references already decrypted by an earlier result.
                if (wsDocInfo.getResultByTag(WSConstants.ENCR, dataRefURI) == null) {
                    WSDataRef dataRef =
                        decryptDataRefEmbedded(
                            elem.getOwnerDocument(), dataRefURI, data, wsDocInfo, asymBinding);
                    dataRefs.add(dataRef);
                }
            }
        }

        return dataRefs;
    }

    /**
     * Decrypt an (embedded) EncryptedData element referenced by dataRefURI.
     */
    private WSDataRef decryptDataRefEmbedded(
        Document doc,
        String dataRefURI,
        RequestData data,
        WSDocInfo wsDocInfo,
        boolean asymBinding
    ) throws WSSecurityException {
        if (log.isDebugEnabled()) {
            log.debug("Found data reference: " + dataRefURI);
        }
        //
        // Find the encrypted data element referenced by dataRefURI
        //
        Element encryptedDataElement = findEncryptedDataElement(doc, wsDocInfo, dataRefURI);

        // With an asymmetric binding the encrypted data may be required to be
        // covered by a signature (guards against wrapping attacks).
        if (encryptedDataElement != null && asymBinding && data.isRequireSignedEncryptedDataElements()) {
            WSSecurityUtil.verifySignedElement(encryptedDataElement, doc, wsDocInfo.getSecurityHeader());
        }
        //
        // Prepare the SecretKey object to decrypt EncryptedData
        //
        String symEncAlgo = X509Util.getEncAlgo(encryptedDataElement);
        Element keyInfoElement =
            (Element)WSSecurityUtil.getDirectChildElement(
                encryptedDataElement, "KeyInfo", WSConstants.SIG_NS
            );
        // KeyInfo cannot be null
        if (keyInfoElement == null) {
            throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "noKeyinfo");
        }
        // Check BSP compliance
        if (data.getWssConfig().isWsiBSPCompliant()) {
            checkBSPCompliance(keyInfoElement, symEncAlgo);
        }

        //
        // Try to get a security reference token, if none found try to get a
        // shared key using a KeyName.
        //
        Element secRefToken =
            WSSecurityUtil.getDirectChildElement(
                keyInfoElement, "SecurityTokenReference", WSConstants.WSSE_NS
            );
        SecretKey symmetricKey = null;
        Principal principal = null;
        if (secRefToken == null) {
            // No STR: fall back to resolving a shared key via KeyName + callback.
            symmetricKey =
                X509Util.getSharedKey(keyInfoElement, symEncAlgo, data.getCallbackHandler());
        } else {
            STRParser strParser = new SecurityTokenRefSTRParser();
            Map<String, Object> parameters = new HashMap<String, Object>();
            parameters.put(SecurityTokenRefSTRParser.SIGNATURE_METHOD, symEncAlgo);
            strParser.parseSecurityTokenReference(
                secRefToken, data, wsDocInfo, parameters
            );
            byte[] secretKey = strParser.getSecretKey();
            principal = strParser.getPrincipal();
            symmetricKey = WSSecurityUtil.prepareSecretKey(symEncAlgo, secretKey);
        }

        // Check for compliance against the defined AlgorithmSuite
        AlgorithmSuite algorithmSuite = data.getAlgorithmSuite();
        if (algorithmSuite != null) {
            AlgorithmSuiteValidator algorithmSuiteValidator = new
                AlgorithmSuiteValidator(algorithmSuite);

            if (principal instanceof WSDerivedKeyTokenPrincipal) {
                algorithmSuiteValidator.checkDerivedKeyAlgorithm(
                    ((WSDerivedKeyTokenPrincipal)principal).getAlgorithm()
                );
                algorithmSuiteValidator.checkEncryptionDerivedKeyLength(
                    ((WSDerivedKeyTokenPrincipal)principal).getLength()
                );
            }
            algorithmSuiteValidator.checkSymmetricKeyLength(symmetricKey.getEncoded().length);
            algorithmSuiteValidator.checkSymmetricEncryptionAlgorithm(symEncAlgo);
        }

        return decryptEncryptedData(
            doc, dataRefURI, encryptedDataElement, symmetricKey, symEncAlgo
        );
    }

    /**
     * Check for BSP compliance
     * @param keyInfoElement The KeyInfo element child
     * @param encAlgo The encryption algorithm
     * @throws WSSecurityException
     */
    private static void checkBSPCompliance(
        Element keyInfoElement,
        String encAlgo
    ) throws WSSecurityException {
        // We can only have one token reference
        int result = 0;
        Node node = keyInfoElement.getFirstChild();
        Element child = null;
        while (node != null) {
            if (Node.ELEMENT_NODE == node.getNodeType()) {
                result++;
                child = (Element)node;
            }
            node = node.getNextSibling();
        }
        if (result != 1) {
            throw new WSSecurityException(
                WSSecurityException.INVALID_SECURITY, "invalidDataRef"
            );
        }
        // The single child must be a wsse:SecurityTokenReference.
        if (!WSConstants.WSSE_NS.equals(child.getNamespaceURI())
            || !SecurityTokenReference.SECURITY_TOKEN_REFERENCE.equals(child.getLocalName())) {
            throw new WSSecurityException(
                WSSecurityException.INVALID_SECURITY, "noSecTokRef"
            );
        }

        // EncryptionAlgorithm cannot be null
        if (encAlgo == null) {
            throw new WSSecurityException(
                WSSecurityException.UNSUPPORTED_ALGORITHM, "noEncAlgo"
            );
        }
        // EncryptionAlgorithm must be 3DES, or AES128, or AES256
        if (!WSConstants.TRIPLE_DES.equals(encAlgo)
            && !WSConstants.AES_128.equals(encAlgo)
            && !WSConstants.AES_128_GCM.equals(encAlgo)
            && !WSConstants.AES_256.equals(encAlgo)
            && !WSConstants.AES_256_GCM.equals(encAlgo)) {
            throw new WSSecurityException(
                WSSecurityException.INVALID_SECURITY, "badEncAlgo", new Object[]{encAlgo}
            );
        }
    }

    /**
     * Look up the encrypted data. First try Id="someURI". If no such Id then try
     * wsu:Id="someURI".
     *
     * @param doc The document in which to find EncryptedData
     * @param wsDocInfo The WSDocInfo object to use
     * @param dataRefURI The URI of EncryptedData
     * @return The EncryptedData element
     * @throws WSSecurityException if the EncryptedData element referenced by dataRefURI is
     *                             not found
     */
    public static Element findEncryptedDataElement(
        Document doc,
        WSDocInfo wsDocInfo,
        String dataRefURI
    ) throws WSSecurityException {
        CallbackLookup callbackLookup = wsDocInfo.getCallbackLookup();
        if (callbackLookup == null) {
            callbackLookup = new DOMCallbackLookup(doc);
        }
        Element encryptedDataElement =
            callbackLookup.getElement(dataRefURI, null, true);
        if (encryptedDataElement == null) {
            throw new WSSecurityException(
                WSSecurityException.INVALID_SECURITY, "dataRef", new Object[] {dataRefURI}
            );
        }
        // A wsse11:EncryptedHeader wraps the real EncryptedData; unwrap to its
        // first element child.
        if (encryptedDataElement.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
            && encryptedDataElement.getNamespaceURI().equals(WSConstants.WSSE11_NS)) {
            Node child = encryptedDataElement.getFirstChild();
            while (child != null && child.getNodeType() != Node.ELEMENT_NODE) {
                child = child.getNextSibling();
            }
            return (Element)child;
        }
        return encryptedDataElement;
    }

    /**
     * Decrypt the EncryptedData argument using a SecretKey.
     * @param doc The (document) owner of EncryptedData
     * @param dataRefURI The URI of EncryptedData
     * @param encData The EncryptedData element
     * @param symmetricKey The SecretKey with which to decrypt EncryptedData
     * @param symEncAlgo The symmetric encryption algorithm to use
     * @throws WSSecurityException
     */
    public static WSDataRef decryptEncryptedData(
        Document doc,
        String dataRefURI,
        Element encData,
        SecretKey symmetricKey,
        String symEncAlgo
    ) throws WSSecurityException {
        XMLCipher xmlCipher = null;
        try {
            xmlCipher = XMLCipher.getInstance(symEncAlgo);
            xmlCipher.setSecureValidation(true);
            xmlCipher.init(XMLCipher.DECRYPT_MODE, symmetricKey);
        } catch (XMLEncryptionException ex) {
            throw new WSSecurityException(
                WSSecurityException.UNSUPPORTED_ALGORITHM, null, null, ex
            );
        }

        WSDataRef dataRef = new WSDataRef();
        dataRef.setWsuId(dataRefURI);
        dataRef.setAlgorithm(symEncAlgo);
        boolean content = X509Util.isContent(encData);
        dataRef.setContent(content);

        // Capture the DOM neighborhood BEFORE doFinal mutates the tree; it is
        // the only way to locate the decrypted node afterwards.
        Node parent = encData.getParentNode();
        Node previousSibling = encData.getPreviousSibling();
        if (content) {
            // Content encryption: the decrypted content replaces the children
            // of the parent element, so track from one level up.
            encData = (Element) encData.getParentNode();
            parent = encData.getParentNode();
        }

        try {
            xmlCipher.doFinal(doc, encData, content);
        } catch (Exception ex) {
            throw new WSSecurityException(WSSecurityException.FAILED_CHECK, null, null, ex);
        }

        if (parent.getLocalName().equals(WSConstants.ENCRYPTED_HEADER)
            && parent.getNamespaceURI().equals(WSConstants.WSSE11_NS)) {
            // Replace the wsse11:EncryptedHeader wrapper with the decrypted header.
            Node decryptedHeader = parent.getFirstChild();
            Node soapHeader = parent.getParentNode();
            soapHeader.replaceChild(decryptedHeader, parent);

            dataRef.setProtectedElement((Element)decryptedHeader);
            dataRef.setXpath(getXPath(decryptedHeader));
        } else if (content) {
            dataRef.setProtectedElement(encData);
            dataRef.setXpath(getXPath(encData));
        } else {
            // Element encryption: the decrypted node sits where EncryptedData was —
            // either first child of the parent or right after the old previous sibling.
            Node decryptedNode;
            if (previousSibling == null) {
                decryptedNode = parent.getFirstChild();
            } else {
                decryptedNode = previousSibling.getNextSibling();
            }
            if (decryptedNode != null && Node.ELEMENT_NODE == decryptedNode.getNodeType()) {
                dataRef.setProtectedElement((Element)decryptedNode);
            }
            dataRef.setXpath(getXPath(decryptedNode));
        }

        return dataRef;
    }

    // This processor is not registered under an Id.
    public String getId() {
        return null;
    }

    /**
     * @param decryptedNode the decrypted node
     * @return a fully built xpath
     *         (eg. &quot;/soapenv:Envelope/soapenv:Body/ns:decryptedElement&quot;)
     *         if the decryptedNode is an Element or an Attr node and is not detached
     *         from the document. <code>null</code> otherwise
     */
    public static String getXPath(Node decryptedNode) {
        if (decryptedNode == null) {
            return null;
        }

        String result = "";
        if (Node.ELEMENT_NODE == decryptedNode.getNodeType()) {
            result = decryptedNode.getNodeName();
            result = prependFullPath(result, decryptedNode.getParentNode());
        } else if (Node.ATTRIBUTE_NODE == decryptedNode.getNodeType()) {
            result = "@" + decryptedNode.getNodeName();
            result = prependFullPath(result, ((Attr)decryptedNode).getOwnerElement());
        } else {
            return null;
        }

        return result;
    }

    /**
     * Recursively build an absolute xpath (starting with the root &quot;/&quot;)
     *
     * @param xpath the xpath expression built so far
     * @param node the current node whose name is to be prepended
     * @return a fully built xpath
     */
    private static String prependFullPath(String xpath, Node node) {
        if (node == null) {
            // probably a detached node... not really useful
            return null;
        } else if (Node.ELEMENT_NODE == node.getNodeType()) {
            xpath = node.getNodeName() + "/" + xpath;
            return prependFullPath(xpath, node.getParentNode());
        } else if (Node.DOCUMENT_NODE == node.getNodeType()) {
            // Reached the document root: anchor the path with a leading "/".
            return "/" + xpath;
        } else {
            // Skip non-element ancestors (e.g. document fragments).
            return prependFullPath(xpath, node.getParentNode());
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.usecases; import static org.junit.Assert.assertTrue; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Enumeration; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import javax.jms.Connection; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; import javax.jms.TextMessage; import org.apache.activemq.ActiveMQConnection; import org.apache.activemq.ActiveMQSession; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.command.ActiveMQDestination; import org.apache.activemq.network.DiscoveryNetworkConnector; import org.apache.activemq.network.NetworkConnector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.junit.Test; public class RequestReplyToTopicViaThreeNetworkHopsTest { protected static final int CONCURRENT_CLIENT_COUNT = 5; protected static final int CONCURRENT_SERVER_COUNT = 5; protected 
    // Number of client iterations to run against the temp-topic reply path.
    static final int TOTAL_CLIENT_ITER = 10;

    // Monotonic counter handing each embedded broker a unique number (and hence TCP port).
    protected static int Next_broker_num = 0;

    protected EmbeddedTcpBroker edge1;
    protected EmbeddedTcpBroker edge2;
    protected EmbeddedTcpBroker core1;
    protected EmbeddedTcpBroker core2;

    // Set when any verification fails; "fatal" additionally aborts remaining iterations.
    protected boolean testError = false;
    protected boolean fatalTestError = false;

    protected int echoResponseFill = 0; // Number of "filler" response messages per request

    protected static Log LOG;
    public boolean duplex = true;

    static {
        LOG = LogFactory.getLog(RequestReplyToTopicViaThreeNetworkHopsTest.class);
    }

    /**
     * Builds the four-broker topology: edge1 -> core1 -> core2 <- edge2,
     * so a request/reply crosses three network hops end-to-end.
     */
    public RequestReplyToTopicViaThreeNetworkHopsTest() throws Exception {
        edge1 = new EmbeddedTcpBroker("edge", 1);
        edge2 = new EmbeddedTcpBroker("edge", 2);
        core1 = new EmbeddedTcpBroker("core", 1);
        core2 = new EmbeddedTcpBroker("core", 2);

        // duplex is necessary to serialise sends with consumer/destination creation
        edge1.coreConnectTo(core1, duplex);
        edge2.coreConnectTo(core2, duplex);
        core1.coreConnectTo(core2, duplex);
    }

    /** Writes a message to stdout and flushes immediately. */
    public void logMessage(String msg) {
        System.out.println(msg);
        System.out.flush();
    }

    /**
     * Sends num_msg request messages (reply-to resp_dest) and verifies, via a
     * background MessageClient consumer, that the expected number of echo
     * responses arrives on resp_dest.
     */
    public void testMessages(Session sess, MessageProducer req_prod, Destination resp_dest, int num_msg) throws Exception {
        MessageConsumer resp_cons;
        TextMessage msg;
        MessageClient cons_client;
        int cur;
        int tot_expected;

        resp_cons = sess.createConsumer(resp_dest);

        cons_client = new MessageClient(resp_cons, num_msg);
        cons_client.start();

        cur = 0;
        while ((cur < num_msg) && (!fatalTestError)) {
            msg = sess.createTextMessage("MSG AAAA " + cur);
            msg.setIntProperty("SEQ", 100 + cur);
            msg.setStringProperty("TEST", "TOPO");
            msg.setJMSReplyTo(resp_dest);

            if (cur == (num_msg - 1))
                msg.setBooleanProperty("end-of-response", true);

            sendWithRetryOnDeletedDest(req_prod, msg);
            LOG.debug("Sent:" + msg);

            cur++;
        }

        //
        // Give the consumer some time to receive the response.
        //
        cons_client.waitShutdown(5000);

        //
        // Now shutdown the consumer if it's still running.
        //
        if (cons_client.shutdown())
            LOG.debug("Consumer client shutdown complete");
        else
            LOG.debug("Consumer client shutdown incomplete!!!");

        //
        // Check that the correct number of messages was received.
        //
        tot_expected = num_msg * (echoResponseFill + 1);

        if (cons_client.getNumMsgReceived() == tot_expected) {
            LOG.debug("Have " + tot_expected + " messages, as-expected");
        } else {
            testError = true;

            // Zero responses means the reply path is broken entirely; abort further iterations.
            if (cons_client.getNumMsgReceived() == 0)
                fatalTestError = true;

            LOG.error("Have " + cons_client.getNumMsgReceived() + " messages; expected " + tot_expected + " on destination " + resp_dest);
        }

        resp_cons.close();
    }

    /**
     * Sends the request message, logging and rethrowing any JMSException.
     * NOTE(review): despite the name, no retry is performed here.
     */
    protected void sendWithRetryOnDeletedDest(MessageProducer prod, Message msg) throws JMSException {
        try {
            if (LOG.isDebugEnabled())
                LOG.debug("SENDING REQUEST message " + msg);

            prod.send(msg);
        } catch (JMSException jms_exc) {
            System.out.println("AAA: " + jms_exc.getMessage());
            throw jms_exc;
        }
    }

    /**
     * Test one destination between the given "producer broker" and "consumer broker" specified.
     */
    public void testOneDest(Connection conn, Session sess, Destination cons_dest, int num_msg) throws Exception {
        Destination prod_dest;
        MessageProducer msg_prod;

        //
        // Create the Producer to the echo request Queue
        //
        LOG.trace("Creating echo queue and producer");
        prod_dest = sess.createQueue("echo");
        msg_prod = sess.createProducer(prod_dest);

        //
        // Pass messages around.
        //
        testMessages(sess, msg_prod, cons_dest, num_msg);

        msg_prod.close();
    }

    /**
     * TEST TEMPORARY TOPICS
     */
    public void testTempTopic(String prod_broker_url, String cons_broker_url) throws Exception {
        Connection conn;
        Session sess;
        Destination cons_dest;
        int num_msg;

        num_msg = 5;

        LOG.debug("TESTING TEMP TOPICS " + prod_broker_url + " -> " + cons_broker_url + " (" + num_msg + " messages)");

        //
        // Connect to the bus.
        //
        conn = createConnection(cons_broker_url);
        conn.start();
        sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);

        //
        // Create the destination on which messages are being tested.
        //
        LOG.trace("Creating destination");
        cons_dest = sess.createTemporaryTopic();

        testOneDest(conn, sess, cons_dest, num_msg);

        //
        // Cleanup
        //
        sess.close();
        conn.close();
    }

    /**
     * TEST TOPICS
     */
    public void testTopic(String prod_broker_url, String cons_broker_url) throws Exception {
        int num_msg;
        Connection conn;
        Session sess;
        String topic_name;
        Destination cons_dest;

        num_msg = 5;

        LOG.info("TESTING TOPICS " + prod_broker_url + " -> " + cons_broker_url + " (" + num_msg + " messages)");

        conn = createConnection(cons_broker_url);
        conn.start();
        sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);

        //
        // Create the destination on which messages are being tested.
        //
        topic_name = "topotest2.perm.topic";
        LOG.trace("Removing existing Topic");
        removeTopic(conn, topic_name);
        LOG.trace("Creating Topic, " + topic_name);
        cons_dest = sess.createTopic(topic_name);

        testOneDest(conn, sess, cons_dest, num_msg);

        //
        // Cleanup
        //
        removeTopic(conn, topic_name);
        sess.close();
        conn.close();
    }

    /**
     * TEST TEMPORARY QUEUES
     */
    public void testTempQueue(String prod_broker_url, String cons_broker_url) throws Exception {
        int num_msg;
        Connection conn;
        Session sess;
        Destination cons_dest;

        num_msg = 5;

        LOG.info("TESTING TEMP QUEUES " + prod_broker_url + " -> " + cons_broker_url + " (" + num_msg + " messages)");

        //
        // Connect to the bus.
        //
        conn = createConnection(cons_broker_url);
        conn.start();
        sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);

        //
        // Create the destination on which messages are being tested.
        //
        LOG.trace("Creating destination");
        cons_dest = sess.createTemporaryQueue();

        testOneDest(conn, sess, cons_dest, num_msg);

        //
        // Cleanup
        //
        sess.close();
        conn.close();
    }

    /**
     * TEST QUEUES
     */
    public void testQueue(String prod_broker_url, String cons_broker_url) throws Exception {
        int num_msg;
        Connection conn;
        Session sess;
        String queue_name;
        Destination cons_dest;

        num_msg = 5;

        LOG.info("TESTING QUEUES " + prod_broker_url + " -> " + cons_broker_url + " (" + num_msg + " messages)");

        conn = createConnection(cons_broker_url);
        conn.start();
        sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);

        //
        // Create the destination on which messages are being tested.
        //
        queue_name = "topotest2.perm.queue";
        LOG.trace("Removing existing Queue");
        removeQueue(conn, queue_name);
        LOG.trace("Creating Queue, " + queue_name);
        cons_dest = sess.createQueue(queue_name);

        testOneDest(conn, sess, cons_dest, num_msg);

        removeQueue(conn, queue_name);
        sess.close();
        conn.close();
    }

    /**
     * Main test: starts all four brokers, an echo service, and background topic
     * traffic, then runs TOTAL_CLIENT_ITER temp-topic request/reply clients
     * through a bounded thread pool and asserts they all complete cleanly.
     */
    @Test
    public void runWithTempTopicReplyTo() throws Exception {
        EchoService echo_svc;
        TopicTrafficGenerator traffic_gen;
        Thread start1;
        Thread start2;
        Thread start3;
        Thread start4;
        ThreadPoolExecutor clientExecPool;
        final CountDownLatch clientCompletionLatch;
        int iter;

        fatalTestError = false;
        testError = false;

        //
        // Execute up to 20 clients at a time to simulate that load.
        //
        clientExecPool = new ThreadPoolExecutor(CONCURRENT_CLIENT_COUNT, CONCURRENT_CLIENT_COUNT, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(10000));
        clientCompletionLatch = new CountDownLatch(TOTAL_CLIENT_ITER);

        // Use threads to avoid startup deadlock since the first broker started waits until
        // it knows the name of the remote broker before finishing its startup, which means
        // the remote must already be running.

        start1 = new Thread() {
            @Override
            public void run() {
                try {
                    edge1.start();
                } catch (Exception ex) {
                    LOG.error(null, ex);
                }
            }
        };

        start2 = new Thread() {
            @Override
            public void run() {
                try {
                    edge2.start();
                } catch (Exception ex) {
                    LOG.error(null, ex);
                }
            }
        };

        start3 = new Thread() {
            @Override
            public void run() {
                try {
                    core1.start();
                } catch (Exception ex) {
                    LOG.error(null, ex);
                }
            }
        };

        start4 = new Thread() {
            @Override
            public void run() {
                try {
                    core2.start();
                } catch (Exception ex) {
                    LOG.error(null, ex);
                }
            }
        };

        start1.start();
        start2.start();
        start3.start();
        start4.start();

        start1.join();
        start2.join();
        start3.join();
        start4.join();

        traffic_gen = new TopicTrafficGenerator(edge1.getConnectionUrl(), edge2.getConnectionUrl());
        traffic_gen.start();

        //
        // Now start the echo service with that queue.
        //
        echo_svc = new EchoService("echo", edge1.getConnectionUrl());
        echo_svc.start();

        //
        // Run the tests on Temp Topics.
        //
        LOG.info("** STARTING TEMP TOPIC TESTS");
        iter = 0;
        while ((iter < TOTAL_CLIENT_ITER) && (!fatalTestError)) {
            clientExecPool.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        RequestReplyToTopicViaThreeNetworkHopsTest.this.testTempTopic(edge1.getConnectionUrl(), edge2.getConnectionUrl());
                    } catch (Exception exc) {
                        LOG.error("test exception", exc);
                        fatalTestError = true;
                        testError = true;
                    }

                    // Count down even on failure so await() below cannot hang on an exception.
                    clientCompletionLatch.countDown();
                }
            });

            iter++;
        }

        boolean allDoneOnTime = clientCompletionLatch.await(20, TimeUnit.MINUTES);

        LOG.info("** FINISHED TEMP TOPIC TESTS AFTER " + iter + " ITERATIONS, testError:" + testError + ", fatal: " + fatalTestError + ", onTime:" + allDoneOnTime);

        Thread.sleep(100);

        echo_svc.shutdown();
        traffic_gen.shutdown();

        shutdown();

        assertTrue("test completed in time", allDoneOnTime);
        assertTrue("no errors", !testError);
    }

    /** Stops all four embedded brokers. */
    public void shutdown() throws Exception {
        edge1.stop();
        edge2.stop();
        core1.stop();
        core2.stop();
    }

    /** Creates an unstarted ActiveMQ connection to the given broker URL. */
    protected Connection createConnection(String url) throws Exception {
        return org.apache.activemq.ActiveMQConnection.makeConnection(url);
    }

    /** Destroys the named queue on the broker behind the given connection, if it is an ActiveMQ connection. */
    protected static void removeQueue(Connection conn, String dest_name) throws java.lang.Exception {
        org.apache.activemq.command.ActiveMQDestination dest;

        if (conn instanceof org.apache.activemq.ActiveMQConnection) {
            dest = org.apache.activemq.command.ActiveMQDestination.createDestination(dest_name, org.apache.activemq.command.ActiveMQDestination.QUEUE_TYPE);
            ((org.apache.activemq.ActiveMQConnection) conn).destroyDestination(dest);
        }
    }

    /** Destroys the named topic on the broker behind the given connection, if it is an ActiveMQ connection. */
    protected static void removeTopic(Connection conn, String dest_name) throws java.lang.Exception {
        org.apache.activemq.command.ActiveMQDestination dest;

        if (conn instanceof org.apache.activemq.ActiveMQConnection) {
            dest = org.apache.activemq.command.ActiveMQDestination.createDestination(dest_name, org.apache.activemq.command.ActiveMQDestination.TOPIC_TYPE);
            ((org.apache.activemq.ActiveMQConnection) conn).destroyDestination(dest);
        }
    }

    /**
     * Formats a message for logging: text (or class name for non-text messages)
     * followed by all message properties as "; name=value" pairs.
     */
    public static String fmtMsgInfo(Message msg) throws Exception {
        StringBuilder msg_desc;
        String prop;
        Enumeration<?> prop_enum;

        msg_desc = new StringBuilder();
        // NOTE(review): duplicate assignment — the second new StringBuilder() is redundant but harmless.
        msg_desc = new StringBuilder();

        if (msg instanceof TextMessage) {
            msg_desc.append(((TextMessage) msg).getText());
        } else {
            msg_desc.append("[");
            msg_desc.append(msg.getClass().getName());
            msg_desc.append("]");
        }

        prop_enum = msg.getPropertyNames();
        while (prop_enum.hasMoreElements()) {
            prop = (String) prop_enum.nextElement();
            msg_desc.append("; ");
            msg_desc.append(prop);
            msg_desc.append("=");
            msg_desc.append(msg.getStringProperty(prop));
        }

        return msg_desc.toString();
    }

    /**
     * An embedded, non-persistent, JMX-less broker listening on a TCP port
     * derived from its assigned broker number (60000 + num * 10).
     */
    protected class EmbeddedTcpBroker {
        protected BrokerService brokerSvc;
        protected int brokerNum;
        protected String brokerName;
        protected String brokerId;
        protected int port;
        protected String tcpUrl;
        protected String fullUrl;

        public EmbeddedTcpBroker(String name, int number) throws Exception {
            brokerSvc = new BrokerService();

            // Guard the shared static counter; all instances lock the same Class object.
            synchronized (this.getClass()) {
                brokerNum = Next_broker_num;
                Next_broker_num++;
            }

            brokerName = name + number;
            brokerId = brokerName;

            brokerSvc.setBrokerName(brokerName);
            brokerSvc.setBrokerId(brokerId);
            brokerSvc.setPersistent(false);
            brokerSvc.setUseJmx(false);

            port = 60000 + (brokerNum * 10);

            tcpUrl = "tcp://127.0.0.1:" + Integer.toString(port);
            fullUrl = tcpUrl + "?jms.watchTopicAdvisories=false";

            brokerSvc.addConnector(tcpUrl);
        }

        /** Opens a new (unstarted) client connection to this broker. */
        public Connection createConnection() throws URISyntaxException, JMSException {
            Connection result;

            result = org.apache.activemq.ActiveMQConnection.makeConnection(this.fullUrl);

            return result;
        }

        public String getConnectionUrl() {
            return this.fullUrl;
        }

        /**
         * Creates the network bridges to the other broker: one queue bridge plus
         * one topic bridge; for non-duplex, the reverse bridges are added too.
         */
        public void coreConnectTo(EmbeddedTcpBroker other, boolean duplex_f) throws Exception {
            this.makeConnectionTo(other, duplex_f, true);
            this.makeConnectionTo(other, duplex_f, false);
            if (!duplex_f) {
                other.makeConnectionTo(this, duplex_f, true);
                other.makeConnectionTo(this, duplex_f, false);
            }
        }

        public void start() throws Exception {
            brokerSvc.start();
            brokerSvc.waitUntilStarted();
        }

        public void stop() throws Exception {
            brokerSvc.stop();
        }

        /**
         * Adds one static network connector to the other broker, configured for
         * either queues (queue_f true, topics excluded) or topics (queues excluded).
         */
        protected void makeConnectionTo(EmbeddedTcpBroker other, boolean duplex_f, boolean queue_f) throws Exception {
            NetworkConnector nw_conn;
            String prefix;
            ActiveMQDestination excl_dest;
            ArrayList<ActiveMQDestination> excludes;

            nw_conn = new DiscoveryNetworkConnector(new URI("static:(" + other.tcpUrl + ")"));
            nw_conn.setDuplex(duplex_f);

            // Conduit subscriptions only for the topic bridge.
            if (queue_f)
                nw_conn.setConduitSubscriptions(false);
            else
                nw_conn.setConduitSubscriptions(true);

            nw_conn.setNetworkTTL(3);
            nw_conn.setSuppressDuplicateQueueSubscriptions(true);
            nw_conn.setDecreaseNetworkConsumerPriority(true);
            nw_conn.setBridgeTempDestinations(queue_f);

            if (queue_f) {
                prefix = "queue";
                excl_dest = ActiveMQDestination.createDestination(">", ActiveMQDestination.TOPIC_TYPE);
            } else {
                prefix = "topic";
                excl_dest = ActiveMQDestination.createDestination(">", ActiveMQDestination.QUEUE_TYPE);
            }

            excludes = new ArrayList<ActiveMQDestination>();
            excludes.add(excl_dest);
            nw_conn.setExcludedDestinations(excludes);

            if (duplex_f)
                nw_conn.setName(this.brokerId + "<-" + prefix + "->" + other.brokerId);
            else
                nw_conn.setName(this.brokerId + "-" + prefix + "->" + other.brokerId);

            brokerSvc.addNetworkConnector(nw_conn);
        }
    }

    /**
     * Background consumer thread that receives and sequence-checks response
     * messages until shut down or a fatal test error occurs.
     */
    protected class MessageClient extends java.lang.Thread {
        protected MessageConsumer msgCons;
        // NOTE(review): read/written across threads without volatile/synchronization;
        // the 100ms receive() poll masks visibility delays — confirm intentional.
        protected boolean shutdownInd;
        protected int expectedCount;
        protected int lastSeq = 0;
        protected int msgCount = 0;
        protected boolean haveFirstSeq;
        protected CountDownLatch shutdownLatch;

        public MessageClient(MessageConsumer cons, int num_to_expect) {
            msgCons = cons;
            expectedCount = (num_to_expect * (echoResponseFill + 1));
            shutdownLatch = new CountDownLatch(1);
        }

        @Override
        public void run() {
            CountDownLatch latch;

            try {
                synchronized (this) {
                    latch = shutdownLatch;
                }

                shutdownInd = false;
                processMessages();

                latch.countDown();
            } catch (Exception exc) {
                LOG.error("message client error", exc);
            }
        }

        /** Waits up to the given milliseconds for the consumer loop to finish. */
        public void waitShutdown(long timeout) {
            CountDownLatch latch;

            try {
                synchronized (this) {
                    latch = shutdownLatch;
                }

                if (latch != null)
                    latch.await(timeout, TimeUnit.MILLISECONDS);
                else
                    LOG.info("echo client shutdown: client does not appear to be active");
            } catch (InterruptedException int_exc) {
                LOG.warn("wait for message client shutdown interrupted", int_exc);
            }
        }

        /**
         * Requests shutdown and waits briefly.
         * @return true if the consumer loop has terminated.
         */
        public boolean shutdown() {
            boolean down_ind;

            if (!shutdownInd) {
                shutdownInd = true;
            }

            waitShutdown(200);

            synchronized (this) {
                if ((shutdownLatch == null) || (shutdownLatch.getCount() == 0))
                    down_ind = true;
                else
                    down_ind = false;
            }

            return down_ind;
        }

        public int getNumMsgReceived() {
            return msgCount;
        }

        /** Polls the consumer every 100ms, counting and checking each message. */
        protected void processMessages() throws Exception {
            Message in_msg;

            haveFirstSeq = false;

            //
            // Stop at shutdown time or after any test error is detected.
            //
            while ((!shutdownInd) && (!fatalTestError)) {
                in_msg = msgCons.receive(100);

                if (in_msg != null) {
                    msgCount++;
                    checkMessage(in_msg);
                }
            }

            msgCons.close();
        }

        /** Verifies SEQ ordering and total count for one received message. */
        protected void checkMessage(Message in_msg) throws Exception {
            int seq;

            LOG.debug("received message " + fmtMsgInfo(in_msg) + " from " + in_msg.getJMSDestination());

            //
            // Only check messages with a sequence number.
            //
            if (in_msg.propertyExists("SEQ")) {
                seq = in_msg.getIntProperty("SEQ");

                if ((haveFirstSeq) && (seq != (lastSeq + 1))) {
                    LOG.error("***ERROR*** incorrect sequence number; expected " + Integer.toString(lastSeq + 1) + " but have " + Integer.toString(seq));
                    testError = true;
                }

                lastSeq = seq;

                if (msgCount > expectedCount) {
                    LOG.error("*** have more messages than expected; have " + msgCount + "; expect " + expectedCount);
                    testError = true;
                }
            }

            if (in_msg.propertyExists("end-of-response")) {
                LOG.trace("received end-of-response message");
            }
        }
    }

    /**
     * Echo service thread: consumes requests from the "echo" queue and hands
     * each one to a pool of EchoRequestProcessors that send it back to its
     * reply-to destination.
     */
    protected class EchoService extends java.lang.Thread {
        protected String destName;
        protected Connection jmsConn;
        protected Session sess;
        protected MessageConsumer msg_cons;
        protected boolean Shutdown_ind;

        protected Destination req_dest;

        protected CountDownLatch waitShutdown;

        protected ThreadPoolExecutor processorPool;

        public EchoService(String dest, Connection broker_conn) throws Exception {
            destName = dest;
            jmsConn = broker_conn;

            Shutdown_ind = false;

            sess = jmsConn.createSession(false, Session.AUTO_ACKNOWLEDGE);
            req_dest = sess.createQueue(destName);
            msg_cons = sess.createConsumer(req_dest);

            jmsConn.start();

            waitShutdown = new CountDownLatch(1);

            processorPool = new ThreadPoolExecutor(CONCURRENT_SERVER_COUNT, CONCURRENT_SERVER_COUNT, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(
                10000));
        }

        public EchoService(String dest, String broker_url) throws Exception {
            this(dest, ActiveMQConnection.makeConnection(broker_url));
        }

        @Override
        public void run() {
            Message req;

            try {
                LOG.info("STARTING ECHO SERVICE");

                // Poll for requests every 100ms until shutdown is requested.
                while (!Shutdown_ind) {
                    req = msg_cons.receive(100);
                    if (req != null) {
                        processorPool.execute(new EchoRequestProcessor(sess, req));
                    }
                }
            } catch (Exception ex) {
                LOG.error("error processing echo service requests", ex);
            } finally {
                LOG.info("shutting down test echo service");

                try {
                    jmsConn.stop();
                } catch (javax.jms.JMSException jms_exc) {
                    LOG.warn("error on shutting down JMS connection", jms_exc);
                }

                synchronized (this) {
                    waitShutdown.countDown();
                }
            }
        }

        /**
         * Shut down the service, waiting up to 3 seconds for the service to terminate.
         */
        public void shutdown() {
            CountDownLatch wait_l;

            synchronized (this) {
                wait_l = waitShutdown;
            }

            Shutdown_ind = true;

            try {
                if (wait_l != null) {
                    if (wait_l.await(3000, TimeUnit.MILLISECONDS))
                        LOG.info("echo service shutdown complete");
                    else
                        LOG.warn("timeout waiting for echo service shutdown");
                } else {
                    LOG.info("echo service shutdown: service does not appear to be active");
                }
            } catch (InterruptedException int_exc) {
                LOG.warn("interrupted while waiting for echo service shutdown");
            }
        }
    }

    /**
     * Processes a single echo request: sends the request message back to its
     * JMSReplyTo destination.
     */
    protected class EchoRequestProcessor implements Runnable {
        protected Session session;

        protected Destination resp_dest;
        protected MessageProducer msg_prod;

        protected Message request;

        public EchoRequestProcessor(Session sess, Message req) throws Exception {
            this.session = sess;
            this.request = req;

            this.resp_dest = req.getJMSReplyTo();

            if (resp_dest == null) {
                throw new Exception("invalid request: no reply-to destination given");
            }

            this.msg_prod = session.createProducer(this.resp_dest);
        }

        @Override
        public void run() {
            try {
                this.processRequest(this.request);
            } catch (Exception ex) {
                LOG.error("Failed to process request", ex);
            }
        }

        /**
         * Process one request for the Echo Service.
         */
        protected void processRequest(Message req) throws Exception {
            if (LOG.isDebugEnabled())
                LOG.debug("ECHO request message " + req.toString());

            resp_dest = req.getJMSReplyTo();
            if (resp_dest != null) {
                // A fresh producer is created per request; the one from the constructor is replaced here.
                msg_prod = session.createProducer(resp_dest);

                LOG.debug("SENDING ECHO RESPONSE to:" + resp_dest);

                msg_prod.send(req);

                LOG.debug((((ActiveMQSession) session).getConnection()).getBrokerName() + " SENT ECHO RESPONSE to " + resp_dest);

                msg_prod.close();
                msg_prod = null;
            } else {
                LOG.warn("invalid request: no reply-to destination given");
            }
        }
    }

    /**
     * Background thread that continuously publishes to the "traffic" topic on
     * one broker and consumes it on another, keeping the network bridges busy.
     */
    protected class TopicTrafficGenerator extends java.lang.Thread {
        protected Connection conn1;
        protected Connection conn2;
        protected Session sess1;
        protected Session sess2;
        protected Destination dest;
        protected MessageProducer prod;
        protected MessageConsumer cons;
        protected boolean Shutdown_ind;
        protected int send_count;

        public TopicTrafficGenerator(String url1, String url2) throws Exception {
            conn1 = createConnection(url1);
            conn2 = createConnection(url2);

            sess1 = conn1.createSession(false, Session.AUTO_ACKNOWLEDGE);
            sess2 = conn2.createSession(false, Session.AUTO_ACKNOWLEDGE);

            conn1.start();
            conn2.start();

            dest = sess1.createTopic("traffic");
            prod = sess1.createProducer(dest);

            dest = sess2.createTopic("traffic");
            cons = sess2.createConsumer(dest);
        }

        public void shutdown() {
            Shutdown_ind = true;
        }

        @Override
        public void run() {
            Message msg;

            try {
                LOG.info("Starting Topic Traffic Generator");

                while (!Shutdown_ind) {
                    msg = sess1.createTextMessage("TRAFFIC");

                    prod.send(msg);

                    send_count++;

                    //
                    // Time out the receipt; early messages may not make it.
                    //
                    msg = cons.receive(250);
                }
            } catch (JMSException jms_exc) {
                LOG.warn("traffic generator failed on jms exception", jms_exc);
            } finally {
                LOG.info("Shutdown of Topic Traffic Generator; send count = " + send_count);

                if (conn1 != null) {
                    try {
                        conn1.stop();
                    } catch (JMSException jms_exc) {
                        LOG.warn("failed to shutdown connection", jms_exc);
                    }
                }

                if (conn2 != null) {
                    try {
                        conn2.stop();
                    } catch (JMSException jms_exc) {
                        LOG.warn("failed to shutdown connection", jms_exc);
                    }
                }
            }
        }
    }
}
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.javascript.jscomp.CodingConvention.SubclassRelationship;
import com.google.javascript.jscomp.ReferenceCollectingCallback.Reference;
import com.google.javascript.jscomp.ReferenceCollectingCallback.ReferenceCollection;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.logging.Logger;

/**
 * A {@link Compiler} pass for moving code to a deeper module if possible.
 * - currently it only moves functions + variables
 *
 */
class CrossModuleCodeMotion implements CompilerPass {

  private static final Logger logger =
      Logger.getLogger(CrossModuleCodeMotion.class.getName());

  private final AbstractCompiler compiler;
  private final JSModuleGraph graph;

  /**
   * Map from module to the node in that module that should parent any string
   * variable declarations that have to be moved into that module
   */
  private final Map<JSModule, Node> moduleVarParentMap =
      new HashMap<>();

  /*
   * NOTE - I made this a LinkedHashMap to make testing easier. With a regular
   * HashMap, the variables may not output in a consistent order
   */
  private final Map<Var, NamedInfo> namedInfo = new LinkedHashMap<>();

  // instanceof expressions (keyed by the INSTANCEOF node) that may need a
  // typeof guard so that code motion cannot make them throw on an
  // as-yet-undefined constructor.
  private final Map<Node, InstanceofInfo> instanceofNodes =
      new LinkedHashMap<>();

  private final boolean parentModuleCanSeeSymbolsDeclaredInChildren;

  /**
   * Creates an instance.
   *
   * @param compiler The compiler
   */
  CrossModuleCodeMotion(
      AbstractCompiler compiler,
      JSModuleGraph graph,
      boolean parentModuleCanSeeSymbolsDeclaredInChildren) {
    this.compiler = compiler;
    this.graph = graph;
    this.parentModuleCanSeeSymbolsDeclaredInChildren =
        parentModuleCanSeeSymbolsDeclaredInChildren;
  }

  @Override
  public void process(Node externs, Node root) {
    logger.fine("Moving functions + variable into deeper modules");

    // If there are <2 modules, then we will never move anything, so we're done
    if (graph != null && graph.getModuleCount() > 1) {
      // Traverse the tree and find the modules where a var is declared + used
      collectReferences(root);

      // Make it so we can ignore constructor references in instanceof.
      if (parentModuleCanSeeSymbolsDeclaredInChildren) {
        makeInstanceOfCodeOrderIndependent();
      }

      // Move the functions + variables to a deeper module [if possible]
      moveCode();
    }
  }

  /** move the code accordingly */
  private void moveCode() {
    for (NamedInfo info : namedInfo.values()) {
      JSModule deepestDependency = info.deepestModule;

      // Only move if all are true:
      // a) allowMove is true
      // b) it was used + declared somewhere [if not, then it will be removed
      // as dead or invalid code elsewhere]
      // c) the new dependency depends on the declModule
      if (info.allowMove && deepestDependency != null) {
        Iterator<Declaration> it = info.declarationIterator();
        JSModuleGraph moduleGraph = compiler.getModuleGraph();
        while (it.hasNext()) {
          Declaration decl = it.next();
          if (decl.module != null &&
              moduleGraph.dependsOn(deepestDependency,
                  decl.module)) {

            // Find the appropriate spot to move it to
            Node destParent = moduleVarParentMap.get(deepestDependency);
            if (destParent == null) {
              destParent = compiler.getNodeForCodeInsertion(deepestDependency);
              moduleVarParentMap.put(deepestDependency, destParent);
            }

            // VAR Nodes are normalized to have only one child.
            Node declParent = decl.node.getParent();
            Preconditions.checkState(
                !declParent.isVar() || declParent.hasOneChild(),
                "AST not normalized.");

            // Remove it
            declParent.detach();

            // Add it to the new spot.  Declarations were pushed onto a deque,
            // so they are visited here in reverse source order; addChildToFront
            // restores their original relative order in the target module.
            destParent.addChildToFront(declParent);

            compiler.reportCodeChange();
          }
        }
      }
    }
  }

  /** useful information for each variable candidate */
  private class NamedInfo {
    boolean allowMove = true;

    // The deepest module where the variable is used. Starts at null
    private JSModule deepestModule = null;

    // The module where declarations appear
    private JSModule declModule = null;

    // information on the spot where the item was declared
    private final Deque<Declaration> declarations =
        new ArrayDeque<>();

    // Add a Module where it is used
    void addUsedModule(JSModule m) {
      // If we are not allowed to move it, all bets are off
      if (!allowMove) {
        return;
      }

      // If we have no deepest module yet, set this one
      if (deepestModule == null) {
        deepestModule = m;
      } else {
        // Find the deepest common dependency
        deepestModule =
            graph.getDeepestCommonDependencyInclusive(m, deepestModule);
      }
    }

    // True if m is (or depends on) the deepest module where this name is used.
    boolean isUsedInOrDependencyOfModule(JSModule m) {
      if (deepestModule == null || m == null) {
        return false;
      }
      return m == deepestModule || graph.dependsOn(m, deepestModule);
    }

    /**
     * Add a declaration for this name.
     * @return Whether this is a valid declaration. If this returns false,
     *     this should be added as a reference.
     */
    boolean addDeclaration(Declaration d) {
      // all declarations must appear in the same module.
      if (declModule != null && d.module != declModule) {
        return false;
      }
      declarations.push(d);
      declModule = d.module;
      return true;
    }

    /**
     * Returns an iterator over the declarations, in the order that they were
     * declared.
     */
    Iterator<Declaration> declarationIterator() {
      return declarations.iterator();
    }
  }

  /** A declaration site: the module it lives in and its AST node. */
  private static class Declaration {
    final JSModule module;
    final Node node;

    Declaration(JSModule module, Node node) {
      this.module = module;
      this.node = node;
    }
  }

  /**
   * return true if the node has any form of conditional in its ancestry
   * TODO(nicksantos) keep track of the conditionals in the ancestry, so
   * that we don't have to recrawl it.
   */
  private static boolean hasConditionalAncestor(Node n) {
    for (Node ancestor : n.getAncestors()) {
      switch (ancestor.getToken()) {
        case DO:
        case FOR:
        case FOR_IN:
        case HOOK:
        case IF:
        case SWITCH:
        case WHILE:
        case FUNCTION:
          return true;
        default:
          break;
      }
    }
    return false;
  }

  /**
   * get the information on a variable
   */
  private NamedInfo getNamedInfo(Var v) {
    NamedInfo info = namedInfo.get(v);
    if (info == null) {
      info = new NamedInfo();
      namedInfo.put(v, info);
    }
    return info;
  }

  /**
   * Process the reads to named variables
   */
  private void processRead(Reference ref, NamedInfo info) {
    // A name is recursively defined if:
    // 1: It is calling itself.
    // 2: One of its property calls itself.
    // Recursive definition should not block movement.

    String name = ref.getNode().getString();
    boolean recursive = false;
    Scope hoistTarget = ref.getScope().getClosestHoistScope();
    if (hoistTarget.isFunctionBlockScope()) {
      // The root of a function block scope is the body; its parent is the
      // FUNCTION node itself.
      Node rootNode = hoistTarget.getRootNode().getParent();

      // CASE #1:
      String scopeFuncName = rootNode.getFirstChild().getString();
      Node scopeFuncParent = rootNode.getParent();
      if (scopeFuncName.equals(name)) {
        recursive = true;
      } else if (scopeFuncParent.isName() &&
          scopeFuncParent.getString().equals(name)) {
        recursive = true;
      } else {
        // CASE #2:
        // Suppose name is Foo, we keep look up the scope stack to look for
        // a scope with "Foo.prototype.bar = function() { ..... "
        for (Scope s = ref.getScope();
             s.getParent() != null; s = s.getParent()) {
          Node curRoot = s.getRootNode();
          if (curRoot.getParent().isAssign()) {
            // Walk to the root of the qualified name being assigned to.
            Node owner = curRoot.getParent().getFirstChild();
            while (owner.isGetProp()) {
              owner = owner.getFirstChild();
            }
            if (owner.isName() &&
                owner.getString().equals(name)) {
              recursive = true;
              break;
            }
          }
        }
      }
    }

    if (!recursive) {
      info.addUsedModule(getModule(ref));
    }
  }

  /** Collects all global, non-exported names and processes every reference to them. */
  private void collectReferences(Node root) {
    ReferenceCollectingCallback collector = new ReferenceCollectingCallback(
        compiler,
        ReferenceCollectingCallback.DO_NOTHING_BEHAVIOR,
        new Es6SyntacticScopeCreator(compiler),
        new Predicate<Var>() {
          @Override
          public boolean apply(Var var) {
            // Only collect global and non-exported names.
            return var.isGlobal()
                && !compiler.getCodingConvention().isExported(var.getName());
          }
        });
    collector.process(root);

    for (Var v : collector.getAllSymbols()) {
      NamedInfo info = getNamedInfo(v);
      if (!info.allowMove) {
        continue;
      }
      ReferenceCollection refCollection = collector.getReferences(v);
      for (Reference ref : refCollection) {
        processReference(collector, ref, info, v);
      }
    }
  }

  /** Classifies one reference as a declaration, an instanceof guard candidate, or a read. */
  private void processReference(
      ReferenceCollectingCallback collector, Reference ref, NamedInfo info,
      Var v) {
    Node n = ref.getNode();
    if (isRecursiveDeclaration(v, n)) {
      return;
    }
    Node parent = n.getParent();
    if (maybeProcessDeclaration(collector, ref, info)) {
      // Check to see if the declaration is conditional starting at the
      // grandparent of the name node. Since a function declaration
      // is considered conditional (the function might not be called)
      // we would need to skip the parent in this check as the name could
      // just be a function itself.
      if (hasConditionalAncestor(parent.getParent())) {
        info.allowMove = false;
      }
    } else {
      if (parentModuleCanSeeSymbolsDeclaredInChildren
          && parent.isInstanceOf() && parent.getLastChild() == n) {
        // "x instanceof NAME" — record it; it may be guarded later rather than
        // counted as a read that pins the name to this module.
        instanceofNodes.put(parent, new InstanceofInfo(getModule(ref), info));
      } else {
        // Otherwise, it's a read
        processRead(ref, info);
      }
    }
  }

  /**
   * @param variable a variable which may be movable
   * @param referenceNode a node which is a reference to 'variable'
   * @return whether the reference to the variable is a recursive declaration
   *     e.g. function foo() { foo = function() {}; }
   */
  private boolean isRecursiveDeclaration(Var variable, Node referenceNode) {
    if (!referenceNode.getParent().isAssign()) {
      return false;
    }
    Node enclosingFunction = NodeUtil.getEnclosingFunction(referenceNode);
    return enclosingFunction != null
        && variable.getName().equals(
            NodeUtil.getNearestFunctionName(enclosingFunction));
  }

  /** Returns the module containing the input in which this reference occurs. */
  private JSModule getModule(Reference ref) {
    return compiler.getInput(ref.getInputId()).getModule();
  }

  /**
   * Determines whether the given NAME node belongs to a declaration that
   * can be moved across modules. If it is, registers it properly.
   *
   * There are four types of movable declarations:
   * 1) var NAME = [movable object];
   * 2) function NAME() {}
   * 3) NAME = [movable object];
   *    NAME.prop = [movable object];
   *    NAME.prop.prop2 = [movable object];
   *    etc.
   * 4) Class-defining function calls, like "inherits" and "mixin".
   *    NAME.inherits([some other name]);
   * where "movable object" is a literal or a function.
   */
  private boolean maybeProcessDeclaration(
      ReferenceCollectingCallback collector, Reference ref, NamedInfo info) {
    Node name = ref.getNode();
    Node parent = name.getParent();
    Node grandparent = parent.getParent();
    switch (parent.getToken()) {
      case VAR:
        if (canMoveValue(collector, ref.getScope(), name.getFirstChild())) {
          return info.addDeclaration(
              new Declaration(getModule(ref), name));
        }
        return false;

      case FUNCTION:
        if (NodeUtil.isFunctionDeclaration(parent)) {
          return info.addDeclaration(
              new Declaration(getModule(ref), name));
        }
        return false;

      case ASSIGN:
      case GETPROP:
        Node child = name;

        // Look for assignment expressions where the name is the root
        // of a qualified name on the left hand side of the assignment.
        for (Node current : name.getAncestors()) {
          if (current.isGetProp()) {
            // fallthrough
          } else if (current.isAssign() &&
                     current.getFirstChild() == child) {
            Node currentParent = current.getParent();
            if (currentParent.isExprResult() &&
                canMoveValue(
                    collector, ref.getScope(), current.getLastChild())) {
              return info.addDeclaration(
                  new Declaration(getModule(ref), current));
            }
          } else {
            return false;
          }

          child = current;
        }
        return false;

      case CALL:
        if (NodeUtil.isExprCall(grandparent)) {
          SubclassRelationship relationship =
              compiler.getCodingConvention().getClassesDefinedByCall(parent);
          if (relationship != null &&
              name.getString().equals(relationship.subclassName)) {
            return info.addDeclaration(
                new Declaration(getModule(ref), parent));
          }
        }
        return false;

      default:
        return false;
    }
  }

  /**
   * Determines whether the given value is eligible to be moved across modules.
   */
  private static boolean canMoveValue(
      ReferenceCollectingCallback collector, Scope scope, Node n) {
    // the value is only movable if it's
    // a) nothing,
    // b) a constant literal,
    // c) a function, or
    // d) an array/object literal of movable values.
    // e) a function stub generated by CrossModuleMethodMotion.
    if (n == null || NodeUtil.isLiteralValue(n, true) ||
        n.isFunction()) {
      return true;
    } else if (n.isCall()) {
      Node functionName = n.getFirstChild();
      return functionName.isName() &&
          (functionName.getString().equals(
              CrossModuleMethodMotion.STUB_METHOD_NAME) ||
           functionName.getString().equals(
              CrossModuleMethodMotion.UNSTUB_METHOD_NAME));
    } else if (n.isArrayLit() || n.isObjectLit()) {
      // For object literals, the values (first child of each key node) must be
      // movable; for array literals, each element itself.
      boolean isObjectLit = n.isObjectLit();
      for (Node child = n.getFirstChild(); child != null;
           child = child.getNext()) {
        if (!canMoveValue(collector, scope,
            isObjectLit ? child.getFirstChild() : child)) {
          return false;
        }
      }

      return true;
    } else if (n.isName()) {
      // If the value is guaranteed to never be changed after
      // this reference, then we can move it.
      Var v = scope.getVar(n.getString());
      if (v != null && v.isGlobal()) {
        ReferenceCollection refCollection = collector.getReferences(v);
        if (refCollection != null &&
            refCollection.isWellDefined() &&
            refCollection.isAssignedOnceInLifetime()) {
          return true;
        }
      }
    }

    return false;
  }

  /**
   * Transforms instanceof usages into an expression that short circuits to
   * false if tested with a constructor that is undefined. This allows ignoring
   * instanceof with respect to cross module code motion.
   */
  private void makeInstanceOfCodeOrderIndependent() {
    // Placeholder node used to hold the instanceof's position in the tree
    // while the guarded replacement expression is being built.
    Node tmp = IR.block();
    for (Map.Entry<Node, InstanceofInfo> entry : instanceofNodes.entrySet()) {
      Node n = entry.getKey();
      InstanceofInfo info = entry.getValue();
      if (!info.namedInfo.allowMove || !info.mustBeGuardedByTypeof()) {
        continue;
      }
      // In order for the compiler pass to be idempotent, this checks whether
      // the instanceof is already wrapped in the code that is generated below.
      Node parent = n.getParent();
      if (parent.isAnd()
          && parent.getLastChild() == n
          && parent.getFirstChild().isNE()) {
        Node ne = parent.getFirstChild();
        if (ne.getFirstChild().isString()
            && "undefined".equals(ne.getFirstChild().getString())
            && ne.getLastChild().isTypeOf()) {
          Node ref = ne.getLastChild().getFirstChild();
          if (ref.isEquivalentTo(n.getLastChild())) {
            continue;
          }
        }
      }
      // Wrap "foo instanceof Bar" in
      // "('undefined' != typeof Bar && foo instanceof Bar)"
      Node reference = n.getLastChild().cloneNode();
      Preconditions.checkState(reference.isName());
      n.replaceWith(tmp);
      Node and = IR.and(
          new Node(Token.NE,
              IR.string("undefined"),
              new Node(Token.TYPEOF, reference)
          ),
          n
      );
      and.useSourceInfoIfMissingFromForTree(n);
      tmp.replaceWith(and);
      compiler.reportCodeChange();
    }
  }

  /** Pairs an instanceof occurrence's module with the info of the tested name. */
  private static class InstanceofInfo {
    private final JSModule module;
    private final NamedInfo namedInfo;

    InstanceofInfo(JSModule module, NamedInfo namedInfo) {
      this.module = module;
      this.namedInfo = namedInfo;
    }

    /**
     * Returns true if this instance of instanceof is in a deeper module than
     * the deepest module (by reference) of the related name.
     * In that case the name may be undefined when the instanceof runs and we
     * have to guard it with typeof.
     */
    boolean mustBeGuardedByTypeof() {
      return !this.namedInfo.isUsedInOrDependencyOfModule(this.module);
    }
  }
}
/** * Apache License * Version 2.0, January 2004 * http://www.apache.org/licenses/ * * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION * * 1. Definitions. * * "License" shall mean the terms and conditions for use, reproduction, * and distribution as defined by Sections 1 through 9 of this document. * * "Licensor" shall mean the copyright owner or entity authorized by * the copyright owner that is granting the License. * * "Legal Entity" shall mean the union of the acting entity and all * other entities that control, are controlled by, or are under common * control with that entity. For the purposes of this definition, * "control" means (i) the power, direct or indirect, to cause the * direction or management of such entity, whether by contract or * otherwise, or (ii) ownership of fifty percent (50%) or more of the * outstanding shares, or (iii) beneficial ownership of such entity. * * "You" (or "Your") shall mean an individual or Legal Entity * exercising permissions granted by this License. * * "Source" form shall mean the preferred form for making modifications, * including but not limited to software source code, documentation * source, and configuration files. * * "Object" form shall mean any form resulting from mechanical * transformation or translation of a Source form, including but * not limited to compiled object code, generated documentation, * and conversions to other media types. * * "Work" shall mean the work of authorship, whether in Source or * Object form, made available under the License, as indicated by a * copyright notice that is included in or attached to the work * (an example is provided in the Appendix below). * * "Derivative Works" shall mean any work, whether in Source or Object * form, that is based on (or derived from) the Work and for which the * editorial revisions, annotations, elaborations, or other modifications * represent, as a whole, an original work of authorship. 
For the purposes * of this License, Derivative Works shall not include works that remain * separable from, or merely link (or bind by name) to the interfaces of, * the Work and Derivative Works thereof. * * "Contribution" shall mean any work of authorship, including * the original version of the Work and any modifications or additions * to that Work or Derivative Works thereof, that is intentionally * submitted to Licensor for inclusion in the Work by the copyright owner * or by an individual or Legal Entity authorized to submit on behalf of * the copyright owner. For the purposes of this definition, "submitted" * means any form of electronic, verbal, or written communication sent * to the Licensor or its representatives, including but not limited to * communication on electronic mailing lists, source code control systems, * and issue tracking systems that are managed by, or on behalf of, the * Licensor for the purpose of discussing and improving the Work, but * excluding communication that is conspicuously marked or otherwise * designated in writing by the copyright owner as "Not a Contribution." * * "Contributor" shall mean Licensor and any individual or Legal Entity * on behalf of whom a Contribution has been received by Licensor and * subsequently incorporated within the Work. * * 2. Grant of Copyright License. Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * copyright license to reproduce, prepare Derivative Works of, * publicly display, publicly perform, sublicense, and distribute the * Work and such Derivative Works in Source or Object form. * * 3. Grant of Patent License. 
Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * (except as stated in this section) patent license to make, have made, * use, offer to sell, sell, import, and otherwise transfer the Work, * where such license applies only to those patent claims licensable * by such Contributor that are necessarily infringed by their * Contribution(s) alone or by combination of their Contribution(s) * with the Work to which such Contribution(s) was submitted. If You * institute patent litigation against any entity (including a * cross-claim or counterclaim in a lawsuit) alleging that the Work * or a Contribution incorporated within the Work constitutes direct * or contributory patent infringement, then any patent licenses * granted to You under this License for that Work shall terminate * as of the date such litigation is filed. * * 4. Redistribution. You may reproduce and distribute copies of the * Work or Derivative Works thereof in any medium, with or without * modifications, and in Source or Object form, provided that You * meet the following conditions: * * (a) You must give any other recipients of the Work or * Derivative Works a copy of this License; and * * (b) You must cause any modified files to carry prominent notices * stating that You changed the files; and * * (c) You must retain, in the Source form of any Derivative Works * that You distribute, all copyright, patent, trademark, and * attribution notices from the Source form of the Work, * excluding those notices that do not pertain to any part of * the Derivative Works; and * * (d) If the Work includes a "NOTICE" text file as part of its * distribution, then any Derivative Works that You distribute must * include a readable copy of the attribution notices contained * within such NOTICE file, excluding those notices that do not * pertain to any part of the Derivative Works, in at least one * of 
the following places: within a NOTICE text file distributed * as part of the Derivative Works; within the Source form or * documentation, if provided along with the Derivative Works; or, * within a display generated by the Derivative Works, if and * wherever such third-party notices normally appear. The contents * of the NOTICE file are for informational purposes only and * do not modify the License. You may add Your own attribution * notices within Derivative Works that You distribute, alongside * or as an addendum to the NOTICE text from the Work, provided * that such additional attribution notices cannot be construed * as modifying the License. * * You may add Your own copyright statement to Your modifications and * may provide additional or different license terms and conditions * for use, reproduction, or distribution of Your modifications, or * for any such Derivative Works as a whole, provided Your use, * reproduction, and distribution of the Work otherwise complies with * the conditions stated in this License. * * 5. Submission of Contributions. Unless You explicitly state otherwise, * any Contribution intentionally submitted for inclusion in the Work * by You to the Licensor shall be under the terms and conditions of * this License, without any additional terms or conditions. * Notwithstanding the above, nothing herein shall supersede or modify * the terms of any separate license agreement you may have executed * with Licensor regarding such Contributions. * * 6. Trademarks. This License does not grant permission to use the trade * names, trademarks, service marks, or product names of the Licensor, * except as required for reasonable and customary use in describing the * origin of the Work and reproducing the content of the NOTICE file. * * 7. Disclaimer of Warranty. 
Unless required by applicable law or * agreed to in writing, Licensor provides the Work (and each * Contributor provides its Contributions) on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied, including, without limitation, any warranties or conditions * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A * PARTICULAR PURPOSE. You are solely responsible for determining the * appropriateness of using or redistributing the Work and assume any * risks associated with Your exercise of permissions under this License. * * 8. Limitation of Liability. In no event and under no legal theory, * whether in tort (including negligence), contract, or otherwise, * unless required by applicable law (such as deliberate and grossly * negligent acts) or agreed to in writing, shall any Contributor be * liable to You for damages, including any direct, indirect, special, * incidental, or consequential damages of any character arising as a * result of this License or out of the use or inability to use the * Work (including but not limited to damages for loss of goodwill, * work stoppage, computer failure or malfunction, or any and all * other commercial damages or losses), even if such Contributor * has been advised of the possibility of such damages. * * 9. Accepting Warranty or Additional Liability. While redistributing * the Work or Derivative Works thereof, You may choose to offer, * and charge a fee for, acceptance of support, warranty, indemnity, * or other liability obligations and/or rights consistent with this * License. However, in accepting such obligations, You may act only * on Your own behalf and on Your sole responsibility, not on behalf * of any other Contributor, and only if You agree to indemnify, * defend, and hold each Contributor harmless for any liability * incurred by, or claims asserted against, such Contributor by reason * of your accepting any such warranty or additional liability. 
* * END OF TERMS AND CONDITIONS * * APPENDIX: How to apply the Apache License to your work. * * To apply the Apache License to your work, attach the following * boilerplate notice, with the fields enclosed by brackets "{}" * replaced with your own identifying information. (Don't include * the brackets!) The text should be enclosed in the appropriate * comment syntax for the file format. We also recommend that a * file or class name and description of purpose be included on the * same "printed page" as the copyright notice for easier * identification within third-party archives. * * Copyright {yyyy} {name of copyright owner} * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package com.deleidos.rtws.transport.Services.filehandler;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;

import com.deleidos.rtws.commons.cloud.exception.StorageException;
import com.deleidos.rtws.commons.cloud.platform.StorageInterface;

/**
 * Downloads a tar archive from a storage bucket and extracts its entries into
 * a fixed temporary directory, {@code /tmp/transport/s3/}.
 */
public class S3TarFileHandler {

	private static final Logger log = Logger.getLogger(S3TarFileHandler.class);

	private final static String TMP_PATH = "/tmp/transport/s3/";

	/** Copy buffer size in bytes (constant, so static final rather than per-instance). */
	private static final int BUFFER = 2048;

	private StorageInterface storage;

	public S3TarFileHandler(StorageInterface storage) {
		this.storage = storage;
	}

	/**
	 * Extracts all regular-file entries of the named tar archive into
	 * {@code TMP_PATH}, creating directory entries as needed. Extraction is
	 * best-effort: failures on individual entries are logged and skipped.
	 *
	 * @param bucketName bucket containing the archive
	 * @param fileName   name of the tar archive within the bucket
	 * @return the paths of the files that were successfully extracted
	 * @throws StorageException if the archive cannot be fetched from storage
	 */
	public List<String> extract(String bucketName, String fileName) throws StorageException {
		ArrayList<String> filepaths = new ArrayList<String>();

		InputStream is = storage.getFile(bucketName, fileName);
		TarArchiveInputStream tis = new TarArchiveInputStream(new BufferedInputStream(is));
		TarArchiveEntry entry = null;

		File tmpDir = new File(TMP_PATH);
		if (!tmpDir.exists()) {
			tmpDir.mkdirs();
		}

		try {
			String tmpCanonical = tmpDir.getCanonicalPath();
			while ((entry = (TarArchiveEntry) tis.getNextEntry()) != null) {
				// Guard against "zip slip": an entry name containing ".." could
				// otherwise resolve outside the extraction directory. The archive
				// comes from external storage, so treat entry names as untrusted.
				File target = new File(tmpDir, entry.getName());
				if (!target.getCanonicalPath().startsWith(tmpCanonical)) {
					log.warn("Skipping tar entry '" + entry.getName()
							+ "' because it resolves outside of '" + TMP_PATH + "'.");
					continue;
				}

				if (entry.isFile()) {
					String entryFilePath = TMP_PATH + entry.getName();
					BufferedOutputStream dest = null;
					try {
						int count;
						byte[] data = new byte[BUFFER];
						dest = new BufferedOutputStream(new FileOutputStream(entryFilePath), BUFFER);
						while ((count = tis.read(data, 0, BUFFER)) != -1) {
							dest.write(data, 0, count);
						}
						dest.flush();
						filepaths.add(entryFilePath);
						log.debug("Extracted file '" + entryFilePath + "'.");
					} catch (Exception ex) {
						log.debug("Fail to extract file '" + entryFilePath + "'. Message: " + ex.getMessage());
					} finally {
						// Close quietly so a failed close of one entry does not
						// abort extraction of the remaining entries.
						if (dest != null) {
							try {
								dest.close();
							} catch (IOException closeEx) {
								log.debug("Fail to close file '" + entryFilePath + "'. Message: " + closeEx.getMessage());
							}
						}
					}
				}

				if (entry.isDirectory()) {
					File entryDirPath = new File(TMP_PATH + entry.getName());
					if (!entryDirPath.exists()) {
						if (entryDirPath.mkdirs()) {
							log.debug("Created directory '" + entryDirPath.getAbsolutePath() + "'.");
						}
					}
				}
			}
		} catch (FileNotFoundException fnfe) {
			log.debug("Fail to extract tar file '" + fileName + "'. Message: " + fnfe.getMessage());
		} catch (IOException ioe) {
			log.debug("Fail to extract tar file '" + fileName + "'. Message: " + ioe.getMessage());
		} finally {
			try {
				// Closing the tar stream also closes the underlying storage stream.
				tis.close();
			} catch (IOException ignored) {
				// Best effort on close; nothing useful can be done here.
			}
		}

		return filepaths;
	}

	/**
	 * Deletes every file and directory under the temporary extraction
	 * directory. Failures are logged and do not stop the cleanup.
	 */
	public void cleanup() {
		File tmpDir = new File(TMP_PATH);
		if (tmpDir.exists()) {
			File[] files = tmpDir.listFiles();
			if (files == null) {
				// listFiles() returns null on I/O error or when tmpDir is not a
				// directory; nothing to clean up in either case.
				return;
			}
			for (File file : files) {
				try {
					if (file.isFile()) {
						file.delete();
						log.debug("Deleted file '" + file.getAbsolutePath() + "'.");
					}
					if (file.isDirectory()) {
						FileUtils.deleteDirectory(file);
						log.debug("Deleted directory '" + file.getAbsolutePath() + "'.");
					}
				} catch (IOException ioe) {
					log.debug("Fail to delete file '" + file.getAbsolutePath() + "'. Message: " + ioe.getMessage());
				}
			}
		}
	}
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.diagnostic;

import com.intellij.icons.AllIcons;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.wm.IconLikeCustomStatusBarWidget;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.ui.LightColors;
import com.intellij.ui.popup.NotificationPopup;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Status bar widget that shows the fatal-error ("red blinking") icon, pops a
 * one-shot notification for unread fatal errors, and opens the
 * {@link IdeErrorsDialog} when the icon is clicked. Listens to a
 * {@link MessagePool} for new/cleared/read error entries.
 */
public class IdeMessagePanel extends JPanel implements MessagePoolListener, IconLikeCustomStatusBarWidget {
  public static final String FATAL_ERROR = "FatalError";
  private final IdeFatalErrorsIcon myIdeFatal;

  static final String INTERNAL_ERROR_NOTICE = DiagnosticBundle.message("error.notification.tooltip");

  // Currently open errors dialog, or null; also used as a re-entrancy guard.
  private IdeErrorsDialog myDialog;
  private boolean myOpeningInProgress;

  private final MessagePool myMessagePool;
  // Ensures the notification popup is shown only once per "unread errors" episode.
  private boolean myNotificationPopupAlreadyShown = false;

  public IdeMessagePanel(@NotNull MessagePool messagePool) {
    super(new BorderLayout());
    myIdeFatal = new IdeFatalErrorsIcon(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        openFatals(null);
      }
    });
    myIdeFatal.setVerticalAlignment(SwingConstants.CENTER);
    add(myIdeFatal, BorderLayout.CENTER);

    myMessagePool = messagePool;
    messagePool.addListener(this);

    updateFatalErrorsIcon();

    setOpaque(false);
  }

  @NotNull
  public String ID() {
    return FATAL_ERROR;
  }

  public WidgetPresentation getPresentation(@NotNull PlatformType type) {
    return null;
  }

  public void dispose() {
    myMessagePool.removeListener(this);
  }

  public void install(@NotNull StatusBar statusBar) {
  }

  public JComponent getComponent() {
    return this;
  }

  /**
   * Opens the errors dialog unless one is already open or opening. If another
   * modal window is active, retries every 300 ms until it is not.
   *
   * @param message the error to pre-select in the dialog, or null
   */
  public void openFatals(@Nullable final LogMessage message) {
    if (myDialog != null) return;
    if (myOpeningInProgress) return;
    myOpeningInProgress = true;

    final Runnable task = new Runnable() {
      public void run() {
        if (isOtherModalWindowActive()) {
          if (myDialog == null) {
            // Reschedule THIS runnable ("this" is the anonymous Runnable, not
            // the panel) until the foreign modal window goes away.
            EdtExecutorService.getScheduledExecutorInstance().schedule(this, (long)300, TimeUnit.MILLISECONDS);
          }
          return;
        }

        try {
          _openFatals(message);
        }
        finally {
          myOpeningInProgress = false;
        }
      }
    };
    task.run();
  }

  // Creates and shows the errors dialog on the EDT; the dialog tears itself
  // down via disposeDialog() on OK/Cancel.
  private void _openFatals(@Nullable final LogMessage message) {
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      public void run() {
        myDialog = new IdeErrorsDialog(myMessagePool, message) {
          public void doOKAction() {
            super.doOKAction();
            disposeDialog(this);
          }

          public void doCancelAction() {
            super.doCancelAction();
            disposeDialog(this);
          }

          @Override
          protected void updateOnSubmit() {
            super.updateOnSubmit();
            updateState(computeState());
          }
        };
        myMessagePool.addListener(myDialog);
        if (!isOtherModalWindowActive()) {
          myDialog.show();
        }
        else {
          // A foreign modal window appeared between scheduling and running;
          // back out without showing the dialog.
          myDialog.close(0);
          disposeDialog(myDialog);
        }
      }
    });
  }

  // Updates the icon state and hides the whole widget when there are no errors.
  private void updateState(final IdeFatalErrorsIcon.State state) {
    myIdeFatal.setState(state);
    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        setVisible(state != IdeFatalErrorsIcon.State.NoErrors);
      }
    });
  }

  private void disposeDialog(final IdeErrorsDialog listDialog) {
    myMessagePool.removeListener(listDialog);
    updateFatalErrorsIcon();
    myDialog = null;
  }

  public void newEntryAdded() {
    updateFatalErrorsIcon();
  }

  public void poolCleared() {
    updateFatalErrorsIcon();
  }

  @Override
  public void entryWasRead() {
    updateFatalErrorsIcon();
  }

  // True when some modal window other than our own errors dialog is active.
  private boolean isOtherModalWindowActive() {
    final Window window = getActiveModalWindow();
    if (window == null) return false;

    return myDialog == null || myDialog.getWindow() != window;
  }

  private static Window getActiveModalWindow() {
    final KeyboardFocusManager manager = KeyboardFocusManager.getCurrentKeyboardFocusManager();
    final Window activeWindow = manager.getActiveWindow();
    if (activeWindow instanceof JDialog) {
      if (((JDialog) activeWindow).isModal()) {
        return activeWindow;
      }
    }

    return null;
  }

  // NoErrors if the pool is empty, UnreadErrors if any fatal error is unread,
  // otherwise ReadErrors.
  private IdeFatalErrorsIcon.State computeState() {
    final List<AbstractMessage> errors = myMessagePool.getFatalErrors(true, false);
    if (errors.isEmpty()) {
      return IdeFatalErrorsIcon.State.NoErrors;
    }
    else {
      for (AbstractMessage error : errors) {
        if (!error.isRead()) {
          return IdeFatalErrorsIcon.State.UnreadErrors;
        }
      }
      return IdeFatalErrorsIcon.State.ReadErrors;
    }
  }

  // Recomputes the icon state and, on the first transition to UnreadErrors,
  // shows a one-shot notification popup that opens the errors dialog on click.
  void updateFatalErrorsIcon() {
    final IdeFatalErrorsIcon.State state = computeState();
    updateState(state);

    if (state == IdeFatalErrorsIcon.State.NoErrors) {
      myNotificationPopupAlreadyShown = false;
    }
    else if (state == IdeFatalErrorsIcon.State.UnreadErrors && !myNotificationPopupAlreadyShown) {
      SwingUtilities.invokeLater(new Runnable() {
        public void run() {
          String notificationText = tryGetFromMessages(myMessagePool.getFatalErrors(false, false));
          if (notificationText == null) {
            notificationText = INTERNAL_ERROR_NOTICE;
          }
          final JLabel label = new JLabel(notificationText);
          label.setIcon(AllIcons.Ide.FatalError);
          new NotificationPopup(IdeMessagePanel.this, label, LightColors.RED, false, new ActionListener() {
            public void actionPerformed(ActionEvent e) {
              _openFatals(null);
            }
          }, true);
        }
      });
      myNotificationPopupAlreadyShown = true;
    }
  }

  // Returns the common notification text of all messages, or null when the
  // messages disagree or any message carries no notification text.
  private static String tryGetFromMessages(List<AbstractMessage> messages) {
    String result = null;
    for (AbstractMessage message : messages) {
      String s;
      if (message instanceof LogMessageEx) {
        s = ((LogMessageEx)message).getNotificationText();
      }
      else if (message instanceof GroupedLogMessage) {
        s = tryGetFromMessages(((GroupedLogMessage)message).getMessages());
      }
      else {
        return null;
      }
      if (result == null) {
        result = s;
      }
      else if (!result.equals(s)) {
        // if texts are different, show default
        return null;
      }
    }
    return result;
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.blobstore.gcs; import com.google.api.client.googleapis.batch.BatchRequest; import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.InputStreamContent; import com.google.api.services.storage.Storage; import com.google.api.services.storage.model.Bucket; import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.gcs.util.SocketAccess; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.component.AbstractComponent; import 
org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.CountDown;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Spliterator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import static java.net.HttpURLConnection.HTTP_NOT_FOUND;

/**
 * {@link BlobStore} implementation backed by a single Google Cloud Storage
 * bucket. All network calls go through {@code SocketAccess.doPrivileged*} so
 * they run with the plugin's socket permissions.
 */
public class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore {

    /**
     * Google Cloud Storage batch requests are limited to 1000 operations,
     * so batches are flushed at 999 queued deletions.
     **/
    private static final int MAX_BATCHING_REQUESTS = 999;

    private final Storage client;
    private final String bucket;

    public GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storageClient) {
        super(settings);
        this.bucket = bucket;
        this.client = storageClient;
        // Fail fast at construction time rather than on the first blob operation.
        if (doesBucketExist(bucket) == false) {
            throw new BlobStoreException("Bucket [" + bucket + "] does not exist");
        }
    }

    @Override
    public BlobContainer blobContainer(BlobPath path) {
        return new GoogleCloudStorageBlobContainer(path, this);
    }

    @Override
    public void delete(BlobPath path) throws IOException {
        deleteBlobsByPrefix(path.buildAsString());
    }

    @Override
    public void close() {
        // Nothing to release: the Storage client is managed by the caller.
    }

    /**
     * Return true if the given bucket exists
     *
     * @param bucketName name of the bucket
     * @return true if the bucket exists, false otherwise
     */
    boolean doesBucketExist(String bucketName) {
        try {
            return SocketAccess.doPrivilegedIOException(() -> {
                try {
                    Bucket bucket = client.buckets().get(bucketName).execute();
                    if (bucket != null) {
                        return Strings.hasText(bucket.getId());
                    }
                } catch (GoogleJsonResponseException e) {
                    // A 404 may be reported either via the HTTP status code or via
                    // the error payload, so check both before treating it as "absent".
                    GoogleJsonError error = e.getDetails();
                    if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) {
                        return false;
                    }
                    throw e;
                }
                return false;
            });
        } catch (IOException e) {
            throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e);
        }
    }

    /**
     * List all blobs in the bucket
     *
     * @param path base path of the blobs to list
     * @return a map of blob names and their metadata
     */
    Map<String, BlobMetaData> listBlobs(String path) throws IOException {
        return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, path, path));
    }

    /**
     * List all blobs in the bucket which have a prefix
     *
     * @param path base path of the blobs to list
     * @param prefix prefix of the blobs to list
     * @return a map of blob names and their metadata
     */
    Map<String, BlobMetaData> listBlobsByPrefix(String path, String prefix) throws IOException {
        return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, buildKey(path, prefix), path));
    }

    /**
     * Lists all blobs in a given bucket
     *
     * @param bucketName name of the bucket
     * @param path base path of the blobs to list
     * @param pathToRemove leading path fragment stripped from each returned
     *                     blob name (may be null or empty to keep full names)
     * @return a map of blob names and their metadata
     */
    private Map<String, BlobMetaData> listBlobsByPath(String bucketName, String path, String pathToRemove) throws IOException {
        return blobsStream(client, bucketName, path, MAX_BATCHING_REQUESTS)
                .map(new BlobMetaDataConverter(pathToRemove))
                .collect(Collectors.toMap(PlainBlobMetaData::name, Function.identity()));
    }

    /**
     * Returns true if the blob exists in the bucket
     *
     * @param blobName name of the blob
     * @return true if the blob exists, false otherwise
     */
    boolean blobExists(String blobName) throws IOException {
        try {
            StorageObject blob = SocketAccess.doPrivilegedIOException(() -> client.objects().get(bucket, blobName).execute());
            if (blob != null) {
                return Strings.hasText(blob.getId());
            }
        } catch (GoogleJsonResponseException e) {
            // Same dual 404 check as in doesBucketExist().
            GoogleJsonError error = e.getDetails();
            if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) {
                return false;
            }
            throw e;
        }
        return false;
    }

    /**
     * Returns an {@link java.io.InputStream} for a given blob
     *
     * @param blobName name of the blob
     * @return an InputStream
     * @throws NoSuchFileException if the blob does not exist
     */
    InputStream readBlob(String blobName) throws IOException {
        try {
            return SocketAccess.doPrivilegedIOException(() -> {
                Storage.Objects.Get object = client.objects().get(bucket, blobName);
                return object.executeMediaAsInputStream();
            });
        } catch (GoogleJsonResponseException e) {
            GoogleJsonError error = e.getDetails();
            if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) {
                // Translate the GCS 404 into the exception type BlobContainer
                // callers expect for a missing blob.
                throw new NoSuchFileException(e.getMessage());
            }
            throw e;
        }
    }

    /**
     * Writes a blob in the bucket.
     *
     * @param blobName name of the blob to create
     * @param inputStream content of the blob to be written
     * @param blobSize expected size of the blob to be written
     */
    void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
        SocketAccess.doPrivilegedVoidIOException(() -> {
            // Content type is left null; GCS stores the object without one.
            InputStreamContent stream = new InputStreamContent(null, inputStream);
            stream.setLength(blobSize);

            Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream);
            insert.setName(blobName);
            insert.execute();
        });
    }

    /**
     * Deletes a blob in the bucket
     *
     * @param blobName name of the blob
     * @throws NoSuchFileException if the blob does not exist
     */
    void deleteBlob(String blobName) throws IOException {
        // NOTE(review): existence check and delete are two separate calls, so
        // this is not atomic; a concurrent delete can still race in between.
        if (!blobExists(blobName)) {
            throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
        }
        SocketAccess.doPrivilegedIOException(() -> client.objects().delete(bucket, blobName).execute());
    }

    /**
     * Deletes multiple blobs in the bucket that have a given prefix
     *
     * @param prefix prefix of the blobs to delete
     */
    void deleteBlobsByPrefix(String prefix) throws IOException {
        deleteBlobs(listBlobsByPath(bucket, prefix, null).keySet());
    }

    /**
     * Deletes multiple blobs in the given bucket (uses a batch request to perform this)
     *
     * @param blobNames names of the blobs to delete
     */
    void deleteBlobs(Collection<String> blobNames) throws IOException {
        if (blobNames == null || blobNames.isEmpty()) {
            return;
        }

        // Single deletion is not worth a batch request.
        if (blobNames.size() == 1) {
            deleteBlob(blobNames.iterator().next());
            return;
        }
        final List<Storage.Objects.Delete> deletions = new ArrayList<>();
        final Iterator<String> blobs = blobNames.iterator();

        SocketAccess.doPrivilegedVoidIOException(() -> {
            while (blobs.hasNext()) {
                // Create a delete request for each blob to delete
                deletions.add(client.objects().delete(bucket, blobs.next()));
                // Flush when the batch is full or the input is exhausted.
                if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) {
                    try {
                        // Deletions are executed using a batch request
                        BatchRequest batch = client.batch();
                        // Used to track successful deletions
                        CountDown countDown = new CountDown(deletions.size());
                        for (Storage.Objects.Delete delete : deletions) {
                            // Queue the delete request in batch
                            delete.queue(batch, new JsonBatchCallback<Void>() {
                                @Override
                                public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException {
                                    logger.error("failed to delete blob [{}] in bucket [{}]: {}", delete.getObject(), delete.getBucket(),
                                            e.getMessage());
                                }

                                @Override
                                public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException {
                                    countDown.countDown();
                                }
                            });
                        }
                        batch.execute();

                        // If not every callback counted down, at least one delete failed.
                        if (countDown.isCountedDown() == false) {
                            throw new IOException("Failed to delete all [" + deletions.size() + "] blobs");
                        }
                    } finally {
                        deletions.clear();
                    }
                }
            }
        });
    }

    /**
     * Moves a blob within the same bucket
     *
     * @param sourceBlob name of the blob to move
     * @param targetBlob new name of the blob in the target bucket
     */
    void moveBlob(String sourceBlob, String targetBlob) throws IOException {
        SocketAccess.doPrivilegedIOException(() -> {
            // There's no atomic "move" in GCS so we need to copy and delete
            client.objects().copy(bucket, sourceBlob, bucket, targetBlob, null).execute();
            client.objects().delete(bucket, sourceBlob).execute();
            return null;
        });
    }

    // Concatenates a base path and a name into a full object key.
    private String buildKey(String keyPath, String s) {
        assert s != null;
        return keyPath + s;
    }

    /**
     * Converts a {@link StorageObject} to a {@link PlainBlobMetaData},
     * stripping {@code pathToRemove} from the front of the blob name.
     */
    class BlobMetaDataConverter implements Function<StorageObject, PlainBlobMetaData> {

        private final String pathToRemove;

        BlobMetaDataConverter(String pathToRemove) {
            this.pathToRemove = pathToRemove;
        }

        @Override
        public PlainBlobMetaData apply(StorageObject storageObject) {
            String blobName = storageObject.getName();
            if (Strings.hasLength(pathToRemove)) {
                blobName = blobName.substring(pathToRemove.length());
            }
            return new PlainBlobMetaData(blobName, storageObject.getSize().longValue());
        }
    }

    /**
     * Spliterator can be used to list storage objects stored in a bucket.
     * Each {@link #tryAdvance} call fetches and consumes one result page.
     */
    static class StorageObjectsSpliterator implements Spliterator<StorageObject> {

        private final Storage.Objects.List list;

        StorageObjectsSpliterator(Storage client, String bucketName, String prefix, long pageSize) throws IOException {
            list = SocketAccess.doPrivilegedIOException(() -> client.objects().list(bucketName));
            list.setMaxResults(pageSize);
            if (prefix != null) {
                list.setPrefix(prefix);
            }
        }

        @Override
        public boolean tryAdvance(Consumer<? super StorageObject> action) {
            try {
                // Retrieves the next page of items
                Objects objects = SocketAccess.doPrivilegedIOException(list::execute);

                if ((objects == null) || (objects.getItems() == null) || (objects.getItems().isEmpty())) {
                    return false;
                }

                // Consumes all the items
                objects.getItems().forEach(action::accept);

                // Sets the page token of the next page,
                // null indicates that all items have been consumed
                String next = objects.getNextPageToken();
                if (next != null) {
                    list.setPageToken(next);
                    return true;
                }

                return false;
            } catch (Exception e) {
                throw new BlobStoreException("Exception while listing objects", e);
            }
        }

        @Override
        public Spliterator<StorageObject> trySplit() {
            // Paged listing cannot be split; stream stays sequential.
            return null;
        }

        @Override
        public long estimateSize() {
            return Long.MAX_VALUE;
        }

        @Override
        public int characteristics() {
            return 0;
        }
    }

    /**
     * Returns a {@link Stream} of {@link StorageObject}s that are stored in a given bucket.
     */
    static Stream<StorageObject> blobsStream(Storage client, String bucketName, String prefix, long pageSize) throws IOException {
        return StreamSupport.stream(new StorageObjectsSpliterator(client, bucketName, prefix, pageSize), false);
    }

}
/*
 * Copyright 2013 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.newtypes;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.CodingConvention;
import com.google.javascript.jscomp.DiagnosticGroup;
import com.google.javascript.jscomp.DiagnosticType;
import com.google.javascript.jscomp.JSError;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * During GlobalTypeInfo, this class parses type ASTs inside jsdocs and converts them
 * to JSTypes.
 *
 * There isn't a clear distinction which warnings should be signaled here and which
 * ones in GlobalTypeInfo; we give the warning in whichever class is most convenient.
 *
 * @author [email protected] (Ben Lickly)
 * @author [email protected] (Dimitris Vardoulakis)
 */
public final class JSTypeCreatorFromJSDoc {

  // ---- Diagnostics emitted while translating jsdoc type ASTs to JSTypes. ----

  public static final DiagnosticType INVALID_GENERICS_INSTANTIATION =
      DiagnosticType.warning(
          "JSC_NTI_INVALID_GENERICS_INSTANTIATION",
          "Invalid generics instantiation{0}.\n"
          + "Expected {1} type argument(s), but found {2}");

  public static final DiagnosticType EXTENDS_NON_OBJECT =
      DiagnosticType.warning(
          "JSC_NTI_EXTENDS_NON_OBJECT",
          "{0} extends non-object type {1}.\n");

  public static final DiagnosticType EXTENDS_NOT_ON_CTOR_OR_INTERF =
      DiagnosticType.warning(
          "JSC_NTI_EXTENDS_NOT_ON_CTOR_OR_INTERF",
          "@extends used without @constructor or @interface for {0}.\n");

  public static final DiagnosticType INHERITANCE_CYCLE =
      DiagnosticType.warning(
          "JSC_NTI_INHERITANCE_CYCLE",
          "Cycle detected in inheritance chain of type {0}");

  public static final DiagnosticType DICT_IMPLEMENTS_INTERF =
      DiagnosticType.warning(
          "JSC_NTI_DICT_IMPLEMENTS_INTERF",
          "Class {0} is a dict. Dicts can't implement interfaces");

  public static final DiagnosticType IMPLEMENTS_WITHOUT_CONSTRUCTOR =
      DiagnosticType.warning(
          "JSC_NTI_IMPLEMENTS_WITHOUT_CONSTRUCTOR",
          "@implements used without @constructor or @interface for {0}");

  public static final DiagnosticType CONFLICTING_EXTENDED_TYPE =
      DiagnosticType.warning(
          "JSC_NTI_CONFLICTING_EXTENDED_TYPE",
          "{1} cannot extend this type; {0}s can only extend {0}s");

  public static final DiagnosticType CONFLICTING_IMPLEMENTED_TYPE =
      DiagnosticType.warning(
          "JSC_NTI_CONFLICTING_IMPLEMENTED_TYPE",
          "{0} cannot implement this type; "
          + "an interface can only extend, but not implement interfaces");

  public static final DiagnosticType UNION_IS_UNINHABITABLE =
      DiagnosticType.warning(
          "JSC_NTI_UNION_IS_UNINHABITABLE",
          "Union of {0} with {1} would create an impossible type");

  public static final DiagnosticType NEW_EXPECTS_OBJECT_OR_TYPEVAR =
      DiagnosticType.warning(
          "JSC_NTI_NEW_EXPECTS_OBJECT_OR_TYPEVAR",
          "The \"new:\" annotation only accepts object types and type variables; "
          + "found {0}");

  public static final DiagnosticType BAD_ARRAY_TYPE_SYNTAX =
      DiagnosticType.warning(
          "JSC_NTI_BAD_ARRAY_TYPE_SYNTAX",
          "The [] type syntax is not supported. Please use Array<T> instead");

  public static final DiagnosticType CANNOT_MAKE_TYPEVAR_NON_NULL =
      DiagnosticType.warning(
          "JSC_NTI_CANNOT_MAKE_TYPEVAR_NON_NULL",
          "Cannot use ! to restrict type variable type.\n"
          + "Prefer to make type argument non-nullable and add "
          + "null explicitly where needed (e.g. through ?T or T|null)");

  public static final DiagnosticType CIRCULAR_TYPEDEF_ENUM =
      DiagnosticType.warning(
          "JSC_NTI_CIRCULAR_TYPEDEF_ENUM",
          "Circular typedefs/enums are not allowed");

  public static final DiagnosticType ENUM_WITH_TYPEVARS =
      DiagnosticType.warning(
          "JSC_NTI_ENUM_WITH_TYPEVARS",
          "An enum type cannot include type variables");

  public static final DiagnosticType ENUM_IS_TOP =
      DiagnosticType.warning(
          "JSC_NTI_ENUM_IS_TOP",
          "An enum type cannot be *. "
          + "Use ? if you do not want the elements checked");

  // TODO(dimvar): This may prove to be too strict, may revisit.
  public static final DiagnosticType ENUM_IS_UNION =
      DiagnosticType.warning(
          "JSC_NTI_ENUM_IS_UNION",
          "An enum type cannot be a union type");

  public static final DiagnosticType WRONG_PARAMETER_ORDER =
      DiagnosticType.warning(
          "JSC_NTI_WRONG_PARAMETER_ORDER",
          "Wrong parameter order: required parameters are first, "
          + "then optional, then varargs");

  public static final DiagnosticType IMPLEMENTS_NON_INTERFACE =
      DiagnosticType.warning(
          "JSC_NTI_IMPLEMENTS_NON_INTERFACE",
          "Cannot implement non-interface {0}");

  public static final DiagnosticType EXTENDS_NON_INTERFACE =
      DiagnosticType.warning(
          "JSC_NTI_EXTENDS_NON_INTERFACE",
          "Cannot extend non-interface {0}");

  public static final DiagnosticType FUNCTION_WITH_NONFUNC_JSDOC =
      DiagnosticType.warning(
          "JSC_NTI_FUNCTION_WITH_NONFUNC_JSDOC",
          "The function is annotated with a non-function jsdoc. "
          + "Ignoring jsdoc");

  public static final DiagnosticType TEMPLATED_GETTER_SETTER =
      DiagnosticType.warning(
          "JSC_NTI_TEMPLATED_GETTER_SETTER",
          "@template can't be used with getters/setters");

  public static final DiagnosticType TWO_JSDOCS =
      DiagnosticType.warning(
          "JSC_NTI_TWO_JSDOCS",
          "Found two JsDoc comments for {0}");

  // Diagnostics that also exist (in compatible form) in the old type checker.
  public static final DiagnosticGroup COMPATIBLE_DIAGNOSTICS = new DiagnosticGroup(
      BAD_ARRAY_TYPE_SYNTAX,
      CIRCULAR_TYPEDEF_ENUM,
      CONFLICTING_EXTENDED_TYPE,
      CONFLICTING_IMPLEMENTED_TYPE,
      EXTENDS_NON_INTERFACE,
      EXTENDS_NON_OBJECT,
      EXTENDS_NOT_ON_CTOR_OR_INTERF,
      IMPLEMENTS_NON_INTERFACE,
      IMPLEMENTS_WITHOUT_CONSTRUCTOR,
      INHERITANCE_CYCLE,
      NEW_EXPECTS_OBJECT_OR_TYPEVAR,
      TEMPLATED_GETTER_SETTER,
      TWO_JSDOCS,
      WRONG_PARAMETER_ORDER);

  // Diagnostics specific to the new type inference.
  public static final DiagnosticGroup NEW_DIAGNOSTICS = new DiagnosticGroup(
      CANNOT_MAKE_TYPEVAR_NON_NULL,
      DICT_IMPLEMENTS_INTERF,
      ENUM_IS_TOP,
      // TODO(dimvar): ENUM_IS_UNION is rare, but it happens. Should we support it?
      ENUM_IS_UNION,
      ENUM_WITH_TYPEVARS,
      FUNCTION_WITH_NONFUNC_JSDOC,
      INVALID_GENERICS_INSTANTIATION,
      UNION_IS_UNINHABITABLE);

  private final CodingConvention convention;
  // Generates fresh, unique names for type variables declared via @template.
  private final UniqueNameGenerator nameGen;
  // Shared singleton/primitive JSTypes (UNKNOWN, NULL, TOP, ...).
  private final JSTypes commonTypes;
  // In GlobalTypeInfo, we collect all property names defined anywhere in the program.
  // This field is a reference to that set, so that we can add properties from jsdoc
  // annotations in externs.
  private final Set<String> allPropertyNames;
  // Used to communicate state between methods when resolving enum types
  private int howmanyTypeVars = 0;

  /** Exception for when unrecognized type names are encountered */
  public static class UnknownTypeException extends Exception {
    UnknownTypeException(String cause) {
      super(cause);
    }
  }

  // Warnings accumulated during parsing; insertion-ordered for stable output.
  private Set<JSError> warnings = new LinkedHashSet<>();
  // Unknown type names indexed by JSDoc AST node at which they were found.
  private Map<Node, String> unknownTypeNames = new LinkedHashMap<>();

  public JSTypeCreatorFromJSDoc(JSTypes commonTypes, CodingConvention convention, UniqueNameGenerator nameGen, Set<String> allPropertyNames) {
    Preconditions.checkNotNull(commonTypes);
    this.commonTypes = commonTypes;
    // Pre-built result for functions typed simply as "?" / Function.
    this.qmarkFunctionDeclared = new FunctionAndSlotType(
        null, DeclaredFunctionType.qmarkFunctionDeclaration(commonTypes));
    this.convention = convention;
    this.nameGen = nameGen;
    this.allPropertyNames = allPropertyNames;
  }

  private FunctionAndSlotType qmarkFunctionDeclared;
  private static final boolean NULLABLE_TYPES_BY_DEFAULT = true;

  // Joins NULL into t when nullable-by-default semantics are on (they always are here,
  // see NULLABLE_TYPES_BY_DEFAULT).
  public JSType maybeMakeNullable(JSType t) {
    if (NULLABLE_TYPES_BY_DEFAULT) {
      return JSType.join(this.commonTypes.NULL, t);
    }
    return t;
  }

  // Entry point: parse the @type expression of a jsdoc, in the scope of ownerType's
  // type parameters (if any). Continues on the next chunk.
  public JSType getDeclaredTypeOfNode(JSDocInfo jsdoc, RawNominalType ownerType, DeclaredTypeRegistry registry) {
    return getDeclaredTypeOfNode(jsdoc, registry, ownerType == null ?
ImmutableList.<String>of() : ownerType.getTypeParameters());
  }

  // Parses a raw jsdoc type AST node (not a JS AST node) to a JSType.
  public JSType getTypeOfCommentNode(
      Node n, RawNominalType ownerType, DeclaredTypeRegistry registry) {
    return getTypeFromComment(
        n, registry,
        ownerType == null ? ImmutableList.<String>of() : ownerType.getTypeParameters());
  }

  // Returns null when the jsdoc is absent or has no @type.
  private JSType getDeclaredTypeOfNode(JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    if (jsdoc == null) {
      return null;
    }
    return getTypeFromJSTypeExpression(
        jsdoc.getType(), registry, typeParameters);
  }

  public Set<JSError> getWarnings() {
    return warnings;
  }

  public Map<Node, String> getUnknownTypesMap() {
    return unknownTypeNames;
  }

  private JSType getTypeFromJSTypeExpression(JSTypeExpression expr, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    if (expr == null) {
      return null;
    }
    return getTypeFromComment(expr.getRoot(), registry, typeParameters);
  }

  // Very similar to JSTypeRegistry#createFromTypeNodesInternal
  // n is a jsdoc node, not an AST node; the same class (Node) is used for both
  // On unknown type names, falls back to UNKNOWN (contrast getMaybeTypeFromComment).
  private JSType getTypeFromComment(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    try {
      return getTypeFromCommentHelper(n, registry, typeParameters);
    } catch (UnknownTypeException e) {
      return this.commonTypes.UNKNOWN;
    }
  }

  // Like getTypeFromComment, but signals "could not resolve" with null instead of UNKNOWN.
  private JSType getMaybeTypeFromComment(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    try {
      return getTypeFromCommentHelper(n, registry, typeParameters);
    } catch (UnknownTypeException e) {
      return null;
    }
  }

  // Recursive workhorse: dispatches on the jsdoc AST node kind.
  private JSType getTypeFromCommentHelper(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) throws UnknownTypeException {
    Preconditions.checkNotNull(n);
    if (typeParameters == null) {
      typeParameters = ImmutableList.of();
    }
    switch (n.getType()) {
      case LC:
        // Record type: {prop: T, ...}
        return getRecordTypeHelper(n, registry, typeParameters);
      case EMPTY: // for function types that don't declare a return type
        return this.commonTypes.UNKNOWN;
      case VOID:
        // TODO(dimvar): void can be represented in 2 ways: Token.VOID and a
        // Token.STRING whose getString() is "void".
        // Change jsdoc parsing to only have one representation.
        return this.commonTypes.UNDEFINED;
      case LB:
        warnings.add(JSError.make(n, BAD_ARRAY_TYPE_SYNTAX));
        return this.commonTypes.UNKNOWN;
      case STRING:
        return getNamedTypeHelper(n, registry, typeParameters);
      case PIPE: {
        // The way JSType.join works, Subtype|Supertype is equal to Supertype,
        // so when programmers write un-normalized unions, we normalize them
        // silently. We may also want to warn.
        JSType union = this.commonTypes.BOTTOM;
        for (Node child = n.getFirstChild(); child != null; child = child.getNext()) {
          // TODO(dimvar): When the union has many things, we join and throw
          // away types, except the result of the last join. Very inefficient.
          // Consider optimizing.
          JSType nextType = getTypeFromCommentHelper(child, registry, typeParameters);
          if (nextType.isUnknown()) {
            return this.commonTypes.UNKNOWN;
          }
          JSType nextUnion = JSType.join(union, nextType);
          if (nextUnion.isBottom()) {
            warnings.add(JSError.make(n, UNION_IS_UNINHABITABLE, nextType.toString(), union.toString()));
            return this.commonTypes.UNKNOWN;
          }
          union = nextUnion;
        }
        return union;
      }
      case BANG: {
        // !T: strip null. Disallowed on type variables (see warning below).
        JSType nullableType = getTypeFromCommentHelper(
            n.getFirstChild(), registry, typeParameters);
        if (nullableType.isTypeVariable()) {
          warnings.add(JSError.make(n, CANNOT_MAKE_TYPEVAR_NON_NULL));
        }
        return nullableType.removeType(this.commonTypes.NULL);
      }
      case QMARK: {
        // Bare "?" is the unknown type; "?T" joins null into T.
        Node child = n.getFirstChild();
        if (child == null) {
          return this.commonTypes.UNKNOWN;
        } else {
          return JSType.join(this.commonTypes.NULL, getTypeFromCommentHelper(child, registry, typeParameters));
        }
      }
      case STAR:
        return this.commonTypes.TOP;
      case FUNCTION:
        return getFunTypeHelper(n, registry, typeParameters);
      default:
        throw new IllegalArgumentException(
            "Unsupported type exp: " + n.getType() + " " + n.toStringTree());
    }
  }

  // Looks at the type AST without evaluating it
  private boolean isUnionWithUndefined(Node
n) {
    if (n == null || n.getType() != Token.PIPE) {
      return false;
    }
    // Syntactic check: does any union member spell void/undefined?
    for (Node child : n.children()) {
      if (child.getType() == Token.VOID
          || child.getType() == Token.STRING
              && (child.getString().equals("void") || child.getString().equals("undefined"))) {
        return true;
      }
    }
    return false;
  }

  // Translates a record type AST {p1: T1, ...} into an object JSType.
  // Properties whose type includes undefined become optional.
  private JSType getRecordTypeHelper(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) throws UnknownTypeException {
    Map<String, Property> props = new LinkedHashMap<>();
    for (Node propNode = n.getFirstFirstChild(); propNode != null; propNode = propNode.getNext()) {
      boolean isPropDeclared = propNode.getType() == Token.COLON;
      Node propNameNode = isPropDeclared ? propNode.getFirstChild() : propNode;
      String propName = propNameNode.getString();
      // Strip the surrounding quotes from quoted property names.
      if (propName.startsWith("'") || propName.startsWith("\"")) {
        propName = propName.substring(1, propName.length() - 1);
      }
      if (n.isFromExterns()) {
        // Record extern-declared property names globally (see allPropertyNames).
        this.allPropertyNames.add(propName);
      }
      JSType propType = !isPropDeclared
          ? this.commonTypes.UNKNOWN
          : getTypeFromCommentHelper(propNode.getLastChild(), registry, typeParameters);
      Property prop;
      if (propType.equals(this.commonTypes.UNDEFINED) || isUnionWithUndefined(propNode.getLastChild())) {
        prop = Property.makeOptional(null, propType, propType);
      } else {
        prop = Property.make(propType, propType);
      }
      props.put(propName, prop);
    }
    return JSType.fromObjectType(ObjectType.fromProperties(this.commonTypes, props));
  }

  // Resolves a named type: primitives are handled inline, everything else is
  // looked up via lookupTypeByName. Warns on generics applied to non-generics.
  private JSType getNamedTypeHelper(Node n, DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters) throws UnknownTypeException {
    String typeName = n.getString();
    switch (typeName) {
      case "boolean":
        checkInvalidGenericsInstantiation(n);
        return this.commonTypes.BOOLEAN;
      case "null":
        checkInvalidGenericsInstantiation(n);
        return this.commonTypes.NULL;
      case "number":
        checkInvalidGenericsInstantiation(n);
        return this.commonTypes.NUMBER;
      case "string":
        checkInvalidGenericsInstantiation(n);
        return this.commonTypes.STRING;
      case "undefined":
      case "void":
        checkInvalidGenericsInstantiation(n);
        return this.commonTypes.UNDEFINED;
      case "Function":
        checkInvalidGenericsInstantiation(n);
        return maybeMakeNullable(this.commonTypes.qmarkFunction());
      case "Object":
        // We don't generally handle parameterized Object<...>, but we want to
        // at least not warn about inexistent properties on it, so we type it
        // as @dict.
        return maybeMakeNullable(n.hasChildren() ? this.commonTypes.TOP_DICT : this.commonTypes.TOP_OBJECT);
      default:
        return lookupTypeByName(typeName, n, registry, outerTypeParameters);
    }
  }

  // Resolves a non-primitive type name: type variable, typedef, enum, nominal
  // type, or forward declaration. Throws UnknownTypeException otherwise.
  private JSType lookupTypeByName(String name, Node n, DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters) throws UnknownTypeException {
    String tvar = UniqueNameGenerator.findGeneratedName(name, outerTypeParameters);
    if (tvar != null) {
      // The name refers to a type variable currently in scope.
      checkInvalidGenericsInstantiation(n);
      return JSType.fromTypeVar(this.commonTypes, tvar);
    }
    Declaration decl = registry.getDeclaration(QualifiedName.fromQualifiedString(name), true);
    if (decl == null) {
      // Remember where we saw the unknown name so callers can report it later.
      unknownTypeNames.put(n, name);
      throw new UnknownTypeException("Unhandled type: " + name);
    }
    // It's either a typedef, an enum, a type variable, a nominal type, or a
    // forward-declared type.
    if (decl.getTypedef() != null) {
      checkInvalidGenericsInstantiation(n);
      return getTypedefType(decl.getTypedef(), registry);
    }
    if (decl.getEnum() != null) {
      checkInvalidGenericsInstantiation(n);
      return getEnumPropType(decl.getEnum(), registry);
    }
    if (decl.isTypeVar()) {
      checkInvalidGenericsInstantiation(n);
      // Track type-variable usage; resolveEnum reads this counter to reject
      // enums whose type expression mentions type variables.
      howmanyTypeVars++;
      return decl.getTypeOfSimpleDecl();
    }
    if (decl.getNominal() != null) {
      return getNominalTypeHelper(decl.getNominal(), n, registry, outerTypeParameters);
    }
    // Forward-declared type
    return this.commonTypes.UNKNOWN;
  }

  // Resolves the typedef lazily, then returns its underlying type.
  private JSType getTypedefType(Typedef td, DeclaredTypeRegistry registry) {
    resolveTypedef(td, registry);
    return td.getType();
  }

  // Idempotent: resolves the typedef's type expression once; a missing
  // expression indicates a typedef cycle and resolves to UNKNOWN.
  public void resolveTypedef(Typedef td, DeclaredTypeRegistry registry) {
    Preconditions.checkState(td != null, "getTypedef should only be " + "called when we know that the typedef is defined");
    if (td.isResolved()) {
      return;
    }
    JSTypeExpression texp = td.getTypeExpr();
    JSType tdType;
    if (texp == null) {
      warnings.add(JSError.make(
          td.getTypeExprForErrorReporting().getRoot(), CIRCULAR_TYPEDEF_ENUM));
      tdType = this.commonTypes.UNKNOWN;
    } else {
      tdType = getTypeFromJSTypeExpression(texp, registry, null);
    }
    td.resolveTypedef(tdType);
  }

  // Resolves the enum lazily, then returns the type of its properties.
  private JSType getEnumPropType(EnumType e, DeclaredTypeRegistry registry) {
    resolveEnum(e, registry);
    return e.getPropType();
  }

  // Idempotent: resolves the enum's element type, rejecting cycles, type
  // variables, the top type, and union types (each downgrades to UNKNOWN
  // with a warning).
  public void resolveEnum(EnumType e, DeclaredTypeRegistry registry) {
    Preconditions.checkState(e != null, "getEnum should only be " + "called when we know that the enum is defined");
    if (e.isResolved()) {
      return;
    }
    JSTypeExpression texp = e.getTypeExpr();
    JSType enumeratedType;
    if (texp == null) {
      warnings.add(JSError.make(
          e.getTypeExprForErrorReporting().getRoot(), CIRCULAR_TYPEDEF_ENUM));
      enumeratedType = this.commonTypes.UNKNOWN;
    } else {
      // Detect type-variable usage by comparing the counter before/after parsing.
      int numTypeVars = howmanyTypeVars;
      enumeratedType = getTypeFromJSTypeExpression(texp, registry, null);
      if (howmanyTypeVars > numTypeVars) {
        warnings.add(JSError.make(texp.getRoot(), ENUM_WITH_TYPEVARS));
        enumeratedType = this.commonTypes.UNKNOWN;
        howmanyTypeVars = numTypeVars;
      } else if (enumeratedType.isTop()) {
        warnings.add(JSError.make(texp.getRoot(), ENUM_IS_TOP));
        enumeratedType = this.commonTypes.UNKNOWN;
      } else if (enumeratedType.isUnion()) {
        warnings.add(JSError.make(texp.getRoot(), ENUM_IS_UNION));
        enumeratedType = this.commonTypes.UNKNOWN;
      }
    }
    e.resolveEnum(enumeratedType);
  }

  // Warns when type arguments are applied to a name that takes none.
  private void checkInvalidGenericsInstantiation(Node n) {
    if (n.hasChildren()) {
      Preconditions.checkState(n.getFirstChild().isBlock(), n);
      warnings.add(JSError.make(n, INVALID_GENERICS_INSTANTIATION, "", "0", String.valueOf(n.getFirstChild().getChildCount())));
    }
  }

  // Instantiates a nominal type, checking the number of type arguments
  // against the type's declared type parameters.
  private JSType getNominalTypeHelper(RawNominalType rawType, Node n, DeclaredTypeRegistry registry, ImmutableList<String> outerTypeParameters) throws UnknownTypeException {
    NominalType uninstantiated = rawType.getAsNominalType();
    if (!rawType.isGeneric() && !n.hasChildren()) {
      return rawType.getInstanceWithNullability(NULLABLE_TYPES_BY_DEFAULT);
    }
    ImmutableList.Builder<JSType> typeList = ImmutableList.builder();
    if (n.hasChildren()) {
      // Compute instantiation of polymorphic class/interface.
      Preconditions.checkState(n.getFirstChild().isBlock(), n);
      for (Node child : n.getFirstChild().children()) {
        typeList.add(
            getTypeFromCommentHelper(child, registry, outerTypeParameters));
      }
    }
    ImmutableList<JSType> typeArguments = typeList.build();
    ImmutableList<String> typeParameters = rawType.getTypeParameters();
    int typeArgsSize = typeArguments.size();
    int typeParamsSize = typeParameters.size();
    if (typeArgsSize != typeParamsSize) {
      // We used to also warn when (typeArgsSize < typeParamsSize), but it
      // happens so often that we stopped. Array, Object and goog.Promise are
      // common culprits, but many other types as well.
      if (typeArgsSize > typeParamsSize) {
        // Too many type arguments: warn, then truncate/pad to the declared arity.
        warnings.add(JSError.make(
            n, INVALID_GENERICS_INSTANTIATION,
            " for type " + uninstantiated.getName(),
            String.valueOf(typeParamsSize), String.valueOf(typeArgsSize)));
      }
      return maybeMakeNullable(JSType.fromObjectType(ObjectType.fromNominalType(
          uninstantiated.instantiateGenerics(
              fixLengthOfTypeList(typeParameters.size(), typeArguments)))));
    }
    return maybeMakeNullable(JSType.fromObjectType(ObjectType.fromNominalType(
        uninstantiated.instantiateGenerics(typeArguments))));
  }

  // Pads with UNKNOWN or truncates so the list has exactly desiredLength elements.
  private List<JSType> fixLengthOfTypeList(
      int desiredLength, List<JSType> typeList) {
    int length = typeList.size();
    if (length == desiredLength) {
      return typeList;
    }
    ImmutableList.Builder<JSType> builder = ImmutableList.builder();
    for (int i = 0; i < desiredLength; i++) {
      builder.add(i < length ? typeList.get(i) : this.commonTypes.UNKNOWN);
    }
    return builder.build();
  }

  // Computes a type from a jsdoc that includes a function type, rather than
  // one that includes @param, @return, etc.
  private JSType getFunTypeHelper(Node jsdocNode, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) throws UnknownTypeException {
    FunctionTypeBuilder builder = new FunctionTypeBuilder(this.commonTypes);
    fillInFunTypeBuilder(jsdocNode, null, registry, typeParameters, builder);
    return this.commonTypes.fromFunctionType(builder.buildFunction());
  }

  // Walks a function-type jsdoc AST (this:/new:/params/return) and records
  // each piece into the builder.
  private void fillInFunTypeBuilder(
      Node jsdocNode, RawNominalType ownerType, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters, FunctionTypeBuilder builder) throws UnknownTypeException {
    Node child = jsdocNode.getFirstChild();
    if (child.getType() == Token.THIS) {
      // Explicit receiver type: function(this: T, ...). Only recorded when no
      // owner type already provides one.
      if (ownerType == null) {
        builder.addReceiverType(
            getThisOrNewType(child.getFirstChild(), registry, typeParameters));
      }
      child = child.getNext();
    } else if (child.getType() == Token.NEW) {
      // Constructor type: function(new: T, ...).
      Node newTypeNode = child.getFirstChild();
      JSType t = getThisOrNewType(newTypeNode, registry, typeParameters);
      if (!t.isSubtypeOf(this.commonTypes.TOP_OBJECT) && (!t.hasTypeVariable() || t.hasScalar())) {
        warnings.add(JSError.make(
            newTypeNode, NEW_EXPECTS_OBJECT_OR_TYPEVAR, t.toString()));
      }
      builder.addNominalType(t);
      child = child.getNext();
    }
    if (child.getType() == Token.PARAM_LIST) {
      for (Node arg = child.getFirstChild(); arg != null; arg = arg.getNext()) {
        try {
          switch (arg.getType()) {
            case EQUALS:
              // Optional formal: T=
              builder.addOptFormal(getTypeFromCommentHelper(
                  arg.getFirstChild(), registry, typeParameters));
              break;
            case ELLIPSIS:
              // Rest formal: ...T (bare "..." means unknown element type)
              Node restNode = arg.getFirstChild();
              builder.addRestFormals(restNode == null ?
this.commonTypes.UNKNOWN : getTypeFromCommentHelper(restNode, registry, typeParameters));
              break;
            default:
              builder.addReqFormal(
                  getTypeFromCommentHelper(arg, registry, typeParameters));
              break;
          }
        } catch (FunctionTypeBuilder.WrongParameterOrderException e) {
          // Keep going with a placeholder so the remaining formals are still parsed.
          warnings.add(JSError.make(jsdocNode, WRONG_PARAMETER_ORDER));
          builder.addPlaceholderFormal();
        }
      }
      child = child.getNext();
    }
    builder.addRetType(
        getTypeFromCommentHelper(child, registry, typeParameters));
  }

  // For this:/new: annotations: strips the default-added null from a singleton
  // object type so the receiver/instance type is non-nullable.
  private JSType getThisOrNewType(Node n, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    JSType t = getTypeFromComment(n, registry, typeParameters);
    return t.isSingletonObjWithNull() ? t.removeType(this.commonTypes.NULL) : t;
  }

  private ImmutableSet<NominalType> getImplementedInterfaces(
      JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    return getInterfacesHelper(jsdoc, registry, typeParameters, true);
  }

  private ImmutableSet<NominalType> getExtendedInterfaces(
      JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) {
    return getInterfacesHelper(jsdoc, registry, typeParameters, false);
  }

  // Collects the interfaces named by @implements (implementedIntfs=true) or
  // @extends-on-interface (false), warning on names that are not interfaces.
  private ImmutableSet<NominalType> getInterfacesHelper(
      JSDocInfo jsdoc, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters, boolean implementedIntfs) {
    ImmutableSet.Builder<NominalType> builder = ImmutableSet.builder();
    for (JSTypeExpression texp : (implementedIntfs ? jsdoc.getImplementedInterfaces() : jsdoc.getExtendedInterfaces())) {
      Node expRoot = texp.getRoot();
      JSType interfaceType = getMaybeTypeFromComment(expRoot, registry, typeParameters);
      if (interfaceType != null) {
        NominalType nt = interfaceType.getNominalTypeIfSingletonObj();
        if (nt != null && nt.isInterface()) {
          builder.add(nt);
        } else if (implementedIntfs) {
          warnings.add(JSError.make(
              expRoot, IMPLEMENTS_NON_INTERFACE, interfaceType.toString()));
        } else {
          warnings.add(JSError.make(
              expRoot, EXTENDS_NON_INTERFACE, interfaceType.toString()));
        }
      }
    }
    return builder.build();
  }

  /** Pairs a declared function type with an optional non-function slot type. */
  public static class FunctionAndSlotType {
    public JSType slotType;
    public DeclaredFunctionType functionType;

    public FunctionAndSlotType(JSType slotType, DeclaredFunctionType functionType) {
      this.slotType = slotType;
      this.functionType = functionType;
    }
  }

  /**
   * Consumes either a "classic" function jsdoc with @param, @return, etc,
   * or a jsdoc with @type {function ...} and finds the types of the formal
   * parameters and the return value. It returns a builder because the callers
   * of this function must separately handle @constructor, @interface, etc.
   *
   * constructorType is non-null iff this function is a constructor or
   * interface declaration.
   */
  public FunctionAndSlotType getFunctionType(
      JSDocInfo jsdoc, String functionName, Node declNode, RawNominalType constructorType, RawNominalType ownerType, DeclaredTypeRegistry registry) {
    FunctionTypeBuilder builder = new FunctionTypeBuilder(this.commonTypes);
    if (ownerType != null) {
      builder.addReceiverType(ownerType.getInstanceAsJSType());
    }
    try {
      if (jsdoc != null && jsdoc.getType() != null) {
        // The function is annotated with @type rather than @param/@return.
        JSType simpleType = getDeclaredTypeOfNode(jsdoc, ownerType, registry);
        if (simpleType.isUnknown() || simpleType.isTop()) {
          return qmarkFunctionDeclared;
        }
        FunctionType funType = simpleType.getFunType();
        if (funType != null) {
          JSType slotType = simpleType.isFunctionType() ?
null : simpleType;
          DeclaredFunctionType declType = funType.toDeclaredFunctionType();
          if (ownerType != null && funType.getThisType() == null) {
            // Methods without an explicit this: get the owner's instance type.
            declType = declType.withReceiverType(ownerType.getInstanceAsJSType());
          }
          return new FunctionAndSlotType(slotType, declType);
        } else {
          // @type present but not a function type: warn and fall back to the
          // classic @param/@return path with no jsdoc.
          warnings.add(JSError.make(declNode, FUNCTION_WITH_NONFUNC_JSDOC));
          jsdoc = null;
        }
      }
      DeclaredFunctionType declType = getFunTypeFromTypicalFunctionJsdoc(
          jsdoc, functionName, declNode, constructorType, ownerType, registry, builder);
      return new FunctionAndSlotType(null, declType);
    } catch (FunctionTypeBuilder.WrongParameterOrderException e) {
      warnings.add(JSError.make(declNode, WRONG_PARAMETER_ORDER));
      return qmarkFunctionDeclared;
    }
  }

  /** Iterates formal parameters from either a PARAM_LIST node or jsdoc names. */
  private static class ParamIterator {
    /** The parameter names from the JSDocInfo. Only set if 'params' is null. */
    Iterator<String> paramNames;
    /**
     * The PARAM_LIST node containing the function parameters. Only set if
     * 'paramNames' is null.
     */
    Node params;
    int index = -1;

    ParamIterator(Node params, JSDocInfo jsdoc) {
      Preconditions.checkArgument(params != null || jsdoc != null);
      if (params != null) {
        this.params = params;
        this.paramNames = null;
      } else {
        this.params = null;
        this.paramNames = jsdoc.getParameterNames().iterator();
      }
    }

    boolean hasNext() {
      if (paramNames != null) {
        return paramNames.hasNext();
      }
      return index + 1 < params.getChildCount();
    }

    String nextString() {
      if (paramNames != null) {
        return paramNames.next();
      }
      index++;
      return params.getChildAtIndex(index).getString();
    }

    // Returns the AST node for the current parameter, or null when iterating
    // jsdoc names only.
    Node getNode() {
      if (paramNames != null) {
        return null;
      }
      return params.getChildAtIndex(index);
    }
  }

  // Builds a DeclaredFunctionType from classic @param/@return/@template jsdoc,
  // plus @constructor/@interface/@extends/@implements/@this handling.
  private DeclaredFunctionType getFunTypeFromTypicalFunctionJsdoc(
      JSDocInfo jsdoc, String functionName, Node funNode, RawNominalType constructorType, RawNominalType ownerType, DeclaredTypeRegistry registry, FunctionTypeBuilder builder) {
    ImmutableList.Builder<String> typeParamsBuilder = ImmutableList.builder();
    ImmutableList<String> typeParameters = ImmutableList.of();
    Node parent = funNode.getParent();
    // TODO(dimvar): need more @template warnings
    // - warn for multiple @template annotations
    // - warn for @template annotation w/out usage
    boolean ignoreJsdoc = false;
    if (jsdoc != null) {
      if (constructorType != null) {
        // We have created new names for these type variables in GTI, don't
        // create new ones here.
        typeParamsBuilder.addAll(constructorType.getTypeParameters());
      } else {
        for (String typeParam : jsdoc.getTemplateTypeNames()) {
          typeParamsBuilder.add(this.nameGen.getNextName(typeParam));
        }
      }
      // We don't properly support the type transformation language; we treat
      // its type variables as ordinary type variables.
      for (String typeParam : jsdoc.getTypeTransformations().keySet()) {
        typeParamsBuilder.add(this.nameGen.getNextName(typeParam));
      }
      typeParameters = typeParamsBuilder.build();
      if (!typeParameters.isEmpty()) {
        if (parent.isSetterDef() || parent.isGetterDef()) {
          // @template is not allowed on getters/setters; drop the jsdoc entirely.
          ignoreJsdoc = true;
          jsdoc = null;
          warnings.add(JSError.make(funNode, TEMPLATED_GETTER_SETTER));
        } else {
          builder.addTypeParameters(typeParameters);
        }
      }
    }
    if (ownerType != null) {
      // Methods also see the owner type's type parameters.
      typeParamsBuilder.addAll(ownerType.getTypeParameters());
      typeParameters = typeParamsBuilder.build();
    }
    fillInFormalParameterTypes(
        jsdoc, funNode, typeParameters, registry, builder, ignoreJsdoc);
    fillInReturnType(
        jsdoc, funNode, parent, typeParameters, registry, builder, ignoreJsdoc);
    if (jsdoc == null) {
      return builder.buildDeclaration();
    }
    // Look at other annotations, eg, @constructor
    NominalType parentClass = getMaybeParentClass(
        jsdoc, functionName, funNode, typeParameters, registry);
    ImmutableSet<NominalType> implementedIntfs = getImplementedInterfaces(
        jsdoc, registry, typeParameters);
    if (constructorType == null && jsdoc.isConstructorOrInterface()) {
      // Anonymous type, don't register it.
      return builder.buildDeclaration();
    } else if (jsdoc.isConstructor()) {
      handleConstructorAnnotation(functionName, funNode, constructorType, parentClass, implementedIntfs, registry, builder);
    } else if (jsdoc.isInterface()) {
      handleInterfaceAnnotation(jsdoc, functionName, funNode, constructorType, implementedIntfs, typeParameters, registry, builder);
    } else if (!implementedIntfs.isEmpty()) {
      // @implements on a plain function (neither @constructor nor @interface).
      warnings.add(JSError.make(
          funNode, IMPLEMENTS_WITHOUT_CONSTRUCTOR, functionName));
    }
    if (jsdoc.hasThisType()) {
      Node thisRoot = jsdoc.getThisType().getRoot();
      Preconditions.checkState(thisRoot.getType() == Token.BANG);
      builder.addReceiverType(
          getThisOrNewType(thisRoot.getFirstChild(), registry, typeParameters));
    }
    return builder.buildDeclaration();
  }

  // Records one formal per iterator step into the builder, merging inline
  // per-parameter jsdoc with the function-level @param annotations (warning on
  // conflicts) and classifying each as required/optional/rest.
  private void fillInFormalParameterTypes(
      JSDocInfo jsdoc, Node funNode, ImmutableList<String> typeParameters, DeclaredTypeRegistry registry, FunctionTypeBuilder builder, boolean ignoreJsdoc /* for when the jsdoc is malformed */) {
    boolean ignoreFunNode = !funNode.isFunction();
    Node params = ignoreFunNode ? null : funNode.getSecondChild();
    ParamIterator iterator = new ParamIterator(params, jsdoc);
    while (iterator.hasNext()) {
      String pname = iterator.nextString();
      Node param = iterator.getNode();
      ParameterKind p = ParameterKind.REQUIRED;
      // Coding-convention markers (e.g. opt_ prefix, var_args) set the kind.
      if (param != null && convention.isOptionalParameter(param)) {
        p = ParameterKind.OPTIONAL;
      } else if (param != null && convention.isVarArgsParameter(param)) {
        p = ParameterKind.REST;
      }
      ParameterType inlineParamType = (ignoreJsdoc || ignoreFunNode || param.getJSDocInfo() == null) ? null : parseParameter(param.getJSDocInfo().getType(), p, registry, typeParameters);
      ParameterType fnParamType = inlineParamType;
      JSTypeExpression jsdocExp = jsdoc == null ? null : jsdoc.getParameterType(pname);
      if (jsdocExp != null) {
        if (inlineParamType == null) {
          fnParamType = parseParameter(jsdocExp, p, registry, typeParameters);
        } else {
          // Both an inline jsdoc and an @param for the same formal.
          warnings.add(JSError.make(
              param, TWO_JSDOCS, "formal parameter " + pname));
        }
      }
      JSType t = null;
      if (fnParamType != null) {
        p = fnParamType.kind;
        t = fnParamType.type;
      }
      switch (p) {
        case REQUIRED:
          builder.addReqFormal(t);
          break;
        case OPTIONAL:
          builder.addOptFormal(t);
          break;
        case REST:
          builder.addRestFormals(t != null ? t : this.commonTypes.UNKNOWN);
          break;
      }
    }
  }

  // Determines the return type from inline return jsdoc or @return, with
  // special-casing for setters (implicit undefined) and duplicate jsdoc.
  private void fillInReturnType(
      JSDocInfo jsdoc, Node funNode, Node parent, ImmutableList<String> typeParameters, DeclaredTypeRegistry registry, FunctionTypeBuilder builder, boolean ignoreJsdoc /* for when the jsdoc is malformed */) {
    JSDocInfo inlineRetJsdoc = ignoreJsdoc || !funNode.isFunction() ? null : funNode.getFirstChild().getJSDocInfo();
    JSTypeExpression retTypeExp = jsdoc == null ? null : jsdoc.getReturnType();
    if (parent.isSetterDef() && retTypeExp == null) {
      // inline returns for getters/setters are not parsed
      builder.addRetType(this.commonTypes.UNDEFINED);
    } else if (inlineRetJsdoc != null) {
      builder.addRetType(
          getDeclaredTypeOfNode(inlineRetJsdoc, registry, typeParameters));
      if (retTypeExp != null) {
        warnings.add(JSError.make(funNode, TWO_JSDOCS, "the return type"));
      }
    } else {
      builder.addRetType(
          getTypeFromJSTypeExpression(retTypeExp, registry, typeParameters));
    }
  }

  // Returns the class named by @extends when this is a valid @constructor
  // extension, otherwise warns and returns null.
  private NominalType getMaybeParentClass(
      JSDocInfo jsdoc, String functionName, Node funNode, ImmutableList<String> typeParameters, DeclaredTypeRegistry registry) {
    if (!jsdoc.hasBaseType()) {
      return null;
    }
    if (!jsdoc.isConstructor()) {
      warnings.add(JSError.make(
          funNode, EXTENDS_NOT_ON_CTOR_OR_INTERF, functionName));
      return null;
    }
    Node docNode = jsdoc.getBaseType().getRoot();
    JSType extendedType = getMaybeTypeFromComment(docNode, registry, typeParameters);
    if (extendedType == null) {
      return null;
    }
    NominalType parentClass =
extendedType.getNominalTypeIfSingletonObj(); if (parentClass != null && parentClass.isClass()) { return parentClass; } if (parentClass == null) { warnings.add(JSError.make(funNode, EXTENDS_NON_OBJECT, functionName, extendedType.toString())); } else { Preconditions.checkState(parentClass.isInterface()); warnings.add(JSError.make(funNode, CONFLICTING_EXTENDED_TYPE, "constructor", functionName)); } return null; } private void handleConstructorAnnotation( String functionName, Node funNode, RawNominalType constructorType, NominalType parentClass, ImmutableSet<NominalType> implementedIntfs, DeclaredTypeRegistry registry, FunctionTypeBuilder builder) { String className = constructorType.toString(); NominalType builtinObject = this.commonTypes.getObjectType(); if (parentClass == null && !functionName.equals("Object")) { parentClass = builtinObject; } if (parentClass != null && !constructorType.addSuperClass(parentClass)) { warnings.add(JSError.make(funNode, INHERITANCE_CYCLE, className)); } if (constructorType.isDict() && !implementedIntfs.isEmpty()) { warnings.add(JSError.make(funNode, DICT_IMPLEMENTS_INTERF, className)); } boolean noCycles = constructorType.addInterfaces(implementedIntfs); Preconditions.checkState(noCycles); builder.addNominalType(constructorType.getInstanceAsJSType()); } private void handleInterfaceAnnotation( JSDocInfo jsdoc, String functionName, Node funNode, RawNominalType constructorType, ImmutableSet<NominalType> implementedIntfs, ImmutableList<String> typeParameters, DeclaredTypeRegistry registry, FunctionTypeBuilder builder) { if (!implementedIntfs.isEmpty()) { warnings.add(JSError.make( funNode, CONFLICTING_IMPLEMENTED_TYPE, functionName)); } ImmutableSet<NominalType> extendedInterfaces = getExtendedInterfaces(jsdoc, registry, typeParameters); boolean noCycles = constructorType.addInterfaces( extendedInterfaces.isEmpty() ? 
ImmutableSet.of(this.commonTypes.getObjectType()) : extendedInterfaces); if (!noCycles) { warnings.add(JSError.make( funNode, INHERITANCE_CYCLE, constructorType.toString())); } builder.addNominalType(constructorType.getInstanceAsJSType()); } // /** @param {...?} var_args */ function f(var_args) { ... } // var_args shouldn't be used in the body of f public static boolean isRestArg(JSDocInfo funJsdoc, String formalParamName) { if (funJsdoc == null) { return false; } JSTypeExpression texp = funJsdoc.getParameterType(formalParamName); Node jsdocNode = texp == null ? null : texp.getRoot(); return jsdocNode != null && jsdocNode.getType() == Token.ELLIPSIS; } private ParameterType parseParameter( JSTypeExpression jsdoc, ParameterKind p, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { if (jsdoc == null) { return null; } return parseParameter(jsdoc.getRoot(), p, registry, typeParameters); } private ParameterType parseParameter( Node jsdoc, ParameterKind p, DeclaredTypeRegistry registry, ImmutableList<String> typeParameters) { if (jsdoc == null) { return null; } switch (jsdoc.getType()) { case EQUALS: p = ParameterKind.OPTIONAL; jsdoc = jsdoc.getFirstChild(); break; case ELLIPSIS: p = ParameterKind.REST; jsdoc = jsdoc.getFirstChild(); break; default: break; } JSType t = getMaybeTypeFromComment(jsdoc, registry, typeParameters); return new ParameterType(t, p); } private static class ParameterType { private JSType type; private ParameterKind kind; ParameterType(JSType type, ParameterKind kind) { this.type = type; this.kind = kind; } } private static enum ParameterKind { REQUIRED, OPTIONAL, REST, } }
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.codeInsight.ChangeContextUtil; import com.intellij.codeInsight.FileModificationService; import com.intellij.codeInsight.daemon.GroupNames; import com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil; import com.intellij.codeInsight.intention.HighPriorityAction; import com.intellij.codeInspection.ui.SingleCheckboxOptionsPanel; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.controlFlow.AnalysisCanceledException; import com.intellij.psi.controlFlow.ControlFlow; import com.intellij.psi.controlFlow.ControlFlowUtil; import com.intellij.psi.util.*; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.hash.LinkedHashMap; import com.intellij.util.text.UniqueNameGenerator; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.*; 
/**
 * Inspection that reports anonymous classes which can be converted to lambda expressions
 * (Java 8+), and provides the quick fix performing that conversion.
 *
 * <p>User: anna
 */
public class AnonymousCanBeLambdaInspection extends BaseJavaBatchLocalInspectionTool {
  public static final Logger LOG = Logger.getInstance("#" + AnonymousCanBeLambdaInspection.class.getName());

  // Option (bound to the checkbox in createOptionsPanel): when false, only interfaces
  // explicitly annotated with @FunctionalInterface are reported.
  public boolean reportNotAnnotatedInterfaces = true;

  @Nls
  @NotNull
  @Override
  public String getGroupDisplayName() {
    return GroupNames.LANGUAGE_LEVEL_SPECIFIC_GROUP_NAME;
  }

  @Nls
  @NotNull
  @Override
  public String getDisplayName() {
    return "Anonymous type can be replaced with lambda";
  }

  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  @NotNull
  @Override
  public String getShortName() {
    return "Convert2Lambda";
  }

  @Nullable
  @Override
  public JComponent createOptionsPanel() {
    return new SingleCheckboxOptionsPanel("Report when interface is not annotated with @FunctionalInterface", this, "reportNotAnnotatedInterfaces");
  }

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
    return new JavaElementVisitor() {
      @Override
      public void visitAnonymousClass(final PsiAnonymousClass aClass) {
        super.visitAnonymousClass(aClass);
        final PsiElement parent = aClass.getParent();
        // parent is the "new Foo() {...}" expression; its parent is the context
        // (assignment, argument list, expression statement, ...).
        final PsiElement lambdaContext = parent != null ? parent.getParent() : null;
        if (lambdaContext != null &&
            (LambdaUtil.isValidLambdaContext(lambdaContext) || !(lambdaContext instanceof PsiExpressionStatement)) &&
            canBeConvertedToLambda(aClass, false, reportNotAnnotatedInterfaces, Collections.emptySet())) {
          final PsiElement lBrace = aClass.getLBrace();
          LOG.assertTrue(lBrace != null);
          // Highlight only the "new Foo()" head, up to the opening brace of the body.
          final TextRange rangeInElement = new TextRange(0, aClass.getStartOffsetInParent() + lBrace.getStartOffsetInParent());
          holder.registerProblem(parent, "Anonymous #ref #loc can be replaced with lambda",
                                 ProblemHighlightType.LIKE_UNUSED_SYMBOL, rangeInElement, new ReplaceWithLambdaFix());
        }
      }
    };
  }

  /**
   * Returns {@code true} if the method carries an annotation whose retention policy is
   * {@link RetentionPolicy#RUNTIME} (such annotations would be lost in the conversion),
   * ignoring the annotation classes named in {@code runtimeAnnotationsToIgnore}.
   */
  private static boolean hasRuntimeAnnotations(PsiMethod method, @NotNull Set<String> runtimeAnnotationsToIgnore) {
    PsiAnnotation[] annotations = method.getModifierList().getAnnotations();
    for (PsiAnnotation annotation : annotations) {
      PsiJavaCodeReferenceElement ref = annotation.getNameReferenceElement();
      PsiElement target = ref != null ? ref.resolve() : null;
      if (target instanceof PsiClass) {
        if (runtimeAnnotationsToIgnore.contains(((PsiClass)target).getQualifiedName())) {
          continue;
        }
        // Inspect the @Retention meta-annotation on the annotation class itself.
        final PsiAnnotation retentionAnno = AnnotationUtil.findAnnotation((PsiClass)target, Retention.class.getName());
        if (retentionAnno != null) {
          PsiAnnotationMemberValue value = retentionAnno.findAttributeValue("value");
          if (value instanceof PsiReferenceExpression) {
            final PsiElement resolved = ((PsiReferenceExpression)value).resolve();
            if (resolved instanceof PsiField && RetentionPolicy.RUNTIME.name().equals(((PsiField)resolved).getName())) {
              final PsiClass containingClass = ((PsiField)resolved).getContainingClass();
              if (containingClass != null && RetentionPolicy.class.getName().equals(containingClass.getQualifiedName())) {
                return true;
              }
            }
          }
        }
      }
    }
    return false;
  }

  /**
   * Returns {@code true} if the method body references constructs that have different
   * semantics inside a lambda (unqualified this/super, the functional method itself,
   * not-yet-assigned final fields, etc.) — see {@link ForbiddenRefsChecker}.
   */
  public static boolean hasForbiddenRefsInsideBody(PsiMethod method, PsiAnonymousClass aClass) {
    final ForbiddenRefsChecker checker = new ForbiddenRefsChecker(method, aClass);
    final PsiCodeBlock body = method.getBody();
    LOG.assertTrue(body != null);
    body.accept(checker);
    return checker.hasForbiddenRefs();
  }

  /**
   * Determines the functional interface type that would be inferred for the lambda
   * replacing this anonymous class. Returns {@code null} when inference fails on a
   * non-physical copy of the enclosing call, and {@link PsiType#NULL} as a "no better
   * information" marker in the remaining cases.
   */
  private static PsiType getInferredType(PsiAnonymousClass aClass, PsiMethod method) {
    final PsiExpression expression = (PsiExpression)aClass.getParent();
    final PsiType psiType = PsiTypesUtil.getExpectedTypeByParent(expression);
    if (psiType != null) {
      return psiType;
    }
    // Walk out of parentheses to the top-most expression, then up to the enclosing call.
    PsiExpression topExpr = expression;
    while (topExpr.getParent() instanceof PsiParenthesizedExpression) {
      topExpr = (PsiExpression)topExpr.getParent();
    }
    final PsiCall call = LambdaUtil.treeWalkUp(topExpr);
    if (call != null && call.resolveMethod() != null) {
      final int offsetInTopCall = aClass.getTextRange().getStartOffset() - call.getTextRange().getStartOffset();
      // Try the replacement speculatively on a copy of the whole call, so the real file
      // is untouched if overload resolution breaks.
      PsiCall copyCall = LambdaUtil.copyTopLevelCall(call);
      if (copyCall == null) return null;
      final PsiAnonymousClass classArg = PsiTreeUtil.getParentOfType(copyCall.findElementAt(offsetInTopCall), PsiAnonymousClass.class);
      if (classArg != null) {
        PsiExpression lambda = JavaPsiFacade.getElementFactory(aClass.getProject())
          .createExpressionFromText(ReplaceWithLambdaFix.composeLambdaText(method), expression);
        lambda = (PsiExpression)classArg.getParent().replace(lambda);
        ((PsiLambdaExpression)lambda).getBody().replace(method.getBody());
        final PsiType interfaceType;
        if (copyCall.resolveMethod() == null) {
          // The call no longer resolves with the lambda in place — conversion is unsafe.
          return PsiType.NULL;
        }
        else {
          interfaceType = ((PsiLambdaExpression)lambda).getFunctionalInterfaceType();
        }
        return interfaceType;
      }
    }
    return PsiType.NULL;
  }

  /**
   * Convenience overload: reports interfaces regardless of the @FunctionalInterface annotation.
   */
  public static boolean canBeConvertedToLambda(PsiAnonymousClass aClass,
                                               boolean acceptParameterizedFunctionTypes,
                                               @NotNull Set<String> ignoredRuntimeAnnotations) {
    return canBeConvertedToLambda(aClass, acceptParameterizedFunctionTypes, true, ignoredRuntimeAnnotations);
  }

  /**
   * Checks all preconditions for replacing the anonymous class with a lambda:
   * language level 8+, functional base interface, exactly one method, no fields /
   * inner classes / initializers, no doc comment, no runtime annotations, not
   * synchronized, no forbidden references in the body, and a resolvable inferred type.
   */
  public static boolean canBeConvertedToLambda(PsiAnonymousClass aClass,
                                               boolean acceptParameterizedFunctionTypes,
                                               boolean reportNotAnnotatedInterfaces,
                                               @NotNull Set<String> ignoredRuntimeAnnotations) {
    if (PsiUtil.getLanguageLevel(aClass).isAtLeast(LanguageLevel.JDK_1_8)) {
      final PsiClassType baseClassType = aClass.getBaseClassType();
      final PsiClassType.ClassResolveResult resolveResult = baseClassType.resolveGenerics();
      final PsiClass baseClass = resolveResult.getElement();
      if (baseClass == null ||
          !reportNotAnnotatedInterfaces && !AnnotationUtil.isAnnotated(baseClass, CommonClassNames.JAVA_LANG_FUNCTIONAL_INTERFACE, false, false)) {
        return false;
      }
      final PsiMethod interfaceMethod = LambdaUtil.getFunctionalInterfaceMethod(resolveResult);
      if (interfaceMethod != null && (acceptParameterizedFunctionTypes || !interfaceMethod.hasTypeParameters())) {
        final PsiMethod[] methods = aClass.getMethods();
        if (methods.length == 1 &&
            aClass.getFields().length == 0 &&
            aClass.getInnerClasses().length == 0 &&
            aClass.getInitializers().length == 0) {
          final PsiMethod method = methods[0];
          if (method.getBody() != null &&
              method.getDocComment() == null &&
              !hasRuntimeAnnotations(method, ignoredRuntimeAnnotations) &&
              !method.hasModifierProperty(PsiModifier.SYNCHRONIZED) &&
              !hasForbiddenRefsInsideBody(method, aClass)) {
            final PsiType inferredType = getInferredType(aClass, method);
            if (inferredType == null) {
              return false;
            }
            return true;
          }
        }
      }
    }
    return false;
  }

  /**
   * Converts the anonymous class to a lambda by temporarily wrapping it in an array
   * initializer of the expected type ("new T[]{anonymous}") so that type inference
   * has a target type, then delegating to {@link #replacePsiElementWithLambda}.
   */
  public static PsiExpression replaceAnonymousWithLambda(@NotNull PsiElement anonymousClass, PsiType expectedType) {
    PsiNewExpression newArrayExpression = (PsiNewExpression)JavaPsiFacade.getElementFactory(anonymousClass.getProject())
      .createExpressionFromText("new " + expectedType.getCanonicalText() + "[]{" + anonymousClass.getText() + "}", anonymousClass);
    PsiArrayInitializerExpression initializer = newArrayExpression.getArrayInitializer();
    LOG.assertTrue(initializer != null);
    return replacePsiElementWithLambda(initializer.getInitializers()[0], true, false);
  }

  /**
   * Performs the actual anonymous-class-to-lambda replacement.
   *
   * @param element             the {@code new} expression creating the anonymous class
   * @param ignoreEqualsMethod  when true, the first non-"equals" method is converted
   * @param forceIgnoreTypeCast when true, never insert a clarifying "(Interface)" cast
   * @return the resulting lambda (or cast) expression, or {@code null} if the element
   *         is not convertible / not writable
   */
  public static PsiExpression replacePsiElementWithLambda(@NotNull PsiElement element,
                                                          final boolean ignoreEqualsMethod,
                                                          boolean forceIgnoreTypeCast) {
    if (element instanceof PsiNewExpression) {
      if (!FileModificationService.getInstance().preparePsiElementForWrite(element)) return null;

      final PsiAnonymousClass anonymousClass = ((PsiNewExpression)element).getAnonymousClass();
      if (anonymousClass == null) return null;

      // Record qualified-name context so references can be restored after the tree surgery.
      ChangeContextUtil.encodeContextInfo(anonymousClass, true);
      final String canonicalText = anonymousClass.getBaseClassType().getCanonicalText();
      final PsiMethod method;
      if (ignoreEqualsMethod) {
        final List<PsiMethod> methods = ContainerUtil.filter(anonymousClass.getMethods(), method1 -> !"equals".equals(method1.getName()));
        method = methods.get(0);
      }
      else {
        method = anonymousClass.getMethods()[0];
      }
      if (method == null) return null;

      final PsiCodeBlock body = method.getBody();
      if (body == null) return null;
      // Comments between the class header and the method body would be lost — keep copies.
      final Collection<PsiComment> comments = collectCommentsOutsideMethodBody(anonymousClass, body);
      final Project project = element.getProject();
      final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project);
      final String withoutTypesDeclared = ReplaceWithLambdaFix.composeLambdaText(method);
      PsiLambdaExpression lambdaExpression = (PsiLambdaExpression)elementFactory.createExpressionFromText(withoutTypesDeclared, anonymousClass);
      PsiElement lambdaBody = lambdaExpression.getBody();
      LOG.assertTrue(lambdaBody != null);
      lambdaBody.replace(body);
      final PsiNewExpression newExpression = (PsiNewExpression)anonymousClass.getParent();
      lambdaExpression = (PsiLambdaExpression)newExpression.replace(lambdaExpression);

      // Rename lambda-local variables that would clash with names visible in the new scope.
      final Set<PsiVariable> variables = new HashSet<>();
      final Set<String> usedLocalNames = new HashSet<>();
      collectLocalVariablesDefinedInsideLambda(lambdaExpression, variables, usedLocalNames);
      ReplaceWithLambdaFix.giveUniqueNames(project, elementFactory, lambdaExpression, usedLocalNames, variables.toArray(new PsiVariable[variables.size()]));

      // Collapse "() -> { return expr; }" to "() -> expr" when possible.
      final PsiExpression singleExpr = RedundantLambdaCodeBlockInspection.isCodeBlockRedundant(lambdaExpression.getBody());
      if (singleExpr != null) {
        lambdaExpression.getBody().replace(singleExpr);
      }
      ChangeContextUtil.decodeContextInfo(lambdaExpression, null, null);
      restoreComments(comments, lambdaExpression);

      final JavaCodeStyleManager javaCodeStyleManager = JavaCodeStyleManager.getInstance(project);
      if (forceIgnoreTypeCast) {
        return (PsiExpression)javaCodeStyleManager.shortenClassReferences(lambdaExpression);
      }

      // Otherwise build "(Interface) lambda", then drop the cast again if it is redundant.
      PsiTypeCastExpression typeCast = (PsiTypeCastExpression)elementFactory
        .createExpressionFromText("(" + canonicalText + ")" + withoutTypesDeclared, lambdaExpression);
      final PsiExpression typeCastOperand = typeCast.getOperand();
      LOG.assertTrue(typeCastOperand instanceof PsiLambdaExpression);
      final PsiElement fromText = ((PsiLambdaExpression)typeCastOperand).getBody();
      LOG.assertTrue(fromText != null);
      lambdaBody = lambdaExpression.getBody();
      LOG.assertTrue(lambdaBody != null);
      fromText.replace(lambdaBody);
      ((PsiLambdaExpression)typeCastOperand).getParameterList().replace(lambdaExpression.getParameterList());
      typeCast = (PsiTypeCastExpression)lambdaExpression.replace(typeCast);
      if (RedundantCastUtil.isCastRedundant(typeCast)) {
        final PsiExpression operand = typeCast.getOperand();
        LOG.assertTrue(operand != null);
        return (PsiExpression)typeCast.replace(operand);
      }
      return (PsiExpression)javaCodeStyleManager.shortenClassReferences(typeCast);
    }
    return null;
  }

  /**
   * Copies all comments inside the anonymous class that are NOT inside the method body
   * (e.g. comments on the class header or between members), so they can be re-attached
   * after the replacement.
   */
  private static Collection<PsiComment> collectCommentsOutsideMethodBody(PsiAnonymousClass anonymousClass, PsiCodeBlock body) {
    final Collection<PsiComment> psiComments = PsiTreeUtil.findChildrenOfType(anonymousClass, PsiComment.class);
    for (Iterator<PsiComment> iterator = psiComments.iterator(); iterator.hasNext(); ) {
      if (PsiTreeUtil.isAncestor(body, iterator.next(), false)) {
        iterator.remove();
      }
    }
    return ContainerUtil.map(psiComments, (comment) -> (PsiComment)comment.copy());
  }

  /**
   * Collects the variables declared inside the lambda (kept in {@code variables}) and
   * the names of surrounding-scope variables the lambda now also sees (added to
   * {@code namesOfVariablesInTheBlock}); the latter must not be reused inside the lambda.
   */
  private static void collectLocalVariablesDefinedInsideLambda(PsiLambdaExpression lambdaExpression,
                                                               final Set<PsiVariable> variables,
                                                               Set<String> namesOfVariablesInTheBlock) {
    PsiElement block = PsiUtil.getTopLevelEnclosingCodeBlock(lambdaExpression, null);
    if (block == null) {
      block = lambdaExpression;
    }
    block.accept(new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitVariable(PsiVariable variable) {
        super.visitVariable(variable);
        if (!(variable instanceof PsiField)) {
          variables.add(variable);
        }
      }
    });
    final PsiResolveHelper helper = PsiResolveHelper.SERVICE.getInstance(lambdaExpression.getProject());
    for (Iterator<PsiVariable> iterator = variables.iterator(); iterator.hasNext(); ) {
      PsiVariable local = iterator.next();
      final String localName = local.getName();
      if (localName == null ||
          shadowingResolve(localName, lambdaExpression, helper) ||
          !PsiTreeUtil.isAncestor(lambdaExpression, local, false)) {
        // Not a lambda-local variable (or its name is free) — record the name as taken.
        iterator.remove();
        namesOfVariablesInTheBlock.add(localName);
      }
    }
  }

  /**
   * Returns {@code true} if {@code localName} does NOT resolve to a conflicting local
   * variable at the lambda's position (i.e. it resolves to nothing or to a field).
   */
  private static boolean shadowingResolve(String localName, PsiLambdaExpression lambdaExpression, PsiResolveHelper helper) {
    final PsiVariable variable = helper.resolveReferencedVariable(localName, lambdaExpression);
    return variable == null || variable instanceof PsiField;
  }

  /**
   * Quick fix that replaces the highlighted anonymous class with a lambda.
   */
  private static class ReplaceWithLambdaFix implements LocalQuickFix, HighPriorityAction {
    @NotNull
    @Override
    public String getName() {
      return "Replace with lambda";
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return getName();
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiElement element = descriptor.getPsiElement();
      if (element != null) {
        replacePsiElementWithLambda(element, false, false);
      }
    }

    /**
     * Renames each of {@code parameters} whose name clashes with {@code usedLocalNames},
     * updating both declarations and references inside {@code body}.
     */
    private static void giveUniqueNames(Project project,
                                        final PsiElementFactory elementFactory,
                                        PsiElement body,
                                        Set<String> usedLocalNames,
                                        PsiVariable[] parameters) {
      final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
      final Map<PsiVariable, String> names = new HashMap<>();
      for (PsiVariable parameter : parameters) {
        String parameterName = parameter.getName();
        String uniqueVariableName = UniqueNameGenerator.generateUniqueName(codeStyleManager.suggestUniqueVariableName(parameterName, parameter.getParent(), false), usedLocalNames);
        if (!Comparing.equal(parameterName, uniqueVariableName)) {
          names.put(parameter, uniqueVariableName);
        }
      }
      if (names.isEmpty()) return;
      // Collect all replacements first, then apply, so resolve() is not confused by
      // a partially rewritten tree.
      final LinkedHashMap<PsiElement, PsiElement> replacements = new LinkedHashMap<>();
      body.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override
        public void visitVariable(PsiVariable variable) {
          super.visitVariable(variable);
          final String newName = names.get(variable);
          if (newName != null) {
            replacements.put(variable.getNameIdentifier(), elementFactory.createIdentifier(newName));
          }
        }

        @Override
        public void visitReferenceExpression(PsiReferenceExpression expression) {
          super.visitReferenceExpression(expression);
          final PsiElement resolve = expression.resolve();
          if (resolve instanceof PsiVariable) {
            final String newName = names.get(resolve);
            if (newName != null) {
              replacements.put(expression, elementFactory.createExpressionFromText(newName, expression));
            }
          }
        }
      });
      for (PsiElement psiElement : replacements.keySet()) {
        psiElement.replace(replacements.get(psiElement));
      }
    }

    /**
     * Builds the textual skeleton of the lambda, e.g. "(a,b)-> {}" or "a-> {}";
     * the empty body is replaced with the real method body by the caller.
     */
    private static String composeLambdaText(PsiMethod method) {
      final StringBuilder buf = new StringBuilder();
      final PsiParameter[] parameters = method.getParameterList().getParameters();
      if (parameters.length != 1) {
        buf.append("(");
      }
      buf.append(StringUtil.join(parameters, ReplaceWithLambdaFix::composeParameter, ","));
      if (parameters.length != 1) {
        buf.append(")");
      }
      buf.append("-> {}");
      return buf.toString();
    }

    private static String composeParameter(PsiParameter parameter) {
      String parameterName = parameter.getName();
      if (parameterName == null) {
        parameterName = "";
      }
      return parameterName;
    }
  }

  /**
   * Returns {@code true} if an unqualified call to {@code psiMethod} inside the
   * anonymous class would resolve differently in a lambda (where there is no
   * anonymous-class {@code this} to dispatch through).
   */
  public static boolean functionalInterfaceMethodReferenced(PsiMethod psiMethod,
                                                            PsiAnonymousClass anonymClass,
                                                            PsiCallExpression callExpression) {
    if (psiMethod != null && !psiMethod.hasModifierProperty(PsiModifier.STATIC)) {
      final PsiClass containingClass = psiMethod.getContainingClass();
      if (containingClass != null && CommonClassNames.JAVA_LANG_OBJECT.equals(containingClass.getQualifiedName())) {
        return false;
      }

      if (callExpression instanceof PsiMethodCallExpression &&
          ((PsiMethodCallExpression)callExpression).getMethodExpression().isQualified()) {
        return false;
      }

      if (InheritanceUtil.isInheritorOrSelf(anonymClass, containingClass, true) &&
          !InheritanceUtil.hasEnclosingInstanceInScope(containingClass, anonymClass.getParent(), true, true)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Re-inserts the previously copied comments before the statement/field that now
   * contains the lambda.
   */
  public static void restoreComments(Collection<PsiComment> comments, PsiElement lambda) {
    PsiElement anchor = PsiTreeUtil.getParentOfType(lambda, PsiStatement.class, PsiField.class);
    if (anchor == null) {
      anchor = lambda;
    }
    for (PsiComment comment : comments) {
      anchor.getParent().addBefore(comment, anchor);
    }
  }

  /**
   * Visitor that detects references whose meaning would change (or become illegal)
   * after the anonymous class is turned into a lambda.
   */
  private static class ForbiddenRefsChecker extends JavaRecursiveElementWalkingVisitor {
    private boolean myBodyContainsForbiddenRefs;

    private final PsiMethod myMethod;
    private final PsiAnonymousClass myAnonymClass;

    public ForbiddenRefsChecker(PsiMethod method, PsiAnonymousClass aClass) {
      myMethod = method;
      myAnonymClass = aClass;
    }

    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression methodCallExpression) {
      if (myBodyContainsForbiddenRefs) return;

      super.visitMethodCallExpression(methodCallExpression);
      final PsiMethod psiMethod = methodCallExpression.resolveMethod();
      // Forbidden: recursive call of the functional method, a call that would rebind to
      // an outer instance, or an unqualified getClass() (whose result would change).
      if (psiMethod == myMethod ||
          functionalInterfaceMethodReferenced(psiMethod, myAnonymClass, methodCallExpression) ||
          psiMethod != null &&
          !methodCallExpression.getMethodExpression().isQualified() &&
          "getClass".equals(psiMethod.getName()) &&
          psiMethod.getParameterList().getParametersCount() == 0) {
        myBodyContainsForbiddenRefs = true;
      }
    }

    @Override
    public void visitThisExpression(PsiThisExpression expression) {
      if (myBodyContainsForbiddenRefs) return;

      // Unqualified "this" refers to the anonymous instance; inside a lambda it would
      // refer to the enclosing instance instead.
      if (expression.getQualifier() == null) {
        myBodyContainsForbiddenRefs = true;
      }
    }

    @Override
    public void visitSuperExpression(PsiSuperExpression expression) {
      if (myBodyContainsForbiddenRefs) return;

      // Unqualified "super" has no meaning inside a lambda.
      if (expression.getQualifier() == null) {
        myBodyContainsForbiddenRefs = true;
      }
    }

    @Override
    public void visitVariable(PsiVariable variable) {
      if (myBodyContainsForbiddenRefs) return;

      super.visitVariable(variable);
    }

    @Override
    public void visitReferenceExpression(PsiReferenceExpression expression) {
      if (myBodyContainsForbiddenRefs) return;

      super.visitReferenceExpression(expression);
      if (!(expression.getParent() instanceof PsiMethodCallExpression)) {
        final PsiMember member = PsiTreeUtil.getParentOfType(myAnonymClass, PsiMember.class);
        if (member instanceof PsiField || member instanceof PsiClassInitializer) {
          // In a field/initializer context, reading a field of the same class that is
          // initialized later would hit Java's forward-reference rules once inlined.
          final PsiElement resolved = expression.resolve();
          final PsiClass memberContainingClass = member.getContainingClass();
          if (resolved instanceof PsiField &&
              memberContainingClass != null &&
              PsiTreeUtil.isAncestor(((PsiField)resolved).getContainingClass(), memberContainingClass, false) &&
              expression.getQualifierExpression() == null) {
            final PsiExpression initializer = ((PsiField)resolved).getInitializer();
            if (initializer == null ||
                resolved == member ||
                initializer.getTextOffset() > myAnonymClass.getTextOffset() &&
                ((PsiField)resolved).hasModifierProperty(PsiModifier.STATIC) == member.hasModifierProperty(PsiModifier.STATIC)) {
              myBodyContainsForbiddenRefs = true;
            }
          }
        }
        else {
          // In a constructor, a blank final field may still be unassigned at the point
          // of the anonymous class; a lambda body would read it eagerly-checked.
          final PsiMethod method = PsiTreeUtil.getParentOfType(myAnonymClass, PsiMethod.class);
          if (method != null && method.isConstructor()) {
            final PsiElement resolved = expression.resolve();
            if (resolved instanceof PsiField &&
                ((PsiField)resolved).hasModifierProperty(PsiModifier.FINAL) &&
                ((PsiField)resolved).getInitializer() == null &&
                ((PsiField)resolved).getContainingClass() == method.getContainingClass()) {
              try {
                final PsiCodeBlock constructorBody = method.getBody();
                if (constructorBody != null) {
                  final ControlFlow flow = HighlightControlFlowUtil.getControlFlowNoConstantEvaluate(constructorBody);
                  final int startOffset = flow.getStartOffset(myAnonymClass);
                  final Collection<PsiVariable> writtenVariables = ControlFlowUtil.getWrittenVariables(flow, 0, startOffset, false);
                  if (!writtenVariables.contains(resolved)) {
                    myBodyContainsForbiddenRefs = true;
                  }
                }
              }
              catch (AnalysisCanceledException e) {
                // Control-flow analysis failed — be conservative and refuse the conversion.
                myBodyContainsForbiddenRefs = true;
              }
            }
          }
        }
      }
    }

    public boolean hasForbiddenRefs() {
      return myBodyContainsForbiddenRefs;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.platform.cache.query;

import java.util.Iterator;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.processors.cache.query.QueryCursorEx;
import org.apache.ignite.internal.processors.platform.PlatformAbstractTarget;
import org.apache.ignite.internal.processors.platform.PlatformContext;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;

/**
 * Base platform interop wrapper around a {@link QueryCursorEx}: serializes query
 * results to a binary stream in response to operation codes coming from the
 * platform (non-Java) side.
 */
public abstract class PlatformAbstractQueryCursor<T> extends PlatformAbstractTarget implements AutoCloseable {
    /** Get all entries at once. */
    private static final int OP_GET_ALL = 1;

    /** Get the next batch of entries. */
    private static final int OP_GET_BATCH = 2;

    /** Get single entry. */
    private static final int OP_GET_SINGLE = 3;

    /** Start iterating. */
    private static final int OP_ITERATOR = 4;

    /** Close iterator. */
    private static final int OP_ITERATOR_CLOSE = 5;

    /** Check whether the iterator has more entries. */
    private static final int OP_ITERATOR_HAS_NEXT = 6;

    /** Underlying cursor. */
    private final QueryCursorEx<T> cursor;

    /** Batch size. */
    private final int batchSize;

    /** Underlying iterator; {@code null} until OP_ITERATOR has been processed. */
    private Iterator<T> iter;

    /**
     * Constructor.
     *
     * @param platformCtx Context.
     * @param cursor Underlying cursor.
     * @param batchSize Batch size.
     */
    PlatformAbstractQueryCursor(PlatformContext platformCtx, QueryCursorEx<T> cursor, int batchSize) {
        super(platformCtx);

        this.cursor = cursor;
        this.batchSize = batchSize;
    }

    /** {@inheritDoc} */
    @Override public void processOutStream(int type, final BinaryRawWriterEx writer) throws IgniteCheckedException {
        switch (type) {
            case OP_GET_BATCH: {
                assert iter != null : "iterator() has not been called";

                try {
                    // Reserve space for the entry count, then back-patch it once known.
                    int cntPos = writer.reserveInt();

                    int cnt = 0;

                    while (cnt < batchSize && iter.hasNext()) {
                        write(writer, iter.next());

                        cnt++;
                    }

                    writer.writeInt(cntPos, cnt);

                    // Flag telling the platform side whether more batches remain.
                    writer.writeBoolean(iter.hasNext());

                    // Eagerly release resources once the cursor is exhausted.
                    if (!iter.hasNext())
                        cursor.close();
                }
                catch (Exception err) {
                    throw PlatformUtils.unwrapQueryException(err);
                }

                break;
            }

            case OP_GET_SINGLE: {
                assert iter != null : "iterator() has not been called";

                try {
                    if (iter.hasNext()) {
                        write(writer, iter.next());

                        return;
                    }
                }
                catch (Exception err) {
                    throw PlatformUtils.unwrapQueryException(err);
                }

                throw new IgniteCheckedException("No more data available.");
            }

            case OP_GET_ALL: {
                try {
                    // Same reserve/back-patch pattern; the consumer counts entries as it writes.
                    int pos = writer.reserveInt();

                    Consumer<T> consumer = new Consumer<>(this, writer);

                    cursor.getAll(consumer);

                    writer.writeInt(pos, consumer.cnt);
                }
                catch (Exception err) {
                    throw PlatformUtils.unwrapQueryException(err);
                }

                break;
            }

            default:
                super.processOutStream(type, writer);
        }
    }

    /** {@inheritDoc} */
    @Override public long processInLongOutLong(int type, long val) throws IgniteCheckedException {
        switch (type) {
            case OP_ITERATOR:
                iter = cursor.iterator();

                return TRUE;

            case OP_ITERATOR_CLOSE:
                cursor.close();

                return TRUE;

            case OP_ITERATOR_HAS_NEXT:
                assert iter != null : "iterator() has not been called";

                return iter.hasNext() ? TRUE : FALSE;
        }

        return super.processInLongOutLong(type, val);
    }

    /** {@inheritDoc} */
    @Override public void close() throws Exception {
        cursor.close();
    }

    /**
     * Write value to the stream. Extension point to perform conversions on the object before writing it.
     *
     * @param writer Writer.
     * @param val Value.
     */
    protected abstract void write(BinaryRawWriterEx writer, T val);

    /**
     * Gets the cursor.
     *
     * @return Cursor.
     */
    public QueryCursorEx<T> cursor() {
        return cursor;
    }

    /**
     * Query cursor consumer: forwards each entry to the owning cursor's
     * {@link #write} method and counts the entries written.
     */
    private static class Consumer<T> implements QueryCursorEx.Consumer<T> {
        /** Current query cursor. */
        private final PlatformAbstractQueryCursor<T> cursor;

        /** Writer. */
        private final BinaryRawWriterEx writer;

        /** Count of entries consumed so far. */
        private int cnt;

        /**
         * Constructor.
         *
         * @param cursor Owning query cursor.
         * @param writer Writer.
         */
        Consumer(PlatformAbstractQueryCursor<T> cursor, BinaryRawWriterEx writer) {
            this.cursor = cursor;
            this.writer = writer;
        }

        /** {@inheritDoc} */
        @Override public void consume(T val) throws IgniteCheckedException {
            cursor.write(writer, val);

            cnt++;
        }
    }
}
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.http;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.springframework.util.Assert;
import org.springframework.util.InvalidMimeTypeException;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.comparator.CompoundComparator;

/**
 * A sub-class of {@link MimeType} that adds support for quality parameters as defined
 * in the HTTP specification.
 *
 * @author Arjen Poutsma
 * @author Juergen Hoeller
 * @author Rossen Stoyanchev
 * @author Sebastien Deleuze
 * @since 3.0
 * @see <a href="http://tools.ietf.org/html/rfc7231#section-3.1.1.1">HTTP 1.1: Semantics
 * and Content, section 3.1.1.1</a>
 */
public class MediaType extends MimeType implements Serializable {

	private static final long serialVersionUID = 2069937152339670231L;

	/**
	 * Public constant media type that includes all media ranges (i.e. "&#42;/&#42;").
	 */
	public static final MediaType ALL;

	/**
	 * A String equivalent of {@link MediaType#ALL}.
	 */
	public static final String ALL_VALUE = "*/*";

	/**
	 * Public constant media type for {@code application/atom+xml}.
	 */
	public final static MediaType APPLICATION_ATOM_XML;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_ATOM_XML}.
	 */
	public final static String APPLICATION_ATOM_XML_VALUE = "application/atom+xml";

	/**
	 * Public constant media type for {@code application/x-www-form-urlencoded}.
	 */
	public final static MediaType APPLICATION_FORM_URLENCODED;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_FORM_URLENCODED}.
	 */
	public final static String APPLICATION_FORM_URLENCODED_VALUE = "application/x-www-form-urlencoded";

	/**
	 * Public constant media type for {@code application/json}.
	 * @see #APPLICATION_JSON_UTF8
	 */
	public final static MediaType APPLICATION_JSON;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_JSON}.
	 * @see #APPLICATION_JSON_UTF8_VALUE
	 */
	public final static String APPLICATION_JSON_VALUE = "application/json";

	/**
	 * Public constant media type for {@code application/json;charset=UTF-8}.
	 */
	public final static MediaType APPLICATION_JSON_UTF8;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_JSON_UTF8}.
	 */
	public final static String APPLICATION_JSON_UTF8_VALUE = APPLICATION_JSON_VALUE + ";charset=UTF-8";

	/**
	 * Public constant media type for {@code application/octet-stream}.
	 */
	public final static MediaType APPLICATION_OCTET_STREAM;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_OCTET_STREAM}.
	 */
	public final static String APPLICATION_OCTET_STREAM_VALUE = "application/octet-stream";

	/**
	 * Public constant media type for {@code application/pdf}.
	 */
	public final static MediaType APPLICATION_PDF;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_PDF}.
	 */
	public final static String APPLICATION_PDF_VALUE = "application/pdf";

	/**
	 * Public constant media type for {@code application/xhtml+xml}.
	 */
	public final static MediaType APPLICATION_XHTML_XML;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_XHTML_XML}.
	 */
	public final static String APPLICATION_XHTML_XML_VALUE = "application/xhtml+xml";

	/**
	 * Public constant media type for {@code application/xml}.
	 */
	public final static MediaType APPLICATION_XML;

	/**
	 * A String equivalent of {@link MediaType#APPLICATION_XML}.
	 */
	public final static String APPLICATION_XML_VALUE = "application/xml";

	/**
	 * Public constant media type for {@code image/gif}.
	 */
	public final static MediaType IMAGE_GIF;

	/**
	 * A String equivalent of {@link MediaType#IMAGE_GIF}.
	 */
	public final static String IMAGE_GIF_VALUE = "image/gif";

	/**
	 * Public constant media type for {@code image/jpeg}.
	 */
	public final static MediaType IMAGE_JPEG;

	/**
	 * A String equivalent of {@link MediaType#IMAGE_JPEG}.
	 */
	public final static String IMAGE_JPEG_VALUE = "image/jpeg";

	/**
	 * Public constant media type for {@code image/png}.
	 */
	public final static MediaType IMAGE_PNG;

	/**
	 * A String equivalent of {@link MediaType#IMAGE_PNG}.
	 */
	public final static String IMAGE_PNG_VALUE = "image/png";

	/**
	 * Public constant media type for {@code multipart/form-data}.
	 */
	public final static MediaType MULTIPART_FORM_DATA;

	/**
	 * A String equivalent of {@link MediaType#MULTIPART_FORM_DATA}.
	 */
	public final static String MULTIPART_FORM_DATA_VALUE = "multipart/form-data";

	/**
	 * Public constant media type for {@code text/html}.
	 */
	public final static MediaType TEXT_HTML;

	/**
	 * A String equivalent of {@link MediaType#TEXT_HTML}.
	 */
	public final static String TEXT_HTML_VALUE = "text/html";

	/**
	 * Public constant media type for {@code text/markdown}.
	 */
	public final static MediaType TEXT_MARKDOWN;

	/**
	 * A String equivalent of {@link MediaType#TEXT_MARKDOWN}.
	 */
	public final static String TEXT_MARKDOWN_VALUE = "text/markdown";

	/**
	 * Public constant media type for {@code text/plain}.
	 */
	public final static MediaType TEXT_PLAIN;

	/**
	 * A String equivalent of {@link MediaType#TEXT_PLAIN}.
	 */
	public final static String TEXT_PLAIN_VALUE = "text/plain";

	/**
	 * Public constant media type for {@code text/xml}.
	 */
	public final static MediaType TEXT_XML;

	/**
	 * A String equivalent of {@link MediaType#TEXT_XML}.
	 */
	public final static String TEXT_XML_VALUE = "text/xml";

	// Name of the "q" (quality factor) parameter, per RFC 7231 section 5.3.1.
	private static final String PARAM_QUALITY_FACTOR = "q";


	// The MediaType constants are initialized here, after all *_VALUE strings
	// above have been assigned, by parsing those strings.
	static {
		ALL = valueOf(ALL_VALUE);
		APPLICATION_ATOM_XML = valueOf(APPLICATION_ATOM_XML_VALUE);
		APPLICATION_FORM_URLENCODED = valueOf(APPLICATION_FORM_URLENCODED_VALUE);
		APPLICATION_JSON = valueOf(APPLICATION_JSON_VALUE);
		APPLICATION_JSON_UTF8 = valueOf(APPLICATION_JSON_UTF8_VALUE);
		APPLICATION_OCTET_STREAM = valueOf(APPLICATION_OCTET_STREAM_VALUE);
		APPLICATION_PDF = valueOf(APPLICATION_PDF_VALUE);
		APPLICATION_XHTML_XML = valueOf(APPLICATION_XHTML_XML_VALUE);
		APPLICATION_XML = valueOf(APPLICATION_XML_VALUE);
		IMAGE_GIF = valueOf(IMAGE_GIF_VALUE);
		IMAGE_JPEG = valueOf(IMAGE_JPEG_VALUE);
		IMAGE_PNG = valueOf(IMAGE_PNG_VALUE);
		MULTIPART_FORM_DATA = valueOf(MULTIPART_FORM_DATA_VALUE);
		TEXT_HTML = valueOf(TEXT_HTML_VALUE);
		TEXT_MARKDOWN = valueOf(TEXT_MARKDOWN_VALUE);
		TEXT_PLAIN = valueOf(TEXT_PLAIN_VALUE);
		TEXT_XML = valueOf(TEXT_XML_VALUE);
	}


	/**
	 * Create a new {@code MediaType} for the given primary type.
	 * <p>The {@linkplain #getSubtype() subtype} is set to "&#42;", parameters empty.
	 * @param type the primary type
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type) {
		super(type);
	}

	/**
	 * Create a new {@code MediaType} for the given primary type and subtype.
	 * <p>The parameters are empty.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype) {
		super(type, subtype, Collections.<String, String>emptyMap());
	}

	/**
	 * Create a new {@code MediaType} for the given type, subtype, and character set.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @param charset the character set
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype, Charset charset) {
		super(type, subtype, charset);
	}

	/**
	 * Create a new {@code MediaType} for the given type, subtype, and quality value.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @param qualityValue the quality value (stored as the "q" parameter)
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype, double qualityValue) {
		this(type, subtype, Collections.singletonMap(PARAM_QUALITY_FACTOR, Double.toString(qualityValue)));
	}

	/**
	 * Copy-constructor that copies the type, subtype and parameters of the given
	 * {@code MediaType}, and allows to set the specified character set.
	 * @param other the other media type
	 * @param charset the character set
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(MediaType other, Charset charset) {
		super(other, charset);
	}

	/**
	 * Copy-constructor that copies the type and subtype of the given {@code MediaType},
	 * and allows for different parameters.
	 * @param other the other media type
	 * @param parameters the parameters, may be {@code null}
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(MediaType other, Map<String, String> parameters) {
		super(other.getType(), other.getSubtype(), parameters);
	}

	/**
	 * Create a new {@code MediaType} for the given type, subtype, and parameters.
	 * @param type the primary type
	 * @param subtype the subtype
	 * @param parameters the parameters, may be {@code null}
	 * @throws IllegalArgumentException if any of the parameters contain illegal characters
	 */
	public MediaType(String type, String subtype, Map<String, String> parameters) {
		super(type, subtype, parameters);
	}


	/**
	 * Validate the "q" parameter in addition to the checks performed by the superclass:
	 * its (unquoted) value must parse as a double between 0.0 and 1.0 inclusive.
	 */
	@Override
	protected void checkParameters(String attribute, String value) {
		super.checkParameters(attribute, value);
		if (PARAM_QUALITY_FACTOR.equals(attribute)) {
			value = unquote(value);
			double d = Double.parseDouble(value);
			Assert.isTrue(d >= 0D && d <= 1D,
					"Invalid quality value \"" + value + "\": should be between 0.0 and 1.0");
		}
	}

	/**
	 * Return the quality value, as indicated by a {@code q} parameter, if any.
	 * Defaults to {@code 1.0}.
	 * @return the quality factor as a double value
	 */
	public double getQualityValue() {
		String qualityFactory = getParameter(PARAM_QUALITY_FACTOR);
		return (qualityFactory != null ? Double.parseDouble(unquote(qualityFactory)) : 1D);
	}

	/**
	 * Indicate whether this {@code MediaType} includes the given media type.
	 * <p>For instance, {@code text/*} includes {@code text/plain} and {@code text/html}, and {@code application/*+xml}
	 * includes {@code application/soap+xml}, etc. This method is <b>not</b> symmetric.
	 * @param other the reference media type with which to compare
	 * @return {@code true} if this media type includes the given media type; {@code false} otherwise
	 */
	public boolean includes(MediaType other) {
		return super.includes(other);
	}

	/**
	 * Indicate whether this {@code MediaType} is compatible with the given media type.
	 * <p>For instance, {@code text/*} is compatible with {@code text/plain}, {@code text/html}, and vice versa.
	 * In effect, this method is similar to {@link #includes(MediaType)}, except that it <b>is</b> symmetric.
	 * @param other the reference media type with which to compare
	 * @return {@code true} if this media type is compatible with the given media type; {@code false} otherwise
	 */
	public boolean isCompatibleWith(MediaType other) {
		return super.isCompatibleWith(other);
	}

	/**
	 * Return a replica of this instance with the quality value of the given MediaType.
	 * @return the same instance if the given MediaType doesn't have a quality value, or a new one otherwise
	 */
	public MediaType copyQualityValue(MediaType mediaType) {
		if (!mediaType.getParameters().containsKey(PARAM_QUALITY_FACTOR)) {
			return this;
		}
		Map<String, String> params = new LinkedHashMap<String, String>(getParameters());
		params.put(PARAM_QUALITY_FACTOR, mediaType.getParameters().get(PARAM_QUALITY_FACTOR));
		return new MediaType(this, params);
	}

	/**
	 * Return a replica of this instance with its quality value removed.
	 * @return the same instance if the media type doesn't contain a quality value, or a new one otherwise
	 */
	public MediaType removeQualityValue() {
		if (!getParameters().containsKey(PARAM_QUALITY_FACTOR)) {
			return this;
		}
		Map<String, String> params = new LinkedHashMap<String, String>(getParameters());
		params.remove(PARAM_QUALITY_FACTOR);
		return new MediaType(this, params);
	}


	/**
	 * Parse the given String value into a {@code MediaType} object,
	 * with this method name following the 'valueOf' naming convention
	 * (as supported by {@link org.springframework.core.convert.ConversionService}).
	 * @see #parseMediaType(String)
	 */
	public static MediaType valueOf(String value) {
		return parseMediaType(value);
	}

	/**
	 * Parse the given String into a single {@code MediaType}.
	 * @param mediaType the string to parse
	 * @return the media type
	 * @throws InvalidMediaTypeException if the string cannot be parsed
	 */
	public static MediaType parseMediaType(String mediaType) {
		MimeType type;
		try {
			type = MimeTypeUtils.parseMimeType(mediaType);
		}
		catch (InvalidMimeTypeException ex) {
			// Re-throw as the media-type-specific exception type
			throw new InvalidMediaTypeException(ex);
		}
		try {
			// The MediaType constructor additionally validates the "q" parameter
			return new MediaType(type.getType(), type.getSubtype(), type.getParameters());
		}
		catch (IllegalArgumentException ex) {
			throw new InvalidMediaTypeException(mediaType, ex.getMessage());
		}
	}

	/**
	 * Parse the given, comma-separated string into a list of {@code MediaType} objects.
	 * <p>This method can be used to parse an Accept or Content-Type header.
	 * @param mediaTypes the string to parse
	 * @return the list of media types
	 * @throws IllegalArgumentException if the string cannot be parsed
	 */
	public static List<MediaType> parseMediaTypes(String mediaTypes) {
		if (!StringUtils.hasLength(mediaTypes)) {
			return Collections.emptyList();
		}
		String[] tokens = mediaTypes.split(",\\s*");
		List<MediaType> result = new ArrayList<MediaType>(tokens.length);
		for (String token : tokens) {
			result.add(parseMediaType(token));
		}
		return result;
	}

	/**
	 * Return a string representation of the given list of {@code MediaType} objects.
	 * <p>This method can be used for an {@code Accept} or {@code Content-Type} header.
	 * @param mediaTypes the media types to create a string representation for
	 * @return the string representation
	 */
	public static String toString(Collection<MediaType> mediaTypes) {
		return MimeTypeUtils.toString(mediaTypes);
	}

	/**
	 * Sorts the given list of {@code MediaType} objects by specificity.
	 * <p>Given two media types:
	 * <ol>
	 * <li>if either media type has a {@linkplain #isWildcardType() wildcard type}, then the media type without the
	 * wildcard is ordered before the other.</li>
	 * <li>if the two media types have different {@linkplain #getType() types}, then they are considered equal and
	 * remain their current order.</li>
	 * <li>if either media type has a {@linkplain #isWildcardSubtype() wildcard subtype}, then the media type without
	 * the wildcard is sorted before the other.</li>
	 * <li>if the two media types have different {@linkplain #getSubtype() subtypes}, then they are considered equal
	 * and remain their current order.</li>
	 * <li>if the two media types have different {@linkplain #getQualityValue() quality value}, then the media type
	 * with the highest quality value is ordered before the other.</li>
	 * <li>if the two media types have a different amount of {@linkplain #getParameter(String) parameters}, then the
	 * media type with the most parameters is ordered before the other.</li>
	 * </ol>
	 * <p>For example:
	 * <blockquote>audio/basic &lt; audio/* &lt; *&#047;*</blockquote>
	 * <blockquote>audio/* &lt; audio/*;q=0.7 &lt; audio/*;q=0.3</blockquote>
	 * <blockquote>audio/basic;level=1 &lt; audio/basic</blockquote>
	 * <blockquote>audio/basic == text/html</blockquote>
	 * <blockquote>audio/basic == audio/wave</blockquote>
	 * @param mediaTypes the list of media types to be sorted
	 * @see <a href="http://tools.ietf.org/html/rfc7231#section-5.3.2">HTTP 1.1: Semantics
	 * and Content, section 5.3.2</a>
	 */
	public static void sortBySpecificity(List<MediaType> mediaTypes) {
		Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
		if (mediaTypes.size() > 1) {
			Collections.sort(mediaTypes, SPECIFICITY_COMPARATOR);
		}
	}

	/**
	 * Sorts the given list of {@code MediaType} objects by quality value.
	 * <p>Given two media types:
	 * <ol>
	 * <li>if the two media types have different {@linkplain #getQualityValue() quality value}, then the media type
	 * with the highest quality value is ordered before the other.</li>
	 * <li>if either media type has a {@linkplain #isWildcardType() wildcard type}, then the media type without the
	 * wildcard is ordered before the other.</li>
	 * <li>if the two media types have different {@linkplain #getType() types}, then they are considered equal and
	 * remain their current order.</li>
	 * <li>if either media type has a {@linkplain #isWildcardSubtype() wildcard subtype}, then the media type without
	 * the wildcard is sorted before the other.</li>
	 * <li>if the two media types have different {@linkplain #getSubtype() subtypes}, then they are considered equal
	 * and remain their current order.</li>
	 * <li>if the two media types have a different amount of {@linkplain #getParameter(String) parameters}, then the
	 * media type with the most parameters is ordered before the other.</li>
	 * </ol>
	 * @param mediaTypes the list of media types to be sorted
	 * @see #getQualityValue()
	 */
	public static void sortByQualityValue(List<MediaType> mediaTypes) {
		Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
		if (mediaTypes.size() > 1) {
			Collections.sort(mediaTypes, QUALITY_VALUE_COMPARATOR);
		}
	}

	/**
	 * Sorts the given list of {@code MediaType} objects by specificity as the
	 * primary criteria and quality value the secondary.
	 * @see MediaType#sortBySpecificity(List)
	 * @see MediaType#sortByQualityValue(List)
	 */
	public static void sortBySpecificityAndQuality(List<MediaType> mediaTypes) {
		Assert.notNull(mediaTypes, "'mediaTypes' must not be null");
		if (mediaTypes.size() > 1) {
			Collections.sort(mediaTypes, new CompoundComparator<MediaType>(
					MediaType.SPECIFICITY_COMPARATOR, MediaType.QUALITY_VALUE_COMPARATOR));
		}
	}


	/**
	 * Comparator used by {@link #sortByQualityValue(List)}.
	 */
	public static final Comparator<MediaType> QUALITY_VALUE_COMPARATOR = new Comparator<MediaType>() {

		@Override
		public int compare(MediaType mediaType1, MediaType mediaType2) {
			// Primary criterion: higher quality value sorts first
			double quality1 = mediaType1.getQualityValue();
			double quality2 = mediaType2.getQualityValue();
			int qualityComparison = Double.compare(quality2, quality1);
			if (qualityComparison != 0) {
				return qualityComparison;  // audio/*;q=0.7 < audio/*;q=0.3
			}
			else if (mediaType1.isWildcardType() && !mediaType2.isWildcardType()) {  // */* < audio/*
				return 1;
			}
			else if (mediaType2.isWildcardType() && !mediaType1.isWildcardType()) {  // audio/* > */*
				return -1;
			}
			else if (!mediaType1.getType().equals(mediaType2.getType())) {  // audio/basic == text/html
				return 0;
			}
			else {  // mediaType1.getType().equals(mediaType2.getType())
				if (mediaType1.isWildcardSubtype() && !mediaType2.isWildcardSubtype()) {  // audio/* < audio/basic
					return 1;
				}
				else if (mediaType2.isWildcardSubtype() && !mediaType1.isWildcardSubtype()) {  // audio/basic > audio/*
					return -1;
				}
				else if (!mediaType1.getSubtype().equals(mediaType2.getSubtype())) {  // audio/basic == audio/wave
					return 0;
				}
				else {
					// Same type and subtype: more parameters sorts first
					int paramsSize1 = mediaType1.getParameters().size();
					int paramsSize2 = mediaType2.getParameters().size();
					return (paramsSize2 < paramsSize1 ? -1 : (paramsSize2 == paramsSize1 ? 0 : 1));  // audio/basic;level=1 < audio/basic
				}
			}
		}
	};


	/**
	 * Comparator used by {@link #sortBySpecificity(List)}.
	 */
	public static final Comparator<MediaType> SPECIFICITY_COMPARATOR = new SpecificityComparator<MediaType>() {

		@Override
		protected int compareParameters(MediaType mediaType1, MediaType mediaType2) {
			// Quality value breaks ties before the parameter-count comparison
			// inherited from SpecificityComparator
			double quality1 = mediaType1.getQualityValue();
			double quality2 = mediaType2.getQualityValue();
			int qualityComparison = Double.compare(quality2, quality1);
			if (qualityComparison != 0) {
				return qualityComparison;  // audio/*;q=0.7 < audio/*;q=0.3
			}
			return super.compareParameters(mediaType1, mediaType2);
		}
	};

}
/*
 * Copyright 2015-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.store.service;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import org.onosproject.store.primitives.ConsistentMapBackedJavaMap;

import com.google.common.base.Objects;

/**
 * Test implementation of the consistent map, backed by an in-memory
 * {@link HashMap}. Not thread-safe; intended for single-threaded unit tests.
 */
public final class TestConsistentMap<K, V> extends ConsistentMapAdapter<K, V> {

    private final List<MapEventListener<K, V>> listeners;
    private final Map<K, Versioned<V>> map;
    private final String mapName;
    // Monotonically increasing counter used as the version for each new value.
    private final AtomicLong counter = new AtomicLong(0);
    private final Serializer serializer;

    private TestConsistentMap(String mapName, Serializer serializer) {
        map = new HashMap<>();
        listeners = new LinkedList<>();
        this.mapName = mapName;
        this.serializer = serializer;
    }

    /**
     * Wraps a raw value with a fresh version number and the current timestamp.
     */
    private Versioned<V> version(V v) {
        return new Versioned<>(v, counter.incrementAndGet(), System.currentTimeMillis());
    }

    /**
     * Notify all listeners of an event.
     */
    private void notifyListeners(String mapName, K key,
                                 Versioned<V> newvalue, Versioned<V> oldValue) {
        MapEvent<K, V> event = new MapEvent<>(mapName, key, newvalue, oldValue);
        listeners.forEach(
                listener -> listener.event(event)
        );
    }

    @Override
    public int size() {
        return map.size();
    }

    @Override
    public boolean isEmpty() {
        return map.isEmpty();
    }

    @Override
    public boolean containsKey(K key) {
        return map.containsKey(key);
    }

    @Override
    public boolean containsValue(V value) {
        return map.containsValue(value);
    }

    @Override
    public Versioned<V> get(K key) {
        return map.get(key);
    }

    @Override
    public Versioned<V> computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
        AtomicBoolean updated = new AtomicBoolean(false);
        Versioned<V> result = map.compute(key, (k, v) -> {
            if (v == null) {
                updated.set(true);
                return version(mappingFunction.apply(key));
            }
            return v;
        });
        if (updated.get()) {
            notifyListeners(mapName, key, result, null);
        }
        return result;
    }

    @Override
    public Versioned<V> compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        AtomicBoolean updated = new AtomicBoolean(false);
        AtomicReference<Versioned<V>> previousValue = new AtomicReference<>();
        Versioned<V> result = map.compute(key, (k, v) -> {
            updated.set(true);
            // Deep-copy the previous value via the serializer so the event
            // carries a snapshot unaffected by later mutation.
            previousValue.set(serializer.decode(serializer.encode(v)));
            return version(remappingFunction.apply(k, Versioned.valueOrNull(v)));
        });
        if (updated.get()) {
            notifyListeners(mapName, key, result, previousValue.get());
        }
        return result;
    }

    @Override
    public Versioned<V> computeIfPresent(K key,
            BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        AtomicBoolean updated = new AtomicBoolean(false);
        AtomicReference<Versioned<V>> previousValue = new AtomicReference<>();
        Versioned<V> result = map.compute(key, (k, v) -> {
            if (v != null) {
                updated.set(true);
                previousValue.set(serializer.decode(serializer.encode(v)));
                return version(remappingFunction.apply(k, v.value()));
            }
            return v;
        });
        if (updated.get()) {
            notifyListeners(mapName, key, result, previousValue.get());
        }
        return result;
    }

    @Override
    public Versioned<V> computeIf(K key, Predicate<? super V> condition,
            BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        AtomicBoolean updated = new AtomicBoolean(false);
        AtomicReference<Versioned<V>> previousValue = new AtomicReference<>();
        Versioned<V> result = map.compute(key, (k, v) -> {
            if (condition.test(Versioned.valueOrNull(v))) {
                previousValue.set(serializer.decode(serializer.encode(v)));
                updated.set(true);
                return version(remappingFunction.apply(k, Versioned.valueOrNull(v)));
            }
            return v;
        });
        if (updated.get()) {
            notifyListeners(mapName, key, result, previousValue.get());
        }
        return result;
    }

    @Override
    public Versioned<V> put(K key, V value) {
        Versioned<V> newValue = version(value);
        Versioned<V> previousValue = map.put(key, newValue);
        notifyListeners(mapName, key, newValue, previousValue);
        return previousValue;
    }

    @Override
    public Versioned<V> putAndGet(K key, V value) {
        Versioned<V> newValue = version(value);
        Versioned<V> previousValue = map.put(key, newValue);
        notifyListeners(mapName, key, newValue, previousValue);
        return newValue;
    }

    @Override
    public Versioned<V> remove(K key) {
        Versioned<V> result = map.remove(key);
        // Only notify when something was actually removed; otherwise listeners
        // would receive a meaningless event with both values null.
        if (result != null) {
            notifyListeners(mapName, key, null, result);
        }
        return result;
    }

    @Override
    public void clear() {
        // Iterate over a snapshot of the keys: removing through this.remove()
        // while iterating the live keySet() would throw
        // ConcurrentModificationException (HashMap iterators are fail-fast).
        new ArrayList<>(map.keySet()).forEach(this::remove);
    }

    @Override
    public Set<K> keySet() {
        // Live view backed by the underlying map.
        return map.keySet();
    }

    @Override
    public Collection<Versioned<V>> values() {
        // Defensive copy of the current values.
        return map.values()
                .stream()
                .collect(Collectors.toList());
    }

    @Override
    public Set<Map.Entry<K, Versioned<V>>> entrySet() {
        // Live view backed by the underlying map.
        return map.entrySet();
    }

    @Override
    public Versioned<V> putIfAbsent(K key, V value) {
        Versioned<V> newValue = version(value);
        Versioned<V> result = map.putIfAbsent(key, newValue);
        if (result == null) {
            notifyListeners(mapName, key, newValue, result);
        }
        return result;
    }

    @Override
    public boolean remove(K key, V value) {
        Versioned<V> existingValue = map.get(key);
        if (Objects.equal(Versioned.valueOrNull(existingValue), value)) {
            map.remove(key);
            notifyListeners(mapName, key, null, existingValue);
            return true;
        }
        return false;
    }

    @Override
    public boolean remove(K key, long version) {
        Versioned<V> existingValue = map.get(key);
        if (existingValue == null) {
            return false;
        }
        if (existingValue.version() == version) {
            map.remove(key);
            notifyListeners(mapName, key, null, existingValue);
            return true;
        }
        return false;
    }

    @Override
    public Versioned<V> replace(K key, V value) {
        Versioned<V> existingValue = map.get(key);
        if (existingValue == null) {
            return null;
        }
        Versioned<V> newValue = version(value);
        Versioned<V> result = map.put(key, newValue);
        notifyListeners(mapName, key, newValue, result);
        return result;
    }

    @Override
    public boolean replace(K key, V oldValue, V newValue) {
        Versioned<V> existingValue = map.get(key);
        if (existingValue == null || !existingValue.value().equals(oldValue)) {
            return false;
        }
        Versioned<V> value = version(newValue);
        Versioned<V> result = map.put(key, value);
        notifyListeners(mapName, key, value, result);
        return true;
    }

    @Override
    public boolean replace(K key, long oldVersion, V newValue) {
        Versioned<V> existingValue = map.get(key);
        if (existingValue == null || existingValue.version() != oldVersion) {
            return false;
        }
        Versioned<V> value = version(newValue);
        Versioned<V> result = map.put(key, value);
        notifyListeners(mapName, key, value, result);
        return true;
    }

    @Override
    public void addListener(MapEventListener<K, V> listener) {
        listeners.add(listener);
    }

    @Override
    public void removeListener(MapEventListener<K, V> listener) {
        listeners.remove(listener);
    }

    @Override
    public Map<K, V> asJavaMap() {
        return new ConsistentMapBackedJavaMap<>(this);
    }

    /**
     * Returns a new builder for a test consistent map.
     *
     * @param <K> key type
     * @param <V> value type
     * @return new builder
     */
    public static <K, V> Builder<K, V> builder() {
        return new Builder<>();
    }

    /**
     * Builder for {@link TestConsistentMap}.
     */
    public static class Builder<K, V> extends ConsistentMapBuilder<K, V> {

        @Override
        public ConsistentMap<K, V> build() {
            return new TestConsistentMap<>(name(), serializer());
        }

        @Override
        public AsyncConsistentMap<K, V> buildAsyncMap() {
            return null;
        }
    }

}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. /** * AttachInternetGatewayResponseType.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST) */ package com.amazon.ec2; /** * AttachInternetGatewayResponseType bean class */ public class AttachInternetGatewayResponseType implements org.apache.axis2.databinding.ADBBean{ /* This type was generated from the piece of schema that had name = AttachInternetGatewayResponseType Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/ Namespace Prefix = ns1 */ private static java.lang.String generatePrefix(java.lang.String namespace) { if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){ return "ns1"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * field for RequestId */ protected java.lang.String localRequestId ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getRequestId(){ return localRequestId; } /** * Auto generated setter method * @param param RequestId */ public void setRequestId(java.lang.String param){ this.localRequestId=param; } /** * field for _return */ protected boolean local_return ; /** * Auto generated getter method * 
@return boolean */ public boolean get_return(){ return local_return; } /** * Auto generated setter method * @param param _return */ public void set_return(boolean param){ this.local_return=param; } /** * isReaderMTOMAware * @return true if the reader supports MTOM */ public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) { boolean isReaderMTOMAware = false; try{ isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE)); }catch(java.lang.IllegalArgumentException e){ isReaderMTOMAware = false; } return isReaderMTOMAware; } /** * * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getOMElement ( final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{ org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,parentQName){ public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { AttachInternetGatewayResponseType.this.serialize(parentQName,factory,xmlWriter); } }; return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl( parentQName,factory,dataSource); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ serialize(parentQName,factory,xmlWriter,false); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ java.lang.String prefix = null; 
java.lang.String namespace = null; prefix = parentQName.getPrefix(); namespace = parentQName.getNamespaceURI(); if ((namespace != null) && (namespace.trim().length() > 0)) { java.lang.String writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, parentQName.getLocalPart()); } else { if (prefix == null) { prefix = generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } else { xmlWriter.writeStartElement(parentQName.getLocalPart()); } if (serializeType){ java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){ writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", namespacePrefix+":AttachInternetGatewayResponseType", xmlWriter); } else { writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", "AttachInternetGatewayResponseType", xmlWriter); } } namespace = "http://ec2.amazonaws.com/doc/2012-08-15/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"requestId", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"requestId"); } } else { xmlWriter.writeStartElement("requestId"); } if (localRequestId==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!"); }else{ xmlWriter.writeCharacters(localRequestId); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2012-08-15/"; if (! 
// ---------------------------------------------------------------------------
// NOTE(review): Axis2 ADB-generated databinding code (wsdl2java output). The
// enclosing class declaration and the beginning of this serialize() method lie
// outside this chunk. Avoid hand-edits: they are lost when stubs regenerate.
// ---------------------------------------------------------------------------
namespace.equals("")) {
    prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        // No prefix bound yet for this namespace: invent one and declare it.
        prefix = generatePrefix(namespace);
        xmlWriter.writeStartElement(prefix,"return", namespace);
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    } else {
        xmlWriter.writeStartElement(namespace,"return");
    }
} else {
    // Empty namespace: write an unqualified element.
    xmlWriter.writeStartElement("return");
}

// Generated null-guard; 'return' maps to a primitive boolean, so the guard is
// statically dead (if (false)) and only the writeCharacters branch ever runs.
if (false) {
    throw new org.apache.axis2.databinding.ADBException("return cannot be null!!");
} else {
    xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));
}

xmlWriter.writeEndElement();

xmlWriter.writeEndElement();
}

/**
 * Util method to write an attribute with the ns prefix
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (xmlWriter.getPrefix(namespace) == null) {
        // Declare the prefix on first use so the attribute QName resolves.
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace,attName,attValue);
}

/**
 * Util method to write an attribute without the ns prefix
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName,attValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }
}

/**
 * Util method to write a QName-valued attribute (prefix:localPart), registering
 * prefixes for both the attribute's namespace and the QName's namespace.
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                 javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
    }
    java.lang.String attributeValue;
    if (attributePrefix.trim().length() > 0) {
        attributeValue = attributePrefix + ":" + qname.getLocalPart();
    } else {
        // Default namespace: emit the bare local part.
        attributeValue = qname.getLocalPart();
    }

    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}

/**
 * method to handle Qnames
 */
private void writeQName(javax.xml.namespace.QName qname,
                        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix,namespaceURI);
        }

        if (prefix.trim().length() > 0){
            xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        } else {
            // i.e this is the default namespace
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    } else {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}

/**
 * Writes a space-separated list of QNames as character data, declaring any
 * namespace prefixes first (namespaces cannot be declared after characters).
 */
private void writeQNames(javax.xml.namespace.QName[] qnames,
                         javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the character data
        java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
        java.lang.String namespaceURI = null;
        java.lang.String prefix = null;

        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }

                if (prefix.trim().length() > 0){
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        prefix = generatePrefix(namespace);
        // Keep generating until we find a prefix not already bound in scope.
        while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    return prefix;
}

/**
 * databinding method to get an XML representation of this object
 *
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException{

    // elementList holds alternating QName / string-value pairs consumed by
    // ADBXMLStreamReaderImpl below.
    java.util.ArrayList elementList = new java.util.ArrayList();
    java.util.ArrayList attribList = new java.util.ArrayList();

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "requestId"));
    if (localRequestId != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRequestId));
    } else {
        throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "return"));
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));

    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}

/**
 * Factory class that keeps the parse method
 */
public static class Factory{

    /**
     * static method to create the object
     * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     * If this object is a complex type, the reader is positioned at the end element of its outer element
     */
    public static AttachInternetGatewayResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
        AttachInternetGatewayResponseType object = new AttachInternetGatewayResponseType();

        // Generated scratch variables; 'event', 'nillableValue', 'prefix' and
        // 'namespaceuri' are unused on this code path.
        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix ="";
        java.lang.String namespaceuri ="";
        try {
            // Skip ignorable events until the first start/end element.
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                // xsi:type present: dispatch to the mapped subtype parser if it
                // names a different type than this one.
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type");
                if (fullTypeName!=null){
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1){
                        nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix==null?"":nsPrefix;

                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                    if (!"AttachInternetGatewayResponseType".equals(type)){
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (AttachInternetGatewayResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                nsUri,type,reader);
                    }
                }
            }

            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            java.util.Vector handledAttributes = new java.util.Vector();

            reader.next();

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","requestId").equals(reader.getName())){

                java.lang.String content = reader.getElementText();
                object.setRequestId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","return").equals(reader.getName())){

                java.lang.String content = reader.getElementText();
                object.set_return(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToBoolean(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement())
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

        } catch (javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }

        return object;
    }
}//end of factory class

}
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildtool;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import com.google.devtools.build.lib.Constants;
import com.google.devtools.build.lib.analysis.BuildView;
import com.google.devtools.build.lib.analysis.OutputGroupProvider;
import com.google.devtools.build.lib.analysis.TopLevelArtifactContext;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.pkgcache.LoadingPhaseRunner;
import com.google.devtools.build.lib.pkgcache.PackageCacheOptions;
import com.google.devtools.build.lib.runtime.BlazeCommandEventHandler;
import com.google.devtools.build.lib.util.OptionsUtils;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Converter;
import com.google.devtools.common.options.Converters;
import com.google.devtools.common.options.Converters.RangeConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsClassProvider;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.OptionsProvider;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;

/**
 * A BuildRequest represents a single invocation of the build tool by a user.
 * A request specifies a list of targets to be built for a single
 * configuration, a pair of output/error streams, and additional options such
 * as --keep_going, --jobs, etc.
 */
public class BuildRequest implements OptionsClassProvider {
  // Sentinel default for --symlink_prefix; annotation attributes must be
  // compile-time constants, so the real default is substituted by the converter.
  private static final String DEFAULT_SYMLINK_PREFIX_MARKER = "...---:::@@@DEFAULT@@@:::--...";

  /**
   * A converter for symlink prefixes that defaults to {@code Constants.PRODUCT_NAME} and a
   * minus sign if the option is not given.
   *
   * <p>Required because you cannot specify a non-constant value in annotation attributes.
   */
  public static class SymlinkPrefixConverter implements Converter<String> {
    @Override
    public String convert(String input) throws OptionsParsingException {
      return input.equals(DEFAULT_SYMLINK_PREFIX_MARKER)
          ? Constants.PRODUCT_NAME + "-"
          : input;
    }

    @Override
    public String getTypeDescription() {
      return "a string";
    }
  }

  /**
   * Options interface--can be used to parse command-line arguments.
   *
   * See also ExecutionOptions; from the user's point of view, there's no
   * qualitative difference between these two sets of options.
   */
  public static class BuildRequestOptions extends OptionsBase {
    /* "Execution": options related to the execution of a build: */

    @Option(name = "jobs",
            abbrev = 'j',
            defaultValue = "200",
            category = "strategy",
            help = "The number of concurrent jobs to run. "
                + "0 means build sequentially. Values above " + MAX_JOBS + " are not allowed.")
    public int jobs;

    @Option(name = "progress_report_interval",
            defaultValue = "0",
            category = "verbosity",
            converter = ProgressReportIntervalConverter.class,
            help = "The number of seconds to wait between two reports on"
                + " still running jobs. The default value 0 means to use"
                + " the default 10:30:60 incremental algorithm.")
    public int progressReportInterval;

    @Option(name = "explain",
            defaultValue = "null",
            category = "verbosity",
            converter = OptionsUtils.PathFragmentConverter.class,
            help = "Causes Blaze to explain each executed step of the build. "
                + "The explanation is written to the specified log file.")
    public PathFragment explanationPath;

    @Option(name = "verbose_explanations",
            defaultValue = "false",
            category = "verbosity",
            help = "Increases the verbosity of the explanations issued if --explain is enabled. "
                + "Has no effect if --explain is not enabled.")
    public boolean verboseExplanations;

    // The following dump_* flags are retained only for command-line
    // compatibility; all are documented no-ops.
    @Deprecated
    @Option(name = "dump_makefile",
            defaultValue = "false",
            category = "undocumented",
            help = "this flag has no effect.")
    public boolean dumpMakefile;

    @Deprecated
    @Option(name = "dump_action_graph",
            defaultValue = "false",
            category = "undocumented",
            help = "this flag has no effect.")
    public boolean dumpActionGraph;

    @Deprecated
    @Option(name = "dump_action_graph_for_package",
            allowMultiple = true,
            defaultValue = "",
            category = "undocumented",
            help = "this flag has no effect.")
    public List<String> dumpActionGraphForPackage = new ArrayList<>();

    @Deprecated
    @Option(name = "dump_action_graph_with_middlemen",
            defaultValue = "true",
            category = "undocumented",
            help = "this flag has no effect.")
    public boolean dumpActionGraphWithMiddlemen;

    @Deprecated
    @Option(name = "dump_providers",
            defaultValue = "false",
            category = "undocumented",
            help = "This is a no-op.")
    public boolean dumpProviders;

    @Deprecated
    @Option(name = "dump_targets",
            defaultValue = "null",
            category = "undocumented",
            help = "this flag has no effect.")
    public String dumpTargets;

    @Deprecated
    @Option(name = "dump_host_deps",
            defaultValue = "true",
            category = "undocumented",
            help = "Deprecated")
    public boolean dumpHostDeps;

    @Deprecated
    @Option(name = "dump_to_stdout",
            defaultValue = "false",
            category = "undocumented",
            help = "Deprecated")
    public boolean dumpToStdout;

    @Option(name = "analyze",
            defaultValue = "true",
            category = "undocumented",
            help = "Execute the analysis phase; this is the usual behaviour. "
                + "Specifying --noanalyze causes the build to stop before starting the "
                + "analysis phase, returning zero iff the package loading completed "
                + "successfully; this mode is useful for testing.")
    public boolean performAnalysisPhase;

    @Option(name = "build",
            defaultValue = "true",
            category = "what",
            help = "Execute the build; this is the usual behaviour. "
                + "Specifying --nobuild causes the build to stop before executing the "
                + "build actions, returning zero iff the package loading and analysis "
                + "phases completed successfully; this mode is useful for testing "
                + "those phases.")
    public boolean performExecutionPhase;

    @Option(name = "output_groups",
            converter = Converters.CommaSeparatedOptionListConverter.class,
            allowMultiple = true,
            defaultValue = "",
            category = "undocumented",
            help = "Specifies, which output groups of the top-level target to build.")
    public List<String> outputGroups;

    @Option(name = "show_result",
            defaultValue = "1",
            category = "verbosity",
            help = "Show the results of the build. For each "
                + "target, state whether or not it was brought up-to-date, and if "
                + "so, a list of output files that were built. The printed files "
                + "are convenient strings for copy+pasting to the shell, to "
                + "execute them.\n"
                + "This option requires an integer argument, which "
                + "is the threshold number of targets above which result "
                + "information is not printed. "
                + "Thus zero causes suppression of the message and MAX_INT "
                + "causes printing of the result to occur always. The default is one.")
    public int maxResultTargets;

    @Option(name = "announce",
            defaultValue = "false",
            category = "verbosity",
            help = "Deprecated. No-op.",
            deprecationWarning = "This option is now deprecated and is a no-op")
    public boolean announce;

    @Option(name = "symlink_prefix",
            defaultValue = DEFAULT_SYMLINK_PREFIX_MARKER,
            converter = SymlinkPrefixConverter.class,
            category = "misc",
            help = "The prefix that is prepended to any of the convenience symlinks that are created "
                + "after a build. If '/' is passed, then no symlinks are created and no warning is "
                + "emitted."
            )
    public String symlinkPrefix;

    @Option(name = "experimental_multi_cpu",
            converter = Converters.CommaSeparatedOptionListConverter.class,
            allowMultiple = true,
            defaultValue = "",
            category = "semantics",
            help = "This flag allows specifying multiple target CPUs. If this is specified, "
                + "the --cpu option is ignored.")
    public List<String> multiCpus;

    @Option(name = "experimental_check_output_files",
            defaultValue = "true",
            category = "undocumented",
            help = "Check for modifications made to the output files of a build. Consider setting "
                + "this flag to false to see the effect on incremental build times.")
    public boolean checkOutputFiles;
  }

  /**
   * Converter for progress_report_interval: [0, 3600].
   */
  public static class ProgressReportIntervalConverter extends RangeConverter {
    public ProgressReportIntervalConverter() {
      super(0, 3600);
    }
  }

  // Hard upper bound for --jobs; values above JOBS_TOO_HIGH_WARNING only warn.
  private static final int MAX_JOBS = 2000;
  private static final int JOBS_TOO_HIGH_WARNING = 1000;

  private final UUID id;
  // Lazily resolves options classes against the command options first, then
  // the startup options; Optional.absent() caches "not present".
  private final LoadingCache<Class<? extends OptionsBase>, Optional<OptionsBase>> optionsCache;

  /** A human-readable description of all the non-default option settings. */
  private final String optionsDescription;

  /**
   * The name of the Blaze command that the user invoked.
   * Used for --announce.
   */
  private final String commandName;

  private final OutErr outErr;
  private final List<String> targets;

  private long startTimeMillis = 0; // milliseconds since UNIX epoch.

  private boolean runningInEmacs = false;
  private boolean runTests = false;

  // Option classes that must be present on every request; checked in the ctor.
  private static final List<Class<? extends OptionsBase>> MANDATORY_OPTIONS = ImmutableList.of(
          BuildRequestOptions.class,
          PackageCacheOptions.class,
          LoadingPhaseRunner.Options.class,
          BuildView.Options.class,
          ExecutionOptions.class);

  private BuildRequest(String commandName,
                       final OptionsProvider options,
                       final OptionsProvider startupOptions,
                       List<String> targets,
                       OutErr outErr,
                       UUID id,
                       long startTimeMillis) {
    this.commandName = commandName;
    this.optionsDescription = OptionsUtils.asShellEscapedString(options);
    this.outErr = outErr;
    this.targets = targets;
    this.id = id;
    this.startTimeMillis = startTimeMillis;
    this.optionsCache = CacheBuilder.newBuilder()
        .build(new CacheLoader<Class<? extends OptionsBase>, Optional<OptionsBase>>() {
          @Override
          public Optional<OptionsBase> load(Class<? extends OptionsBase> key) throws Exception {
            OptionsBase result = options.getOptions(key);
            if (result == null && startupOptions != null) {
              result = startupOptions.getOptions(key);
            }
            return Optional.fromNullable(result);
          }
        });

    for (Class<? extends OptionsBase> optionsClass : MANDATORY_OPTIONS) {
      Preconditions.checkNotNull(getOptions(optionsClass));
    }
  }

  /**
   * Returns a unique identifier that universally identifies this build.
   */
  public UUID getId() {
    return id;
  }

  /**
   * Returns the name of the Blaze command that the user invoked.
   */
  public String getCommandName() {
    return commandName;
  }

  /**
   * Set to true if this build request was initiated by Emacs.
   * (Certain output formatting may be necessary.)
   */
  public void setRunningInEmacs() {
    runningInEmacs = true;
  }

  boolean isRunningInEmacs() {
    return runningInEmacs;
  }

  /**
   * Enables test execution for this build request.
   */
  public void setRunTests() {
    runTests = true;
  }

  /**
   * Returns true if tests should be run by the build tool.
   */
  public boolean shouldRunTests() {
    return runTests;
  }

  /**
   * Returns the (immutable) list of targets to build in commandline
   * form.
   */
  public List<String> getTargets() {
    return targets;
  }

  /**
   * Returns the output/error streams to which errors and progress messages
   * should be sent during the fulfillment of this request.
   */
  public OutErr getOutErr() {
    return outErr;
  }

  @Override
  @SuppressWarnings("unchecked")
  public <T extends OptionsBase> T getOptions(Class<T> clazz) {
    try {
      return (T) optionsCache.get(clazz).orNull();
    } catch (ExecutionException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Returns the set of command-line options specified for this request.
   */
  public BuildRequestOptions getBuildOptions() {
    return getOptions(BuildRequestOptions.class);
  }

  /**
   * Returns the set of options related to the loading phase.
   */
  public PackageCacheOptions getPackageCacheOptions() {
    return getOptions(PackageCacheOptions.class);
  }

  /**
   * Returns the set of options related to the loading phase.
   */
  public LoadingPhaseRunner.Options getLoadingOptions() {
    return getOptions(LoadingPhaseRunner.Options.class);
  }

  /**
   * Returns the set of command-line options related to the view specified for
   * this request.
   */
  public BuildView.Options getViewOptions() {
    return getOptions(BuildView.Options.class);
  }

  /**
   * Returns the set of execution options specified for this request.
   */
  public ExecutionOptions getExecutionOptions() {
    return getOptions(ExecutionOptions.class);
  }

  /**
   * Returns the human-readable description of the non-default options
   * for this build request.
   */
  public String getOptionsDescription() {
    return optionsDescription;
  }

  /**
   * Return the time (according to System.currentTimeMillis()) at which the
   * service of this request was started.
   */
  public long getStartTime() {
    return startTimeMillis;
  }

  /**
   * Validates the options for this BuildRequest.
   *
   * <p>Issues warnings or throws {@code InvalidConfigurationException} for option settings that
   * conflict.
   *
   * @return list of warnings
   */
  public List<String> validateOptions() throws InvalidConfigurationException {
    List<String> warnings = new ArrayList<>();
    // Validate "jobs".
    int jobs = getBuildOptions().jobs;
    if (jobs < 0 || jobs > MAX_JOBS) {
      throw new InvalidConfigurationException(String.format(
          "Invalid parameter for --jobs: %d. Only values 0 <= jobs <= %d are allowed.", jobs,
          MAX_JOBS));
    }
    if (jobs > JOBS_TOO_HIGH_WARNING) {
      warnings.add(
          String.format("High value for --jobs: %d. You may run into memory issues", jobs));
    }

    int localTestJobs = getExecutionOptions().localTestJobs;
    if (localTestJobs < 0) {
      throw new InvalidConfigurationException(String.format(
          "Invalid parameter for --local_test_jobs: %d. Only values 0 or greater are "
              + "allowed.", localTestJobs));
    }
    if (localTestJobs > jobs) {
      warnings.add(
          String.format("High value for --local_test_jobs: %d. This exceeds the value for --jobs: "
              + "%d. Only up to %d local tests will run concurrently.", localTestJobs, jobs, jobs));
    }

    // Validate other BuildRequest options.
    if (getBuildOptions().verboseExplanations && getBuildOptions().explanationPath == null) {
      warnings.add("--verbose_explanations has no effect when --explain=<file> is not enabled");
    }

    return warnings;
  }

  /** Creates a new TopLevelArtifactContext from this build request. */
  public TopLevelArtifactContext getTopLevelArtifactContext() {
    return new TopLevelArtifactContext(
        getOptions(ExecutionOptions.class).testStrategy.equals("exclusive"),
        determineOutputGroups());
  }

  private ImmutableSortedSet<String> determineOutputGroups() {
    // Start from the default groups; a leading '-' on an --output_groups entry
    // removes a group, anything else adds one.
    Set<String> current = new HashSet<>(OutputGroupProvider.DEFAULT_GROUPS);
    for (String outputGroup : getBuildOptions().outputGroups) {
      if (outputGroup.startsWith("-")) {
        current.remove(outputGroup.substring(1));
      } else {
        current.add(outputGroup);
      }
    }
    return ImmutableSortedSet.copyOf(current);
  }

  public String getSymlinkPrefix() {
    return getBuildOptions().symlinkPrefix;
  }

  public ImmutableSortedSet<String> getMultiCpus() {
    return ImmutableSortedSet.copyOf(getBuildOptions().multiCpus);
  }

  public static BuildRequest create(String commandName, OptionsProvider options,
      OptionsProvider startupOptions,
      List<String> targets, OutErr outErr, UUID commandId, long commandStartTime) {

    BuildRequest request = new BuildRequest(commandName, options, startupOptions, targets, outErr,
        commandId, commandStartTime);

    // All this, just to pass a global boolean from the client to the server. :(
    if (options.getOptions(BlazeCommandEventHandler.Options.class).runningInEmacs) {
      request.setRunningInEmacs();
    }

    return request;
  }
}
/* * RED5 Open Source Media Server - https://github.com/Red5/ * * Copyright 2006-2016 by respective authors (see below). All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.red5.server; import java.beans.ConstructorProperties; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.RandomStringUtils; import org.red5.server.api.IClient; import org.red5.server.api.IConnection; import org.red5.server.api.event.IEvent; import org.red5.server.api.listeners.IConnectionListener; import org.red5.server.api.scope.IBasicScope; import org.red5.server.api.scope.IBroadcastScope; import org.red5.server.api.scope.IScope; import org.red5.server.scope.Scope; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Base abstract class for connections. Adds connection specific functionality like work with clients to AttributeStore. 
*/ public abstract class BaseConnection extends AttributeStore implements IConnection { private static final Logger log = LoggerFactory.getLogger(BaseConnection.class); /** * Connection type */ protected final String type; /** * Connection host */ protected volatile String host; /** * Connection remote address */ protected volatile String remoteAddress; /** * Connection remote addresses */ protected volatile List<String> remoteAddresses; /** * Remote port */ protected volatile int remotePort; /** * Path of scope client connected to */ protected volatile String path; /** * Connection session identifier */ protected final String sessionId; /** * Number of read messages */ protected AtomicLong readMessages = new AtomicLong(0); /** * Number of written messages */ protected AtomicLong writtenMessages = new AtomicLong(0); /** * Number of dropped messages */ protected AtomicLong droppedMessages = new AtomicLong(0); /** * Connection params passed from client with NetConnection.connect call */ @SuppressWarnings("all") protected volatile Map<String, Object> params = null; /** * Client bound to connection */ protected volatile IClient client; /** * Scope to which this connection belongs */ protected transient volatile Scope scope; /** * Set of basic scopes. The scopes may be of shared object or broadcast stream type. */ protected transient CopyOnWriteArraySet<IBasicScope> basicScopes = new CopyOnWriteArraySet<IBasicScope>(); /** * Is the connection closed? 
*/ private volatile boolean closed; /** * Listeners */ protected transient CopyOnWriteArrayList<IConnectionListener> connectionListeners = new CopyOnWriteArrayList<IConnectionListener>(); /** * Used to protect mulit-threaded operations on write */ private final transient Semaphore writeLock = new Semaphore(1, true); // Support for stream ids private transient ThreadLocal<Number> streamLocal = new ThreadLocal<Number>(); /** {@inheritDoc} */ public Number getStreamId() { return streamLocal.get(); } /** {@inheritDoc} */ public void setStreamId(Number id) { streamLocal.set(id); } /** * Creates a new persistent base connection */ @ConstructorProperties(value = { "persistent" }) public BaseConnection() { this(PERSISTENT); } /** * Creates a new base connection with the given type. * * @param type * Connection type */ @ConstructorProperties({ "type" }) public BaseConnection(String type) { log.debug("New BaseConnection - type: {}", type); this.type = type; this.sessionId = RandomStringUtils.randomAlphanumeric(13).toUpperCase(); log.debug("Generated session id: {}", sessionId); } /** * Creates a new base connection with the given parameters. 
* * @param type * Connection type * @param host * Host * @param remoteAddress * Remote address * @param remotePort * Remote port * @param path * Scope path on server * @param sessionId * Session id * @param params * Params passed from client */ @ConstructorProperties({ "type", "host", "remoteAddress", "remotePort", "path", "sessionId" }) public BaseConnection(String type, String host, String remoteAddress, int remotePort, String path, String sessionId, Map<String, Object> params) { log.debug("New BaseConnection - type: {} host: {} remoteAddress: {} remotePort: {} path: {} sessionId: {}", new Object[] { type, host, remoteAddress, remotePort, path, sessionId }); log.debug("Params: {}", params); this.type = type; this.host = host; this.remoteAddress = remoteAddress; this.remoteAddresses = new ArrayList<String>(1); this.remoteAddresses.add(remoteAddress); this.remoteAddresses = Collections.unmodifiableList(this.remoteAddresses); this.remotePort = remotePort; this.path = path; this.sessionId = sessionId; this.params = params; log.debug("Generated session id: {}", sessionId); } /** {@inheritDoc} */ public void addListener(IConnectionListener listener) { this.connectionListeners.add(listener); } /** {@inheritDoc} */ public void removeListener(IConnectionListener listener) { this.connectionListeners.remove(listener); } /** * @return lock for changing state operations */ public Semaphore getLock() { return writeLock; } /** * Initializes client * * @param client * Client bound to connection */ public void initialize(IClient client) { if (log.isDebugEnabled()) { log.debug("initialize - client: {}", client); } if (this.client != null && this.client instanceof Client && !this.client.equals(client)) { // unregister old client if (log.isTraceEnabled()) { log.trace("Unregistering previous client: {}", this.client); } ((Client) this.client).unregister(this, false); } this.client = client; if (this.client instanceof Client && !((Client) this.client).isRegistered(this)) { // register 
new client if (log.isTraceEnabled()) { log.trace("Registering client: {}", this.client); } ((Client) this.client).register(this); } } /** * * @return type */ public String getType() { return type; } /** * * @return host */ public String getHost() { return host; } /** * * @return remote address */ public String getRemoteAddress() { return remoteAddress; } /** * @return remote address */ public List<String> getRemoteAddresses() { return remoteAddresses; } /** * * @return remote port */ public int getRemotePort() { return remotePort; } /** * * @return path */ public String getPath() { return path; } /** * * @return session id */ public String getSessionId() { return sessionId; } /** * Return connection parameters * * @return connection parameters */ public Map<String, Object> getConnectParams() { return Collections.unmodifiableMap(params); } /** {@inheritDoc} */ public void setClient(IClient client) { this.client = client; } /** {@inheritDoc} */ public IClient getClient() { return client; } /** * Check whether connection is alive * * @return true if connection is bound to scope, false otherwise */ public boolean isConnected() { //log.debug("Connected: {}", (scope != null)); return scope != null; } /** * Connect to another scope on server * * @param newScope * New scope * @return true on success, false otherwise */ public boolean connect(IScope newScope) { return connect(newScope, null); } /** * Connect to another scope on server with given parameters * * @param newScope * New scope * @param params * Parameters to connect with * @return true on success, false otherwise */ public boolean connect(IScope newScope, Object[] params) { if (log.isDebugEnabled()) { log.debug("Connect Params: {}", params); if (params != null) { for (Object e : params) { log.debug("Param: {}", e); } } } scope = (Scope) newScope; return scope.connect(this, params); } /** * Return the current scope. 
* * @return scope */ public IScope getScope() { return scope; } /** * Closes connection */ public void close() { if (closed) { log.debug("Already closed, nothing to do"); return; } closed = true; if (scope != null) { log.debug("Close, disconnect from scope, and children"); try { // unregister all child scopes first for (IBasicScope basicScope : basicScopes) { unregisterBasicScope(basicScope); } } catch (Exception err) { log.error("Error while unregistering basic scopes", err); } // disconnect if (scope != null) { try { scope.disconnect(this); } catch (Exception err) { log.error("Error while disconnecting from scope: {}. {}", scope, err); } scope = null; } } // unregister client if (client != null && client instanceof Client) { ((Client) client).unregister(this); } // alert our listeners if (connectionListeners != null) { for (IConnectionListener listener : connectionListeners) { listener.notifyDisconnected(this); } connectionListeners.clear(); connectionListeners = null; } } /** * Notified on event * * @param event * Event */ public void notifyEvent(IEvent event) { log.debug("Event notify was not handled: {}", event); } /** * Dispatches event * * @param event * Event */ public void dispatchEvent(IEvent event) { log.debug("Event notify was not dispatched: {}", event); } /** * Handles event * * @param event * Event * @return true if associated scope was able to handle event, false otherwise */ public boolean handleEvent(IEvent event) { return getScope().handleEvent(event); } /** * * @return basic scopes */ public Iterator<IBasicScope> getBasicScopes() { return basicScopes.iterator(); } /** * Registers basic scope * * @param basicScope * Basic scope to register */ public void registerBasicScope(IBroadcastScope basicScope) { basicScopes.add(basicScope); basicScope.addEventListener(this); } /** * Unregister basic scope * * @param basicScope * Unregister basic scope */ public void unregisterBasicScope(IBasicScope basicScope) { if (basicScope instanceof IBroadcastScope) { 
basicScopes.remove(basicScope); basicScope.removeEventListener(this); } } /** * * @return bytes read */ public abstract long getReadBytes(); /** * * @return bytes written */ public abstract long getWrittenBytes(); /** * * @return messages read */ public long getReadMessages() { return readMessages.get(); } /** * * @return messages written */ public long getWrittenMessages() { return writtenMessages.get(); } /** * * @return dropped messages */ public long getDroppedMessages() { return droppedMessages.get(); } /** * Returns whether or not the reader is idle. * * @return queued messages */ public boolean isReaderIdle() { return false; } /** * Returns whether or not the writer is idle. * * @return queued messages */ public boolean isWriterIdle() { return false; } /** * Returns whether or not a connection is closed. * * @return true if closed */ public boolean isClosed() { return closed; } /** * Count of outgoing messages not yet written. * * @return pending messages */ public long getPendingMessages() { return 0; } /** * Count of outgoing video messages not yet written. * * @param streamId * the id you want to know about * @return pending messages for this streamId */ public long getPendingVideoMessages(Number streamId) { return 0; } /** {@inheritDoc} */ public long getClientBytesRead() { return 0; } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = prime * sessionId.hashCode(); return result; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } return sessionId.equals(((BaseConnection) obj).getSessionId()); } }
package org.jgroups.blocks; import org.jgroups.Address; import org.jgroups.JChannel; import org.jgroups.MembershipListener; import org.jgroups.Message; import org.jgroups.util.*; import java.lang.reflect.Method; import java.util.Collection; import java.util.concurrent.CompletableFuture; /** * This class allows a programmer to invoke remote methods in all (or single) group members and optionally wait for * the return value(s).<p/> * An application will typically create a channel and layer the RpcDispatcher building block on top of it, which * allows it to dispatch remote methods (client role) and at the same time be called by other members (server role).<p/> * This class is derived from MessageDispatcher. * @author Bela Ban */ public class RpcDispatcher extends MessageDispatcher { protected Object server_obj; /** Marshaller to marshall requests at the caller, unmarshal requests at the receiver(s), marshall responses at the * receivers and unmarshall responses at the caller */ protected Marshaller marshaller; protected MethodLookup method_lookup; public RpcDispatcher() { } public RpcDispatcher(JChannel channel, Object server_obj) { super(channel); setRequestHandler(this); this.server_obj=server_obj; } public Marshaller getMarshaller() {return marshaller;} public RpcDispatcher setMarshaller(Marshaller m) {marshaller=m; if(corr != null) corr.setMarshaller(m); return this;} public Object getServerObject() {return server_obj;} public RpcDispatcher setServerObject(Object server_obj) {this.server_obj=server_obj; return this;} public RpcDispatcher setMembershipListener(MembershipListener l) {return (RpcDispatcher)super.setMembershipListener(l);} public MethodLookup getMethodLookup() {return method_lookup;} public RpcDispatcher setMethodLookup(MethodLookup method_lookup) {this.method_lookup=method_lookup; return this;} /** * Invokes a method in all members and expects responses from members contained in dests (or all members if dests is null). 
* @param dests A list of addresses. If null, we'll wait for responses from all cluster members * @param method_name The name of the target method * @param args The arguments to be passed * @param types The types of the arguments * @param options A collection of call options, e.g. sync versus async, timeout etc * @return RspList<T> A response list with results, one for each member in dests, or null if the RPC is asynchronous * @throws Exception If the sending of the message threw an exception. Note that <em>no</em> exception will be * thrown if any of the target members threw an exception, but this exception will be in the Rsp * object for the particular member in the RspList */ public <T> RspList<T> callRemoteMethods(Collection<Address> dests, String method_name, Object[] args, Class[] types, RequestOptions options) throws Exception { MethodCall method_call=new MethodCall(method_name, args, types); return callRemoteMethods(dests, method_call, options); } /** * Invokes a method in all members and expects responses from members contained in dests (or all members if dests is null). * @param dests A list of addresses. If null, we'll wait for responses from all cluster members * @param method_call The method (plus args) to be invoked * @param opts A collection of call options, e.g. sync versus async, timeout etc * @return RspList A list of return values and flags (suspected, not received) per member, or null if the RPC is * asynchronous * @throws Exception If the sending of the message threw an exception. 
Note that <em>no</em> exception will be * thrown if any of the target members threw an exception, but this exception will be in the Rsp * object for the particular member in the RspList * @since 2.9 */ public <T> RspList<T> callRemoteMethods(Collection<Address> dests, MethodCall method_call, RequestOptions opts) throws Exception { if(dests != null && dests.isEmpty()) { // don't send if dest list is empty log.trace("destination list of %s() is empty: no need to send message", method_call.methodName()); return empty_rsplist; } Buffer buf=methodCallToBuffer(method_call, marshaller); RspList<T> retval=super.castMessage(dests, buf, opts); if(log.isTraceEnabled()) log.trace("dests=%s, method_call=%s, options=%s, responses: %s", dests, method_call, opts, retval); return retval; } /** * Invokes a method in all members and expects responses from members contained in dests (or all members if dests is null). * @param dests A list of addresses. If null, we'll wait for responses from all cluster members * @param method_call The method (plus args) to be invoked * @param options A collection of call options, e.g. sync versus async, timeout etc * @return CompletableFuture A future from which the results can be fetched, or null if the RPC is asynchronous * @throws Exception If the sending of the message threw an exception. 
Note that <em>no</em> exception will be * thrown if any of the target members threw an exception; such an exception will be in the Rsp * element for the particular member in the RspList */ public <T> CompletableFuture<RspList<T>> callRemoteMethodsWithFuture(Collection<Address> dests, MethodCall method_call, RequestOptions options) throws Exception { if(dests != null && dests.isEmpty()) { // don't send if dest list is empty log.trace("destination list of %s() is empty: no need to send message", method_call.methodName()); return CompletableFuture.completedFuture(empty_rsplist); } Buffer buf=methodCallToBuffer(method_call, marshaller); CompletableFuture<RspList<T>> retval=super.castMessageWithFuture(dests, buf, options); if(log.isTraceEnabled()) log.trace("dests=%s, method_call=%s, options=%s", dests, method_call, options); return retval; } /** * Invokes a method in a cluster member and - if blocking - returns the result * @param dest The target member on which to invoke the method * @param meth The name of the method * @param args The arguments * @param types The types of the arguments * @param opts The options (e.g. blocking, timeout etc) * @return The result. Null if the call is asynchronous (non-blocking) or if the method returns void * @throws Exception Thrown if the method invocation threw an exception, either at the caller or the callee */ public <T> T callRemoteMethod(Address dest, String meth, Object[] args, Class[] types, RequestOptions opts) throws Exception { MethodCall method_call=new MethodCall(meth, args, types); return (T)callRemoteMethod(dest, method_call, opts); } /** * Invokes a method in a cluster member and - if blocking - returns the result * @param dest The target member on which to invoke the method * @param call The call to be invoked, including method are arguments * @param options The options (e.g. blocking, timeout etc) * @return The result. 
Null if the call is asynchronous (non-blocking) or if the method returns void * @throws Exception Thrown if the method invocation threw an exception, either at the caller or the callee */ public <T> T callRemoteMethod(Address dest, MethodCall call, RequestOptions options) throws Exception { Buffer buf=methodCallToBuffer(call, marshaller); T retval=super.sendMessage(dest, buf, options); if(log.isTraceEnabled()) log.trace("dest=%s, method_call=%s, options=%s, retval: %s", dest, call, options, retval); return retval; } /** * Invokes a method in a cluster member and - if blocking - returns the result * @param dest The target member on which to invoke the method * @param call The call to be invoked, including method are arguments * @param opts The options (e.g. blocking, timeout etc) * @return A future from which the result can be fetched. If the callee threw an invocation, an ExecutionException * will be thrown on calling Future.get(). If the invocation was asynchronous, null will be returned. * @throws Exception Thrown if the method invocation threw an exception */ public <T> CompletableFuture<T> callRemoteMethodWithFuture(Address dest, MethodCall call, RequestOptions opts) throws Exception { if(log.isTraceEnabled()) log.trace("dest=%s, method_call=%s, options=%s", dest, call, opts); Buffer buf=methodCallToBuffer(call, marshaller); return super.sendMessageWithFuture(dest, buf, opts); } /** * Message contains MethodCall. Execute it against *this* object and return result. * Use MethodCall.invoke() to do this. Return result. 
*/ public Object handle(Message req) throws Exception { if(server_obj == null) { log.error(Util.getMessage("NoMethodHandlerIsRegisteredDiscardingRequest")); return null; } if(req == null || req.getLength() == 0) { log.error(Util.getMessage("MessageOrMessageBufferIsNull")); return null; } MethodCall method_call=methodCallFromBuffer(req.getRawBuffer(), req.getOffset(), req.getLength(), marshaller); if(log.isTraceEnabled()) log.trace("[sender=%s], method_call: %s", req.getSrc(), method_call); if(method_call.mode() == MethodCall.ID) { if(method_lookup == null) throw new Exception(String.format("MethodCall uses ID=%d, but method_lookup has not been set", method_call.methodId())); Method m=method_lookup.findMethod(method_call.methodId()); if(m == null) throw new Exception("no method found for " + method_call.methodId()); method_call.method(m); } return method_call.invoke(server_obj); } protected static Buffer methodCallToBuffer(final MethodCall call, Marshaller marshaller) throws Exception { Object[] args=call.args(); int estimated_size=64; if(args != null) for(Object arg: args) estimated_size+=marshaller != null? marshaller.estimatedSize(arg) : (arg == null? 2 : 50); ByteArrayDataOutputStream out=new ByteArrayDataOutputStream(estimated_size, true); call.writeTo(out, marshaller); return out.getBuffer(); } protected static MethodCall methodCallFromBuffer(final byte[] buf, int offset, int length, Marshaller marshaller) throws Exception { ByteArrayDataInputStream in=new ByteArrayDataInputStream(buf, offset, length); MethodCall call=new MethodCall(); call.readFrom(in, marshaller); return call; } protected void correlatorStarted() { if(corr != null) corr.setMarshaller(marshaller); } }
package com.laytonsmith.tools.docgen.localization;

import com.laytonsmith.PureUtilities.CommandExecutor;
import com.laytonsmith.PureUtilities.Common.Annotations.CheckOverrides;
import com.laytonsmith.PureUtilities.Common.UIUtils;
import com.laytonsmith.PureUtilities.GithubUtil;
import java.io.File;
import java.io.IOException;
import java.util.List;
import javax.swing.JFileChooser;
import javax.swing.JScrollBar;

/**
 *
 * @author Cailin
 */
@CheckOverrides.SuppressCheckOverrides
public class ForkDatabaseWizard extends javax.swing.JDialog {

	// Minimal local functional interfaces used to drive the per-step wizard
	// tables in validateStep()/stepTransition() below.
	private static interface Callable<T> {
		T call();
	}

	private static interface IntCallable extends Callable<Integer> {}

	private static interface BooleanCallable extends Callable<Boolean> {}

	private static final String REPO = "[email protected]:LadyCailin/MethodScriptTranslationDB.git";
	private static final String MASTER_REPO_OWNER = "LadyCailin";
	private static final String MASTER_REPO_NAME = "MethodScriptTranslationDB";

	// Options accumulated across the wizard steps and consumed by doFinish().
	private static class StateOptions {
		boolean doFork;
		String githubToken;
		File saveTo;
	}

	private final LocalizationUI parent;
	private final StateOptions stateOptions = new StateOptions();
	private final LogViewer logViewer;

	/**
	 * Creates new form ForkDatabaseWizard
	 * @param parent
	 */
	@SuppressWarnings("LeakingThisInConstructor")
	public ForkDatabaseWizard(LocalizationUI parent, LogViewer logViewer) {
		super(parent, true);
		this.parent = parent;
		initComponents();
		this.logViewer = logViewer;
		step3ErrorLabel.setText("");
		UIUtils.centerWindowOnWindow(this, parent);
		for(int i = 0; i < tabbedPanel.getTabCount(); i++) {
			// Don't allow users to navigate tabs
			tabbedPanel.setEnabledAt(i, false);
		}
		writeStatus("---- Log ----");
	}

	// Enables/disables the Next button depending on whether the currently
	// selected tab's requirements are met; one BooleanCallable per step.
	private void validateStep() {
		int index = tabbedPanel.getSelectedIndex();
		boolean setEnabled = new BooleanCallable[] {
			() -> {
				// Step 1
				return (createForkRadioButton.isSelected() || copyRepoRadioButton.isSelected());
			},
			() -> {
				// Step 2: a github token must have been obtained
				return stateOptions.githubToken != null;
			},
			() -> {
				// Step 3: target directory must exist and not already contain the repo folder
				String fl = fileLocation.getText();
				boolean subpathExists = new File(new File(fileLocation.getText()), MASTER_REPO_NAME).exists();
				if(subpathExists) {
					step3ErrorLabel.setText("The directory already contains a folder named " + MASTER_REPO_NAME);
				} else {
					step3ErrorLabel.setText("");
				}
				return new File(fl).exists() && !subpathExists;
			}
		}[index].call();
		nextButton.setEnabled(setEnabled);
	}

	// Moves the wizard forward or backward. The IntCallable tables map the
	// current tab index to the next tab index; note that choosing "just copy"
	// on step 1 skips the github-authorization step entirely.
	private void stepTransition(boolean forward) {
		int current = tabbedPanel.getSelectedIndex();
		int next;
		if(forward) {
			if(current == tabbedPanel.getTabCount() - 1) {
				doFinish();
				return;
			}
			next = new IntCallable[] {
				() -> {
					// Step 1
					stateOptions.doFork = createForkRadioButton.isSelected();
					if(createForkRadioButton.isSelected()) {
						return 1;
					} else {
						return 2;
					}
				},
				() -> {
					// Step 2
					return 2;
				}
			}[current].call();
		} else {
			next = new IntCallable[] {
				() -> {
					// Step 1
					return 0; // Can't go back from here
				},
				() -> {
					// Step 2
					return 0;
				},
				() -> {
					// Step 3
					return 0;
				}
			}[current].call();
		}
		tabbedPanel.setSelectedIndex(next);
		if(next == 0) {
			backButton.setEnabled(false);
		} else {
			backButton.setEnabled(true);
		}
		if(next == tabbedPanel.getTabCount() - 1) {
			nextButton.setText("Finish");
		} else {
			nextButton.setText("Next");
		}
		validateStep();
	}

	// Appends a line to the on-dialog log, mirrors it to the shared log
	// viewer, and keeps the output window scrolled to the bottom.
	private void writeStatus(String status) {
		outputWindow.append(status + "\n");
		logViewer.pushLog("ForkDatabase: " + status);
		JScrollBar vertical = outputWindowScrollPanel.getVerticalScrollBar();
		vertical.setValue( vertical.getMaximum() );
	}

	// Runs the fork+clone work on a background thread once the user presses
	// Finish on the last step.
	private void doFinish() {
		cancelButton.setEnabled(false);
		backButton.setEnabled(false);
		nextButton.setEnabled(false);
		progressBar.setVisible(true);
		stateOptions.saveTo = new File(fileLocation.getText());
		new Thread(() -> {
			if(stateOptions.doFork) {
				writeStatus("Will first fork database on github");
			}
			// NOTE(review): this regex reduces REPO to "MethodScriptTranslationDB.git"
			// (status display only) - confirm the ".git" suffix is intended here
			writeStatus("Will save repo to " + new File(stateOptions.saveTo, REPO.replaceAll("(?:.*)/(.*?)", "$1")));
			// Start
			try {
				File saveTo = stateOptions.saveTo;
				String cloneUrl = REPO;
				if(stateOptions.doFork) {
					GithubUtil.Repository fork = getExistingFork();
					if(fork == null) {
						writeStatus("Fork not found, now creating fork");
						fork = createFork();
						writeStatus("Fork successfully created. Waiting 30 seconds for repo initialization...");
						Thread.sleep(30000);
					} else {
						writeStatus("Fork already exists! Will reuse existing fork at " + fork.cloneUrl);
					}
					cloneUrl = fork.sshUrl;
				}
				writeStatus("Cloning...");
				writeStatus(CommandExecutor.Execute(saveTo, "git", "clone", cloneUrl));
				parent.initializeTranslationDb(new File(saveTo, MASTER_REPO_NAME));
			} catch (InterruptedException | IOException ex) {
				// NOTE(review): "and" + "were" concatenates without a space, so the
				// user sees "andwere"; the thread's interrupt status is also not
				// restored after InterruptedException
				writeStatus("Caught exception while trying to operate. Some steps may have completed successfully, and"
						+ "were not rolled back!");
				writeStatus(ex.getMessage());
				return;
			}
			// End
			writeStatus("Repo successfully cloned! Feel free to close this dialog now.");
			cancelButton.setEnabled(true);
			cancelButton.setText("Close");
			progressBar.setVisible(false);
		}, "ForkDatabase").start();
	}

	// Scans the authenticated user's repositories for an existing fork of the
	// master translation repo; returns null if none is found.
	private GithubUtil.Repository getExistingFork() throws GithubUtil.GithubException {
		GithubUtil util = new GithubUtil(stateOptions.githubToken);
		List<GithubUtil.Repository> r = util.listRepos(null, "owner", null, null, null);
		for(GithubUtil.Repository rr : r) {
			if(rr.fork) {
				try {
					GithubUtil.Repository repo = util.getRepo(rr.owner.login, rr.name);
					if((MASTER_REPO_OWNER + "/" + MASTER_REPO_NAME).equals(repo.parent.fullName)) {
						return rr;
					}
				} catch (GithubUtil.GithubException ex) {
					// 451 (unavailable for legal reasons): skip this repo and keep
					// scanning rather than failing the whole lookup
					if(ex.getResponseCode() == 451) {
						continue;
					}
					throw ex;
				}
			}
		}
		return null;
	}

	// Forks the master translation repo into the authenticated user's account.
	private GithubUtil.Repository createFork() throws GithubUtil.GithubException {
		GithubUtil util = new GithubUtil(stateOptions.githubToken);
		return util.forkRepo(MASTER_REPO_OWNER, MASTER_REPO_NAME, null);
	}

	/**
	 * This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The
	 * content of this method is always regenerated by the Form Editor.
	 */
	@SuppressWarnings("unchecked")
	// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
	private void initComponents() {

		step1ButtonGroup = new javax.swing.ButtonGroup();
		tabbedPanel = new javax.swing.JTabbedPane();
		step1Panel = new javax.swing.JPanel();
		jLabel1 = new javax.swing.JLabel();
		createForkRadioButton = new javax.swing.JRadioButton();
		copyRepoRadioButton = new javax.swing.JRadioButton();
		step2Panel = new javax.swing.JPanel();
		jLabel2 = new javax.swing.JLabel();
		authorizeGithubButton = new javax.swing.JButton();
		step3Panel = new javax.swing.JPanel();
		jLabel3 = new javax.swing.JLabel();
		fileLocation = new javax.swing.JTextField();
		browseFileButton = new javax.swing.JButton();
		step3ErrorLabel = new javax.swing.JLabel();
		nextButton = new javax.swing.JButton();
		backButton = new javax.swing.JButton();
		cancelButton = new javax.swing.JButton();
		progressBar = new javax.swing.JProgressBar();
		outputWindowScrollPanel = new javax.swing.JScrollPane();
		outputWindow = new javax.swing.JTextArea();

		setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
		setTitle("Fork Database Wizard");

		jLabel1.setText("<html><body>Would you like to create a fork first? You must<br>create a fork before you will be able to make contributions.<br>If you create a fork, you must have a github account. You can create one in the next step.<br>If you already have a fork, but just want to check it, select \"Create Fork\" anyways.");

		step1ButtonGroup.add(createForkRadioButton);
		createForkRadioButton.setText("Create Fork");
		createForkRadioButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				createForkRadioButtonActionPerformed(evt);
			}
		});

		step1ButtonGroup.add(copyRepoRadioButton);
		copyRepoRadioButton.setText("Just copy master repo");
		copyRepoRadioButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				copyRepoRadioButtonActionPerformed(evt);
			}
		});

		javax.swing.GroupLayout step1PanelLayout = new javax.swing.GroupLayout(step1Panel);
		step1Panel.setLayout(step1PanelLayout);
		step1PanelLayout.setHorizontalGroup(
			step1PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(step1PanelLayout.createSequentialGroup()
				.addContainerGap()
				.addGroup(step1PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
					.addComponent(copyRepoRadioButton)
					.addComponent(createForkRadioButton)
					.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
				.addContainerGap(22, Short.MAX_VALUE))
		);
		step1PanelLayout.setVerticalGroup(
			step1PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(step1PanelLayout.createSequentialGroup()
				.addContainerGap()
				.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
				.addGap(18, 18, 18)
				.addComponent(createForkRadioButton)
				.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
				.addComponent(copyRepoRadioButton)
				.addContainerGap(42, Short.MAX_VALUE))
		);

		tabbedPanel.addTab("Step 1", step1Panel);

		jLabel2.setText("<html><body>First, we need to authorize the application to access github.<br>If you don't have an account, this will create it.");

		authorizeGithubButton.setText("Authorize");
		authorizeGithubButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				authorizeGithubButtonActionPerformed(evt);
			}
		});

		javax.swing.GroupLayout step2PanelLayout = new javax.swing.GroupLayout(step2Panel);
		step2Panel.setLayout(step2PanelLayout);
		step2PanelLayout.setHorizontalGroup(
			step2PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(step2PanelLayout.createSequentialGroup()
				.addContainerGap()
				.addGroup(step2PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
					.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
					.addComponent(authorizeGithubButton))
				.addContainerGap(168, Short.MAX_VALUE))
		);
		step2PanelLayout.setVerticalGroup(
			step2PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(step2PanelLayout.createSequentialGroup()
				.addContainerGap()
				.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
				.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
				.addComponent(authorizeGithubButton)
				.addContainerGap(108, Short.MAX_VALUE))
		);

		tabbedPanel.addTab("Step 2", step2Panel);

		jLabel3.setText("<html><body>Where would you like to put the repo on your local machine?<br>A new folder will automatically be created within the directory you select.");

		fileLocation.addKeyListener(new java.awt.event.KeyAdapter() {
			public void keyReleased(java.awt.event.KeyEvent evt) {
				fileLocationKeyReleased(evt);
			}
		});

		browseFileButton.setText("Browse");
		browseFileButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				browseFileButtonActionPerformed(evt);
			}
		});

		step3ErrorLabel.setForeground(new java.awt.Color(255, 51, 0));
		step3ErrorLabel.setText("Error Label");

		javax.swing.GroupLayout step3PanelLayout = new javax.swing.GroupLayout(step3Panel);
		step3Panel.setLayout(step3PanelLayout);
		step3PanelLayout.setHorizontalGroup(
			step3PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(step3PanelLayout.createSequentialGroup()
				.addContainerGap()
				.addGroup(step3PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
					.addGroup(step3PanelLayout.createSequentialGroup()
						.addComponent(fileLocation)
						.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
						.addComponent(browseFileButton))
					.addGroup(step3PanelLayout.createSequentialGroup()
						.addGroup(step3PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
							.addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
							.addComponent(step3ErrorLabel))
						.addGap(0, 90, Short.MAX_VALUE)))
				.addContainerGap())
		);
		step3PanelLayout.setVerticalGroup(
			step3PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(step3PanelLayout.createSequentialGroup()
				.addContainerGap()
				.addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
				.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
				.addGroup(step3PanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
					.addComponent(fileLocation, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
					.addComponent(browseFileButton))
				.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
				.addComponent(step3ErrorLabel)
				.addContainerGap(88, Short.MAX_VALUE))
		);

		tabbedPanel.addTab("Step 3", step3Panel);

		nextButton.setText("Next");
		nextButton.setEnabled(false);
		nextButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				nextButtonActionPerformed(evt);
			}
		});

		backButton.setText("Back");
		backButton.setEnabled(false);
		backButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				backButtonActionPerformed(evt);
			}
		});

		cancelButton.setText("Cancel");
		cancelButton.addActionListener(new java.awt.event.ActionListener() {
			public void actionPerformed(java.awt.event.ActionEvent evt) {
				cancelButtonActionPerformed(evt);
			}
		});

		progressBar.setIndeterminate(true);

		outputWindow.setEditable(false);
		outputWindow.setColumns(20);
		outputWindow.setLineWrap(true);
		outputWindow.setRows(5);
		outputWindow.setWrapStyleWord(true);
		outputWindowScrollPanel.setViewportView(outputWindow);

		javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
		getContentPane().setLayout(layout);
		layout.setHorizontalGroup(
			layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(layout.createSequentialGroup()
				.addContainerGap()
				.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
					.addComponent(outputWindowScrollPanel)
					.addComponent(tabbedPanel)
					.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
						.addGap(0, 0, Short.MAX_VALUE)
						.addComponent(progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
						.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
						.addComponent(cancelButton)
						.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
						.addComponent(backButton)
						.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
						.addComponent(nextButton)))
				.addContainerGap())
		);
		layout.setVerticalGroup(
			layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
			.addGroup(layout.createSequentialGroup()
				.addContainerGap()
				.addComponent(tabbedPanel, javax.swing.GroupLayout.PREFERRED_SIZE, 208, javax.swing.GroupLayout.PREFERRED_SIZE)
				.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
				.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
					.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
						.addComponent(nextButton)
						.addComponent(backButton)
						.addComponent(cancelButton))
					.addComponent(progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
				.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
				.addComponent(outputWindowScrollPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 104, Short.MAX_VALUE)
				.addContainerGap())
		);

		progressBar.setVisible(false);

		pack();
	}// </editor-fold>//GEN-END:initComponents

	private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed
		this.setVisible(false);
		this.dispose();
	}//GEN-LAST:event_cancelButtonActionPerformed

	private void createForkRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createForkRadioButtonActionPerformed
		validateStep();
	}//GEN-LAST:event_createForkRadioButtonActionPerformed

	private void copyRepoRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_copyRepoRadioButtonActionPerformed
		validateStep();
	}//GEN-LAST:event_copyRepoRadioButtonActionPerformed

	private void nextButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nextButtonActionPerformed
		stepTransition(true);
	}//GEN-LAST:event_nextButtonActionPerformed

	private void backButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_backButtonActionPerformed
		stepTransition(false);
	}//GEN-LAST:event_backButtonActionPerformed

	private void authorizeGithubButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_authorizeGithubButtonActionPerformed
		// token arrives asynchronously via the callback; step 2 only validates
		// once it has been stored
		parent.authorizeGithub(false, (token) -> {
			stateOptions.githubToken = token;
			writeStatus("Obtained Github Authorization!");
			validateStep();
		});
	}//GEN-LAST:event_authorizeGithubButtonActionPerformed

	private void fileLocationKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_fileLocationKeyReleased
		validateStep();
	}//GEN-LAST:event_fileLocationKeyReleased

	private void browseFileButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_browseFileButtonActionPerformed
		JFileChooser fc = new JFileChooser();
		fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
		if(JFileChooser.APPROVE_OPTION == fc.showDialog(this, "Select Directory")) {
			try {
				fileLocation.setText(fc.getSelectedFile().getCanonicalPath());
				validateStep();
			} catch (IOException ex) {
				//
			}
		}
	}//GEN-LAST:event_browseFileButtonActionPerformed

	// Variables declaration - do not modify//GEN-BEGIN:variables
	private javax.swing.JButton authorizeGithubButton;
	private javax.swing.JButton backButton;
	private javax.swing.JButton browseFileButton;
	private javax.swing.JButton cancelButton;
	private javax.swing.JRadioButton copyRepoRadioButton;
	private javax.swing.JRadioButton createForkRadioButton;
	private javax.swing.JTextField fileLocation;
	private javax.swing.JLabel jLabel1;
	private javax.swing.JLabel jLabel2;
	private javax.swing.JLabel jLabel3;
	private javax.swing.JButton nextButton;
	private javax.swing.JTextArea outputWindow;
	private javax.swing.JScrollPane outputWindowScrollPanel;
	private javax.swing.JProgressBar progressBar;
	private javax.swing.ButtonGroup step1ButtonGroup;
	private javax.swing.JPanel step1Panel;
	private javax.swing.JPanel step2Panel;
	private javax.swing.JLabel step3ErrorLabel;
	private javax.swing.JPanel step3Panel;
	private javax.swing.JTabbedPane tabbedPanel;
	// End of variables declaration//GEN-END:variables
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.metrics.statsd; import org.apache.flink.api.common.JobID; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.MetricOptions; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.Gauge; import org.apache.flink.metrics.Metric; import org.apache.flink.metrics.MetricConfig; import org.apache.flink.metrics.MetricGroup; import org.apache.flink.metrics.SimpleCounter; import org.apache.flink.metrics.groups.UnregisteredMetricsGroup; import org.apache.flink.metrics.reporter.MetricReporter; import org.apache.flink.metrics.util.TestCounter; import org.apache.flink.metrics.util.TestHistogram; import org.apache.flink.metrics.util.TestMeter; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.metrics.MetricRegistryConfiguration; import org.apache.flink.runtime.metrics.MetricRegistryImpl; import org.apache.flink.runtime.metrics.ReporterSetup; import org.apache.flink.runtime.metrics.groups.TaskManagerJobMetricGroup; import org.apache.flink.runtime.metrics.groups.TaskManagerMetricGroup; import 
org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import org.apache.flink.util.AbstractID;
import org.apache.flink.util.TestLogger;

import org.junit.Test;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Tests for the StatsDReporter.
 */
public class StatsDReporterTest extends TestLogger {

	@Test
	public void testReplaceInvalidChars() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
		StatsDReporter reporter = new StatsDReporter();

		// ':' is not legal in a StatsD metric name and is mapped to '-'
		assertEquals("", reporter.filterCharacters(""));
		assertEquals("abc", reporter.filterCharacters("abc"));
		assertEquals("a-b--", reporter.filterCharacters("a:b::"));
	}

	/**
	 * Tests that the registered metrics' names don't contain invalid characters.
	 */
	@Test
	public void testAddingMetrics() throws Exception {
		Configuration configuration = new Configuration();
		String taskName = "testTask";
		// names deliberately contain ':' so filtering can be observed end-to-end
		String jobName = "testJob:-!ax..?";
		String hostname = "local::host:";
		String taskManagerId = "tas:kMana::ger";
		String counterName = "testCounter";

		configuration.setString(MetricOptions.SCOPE_NAMING_TASK, "<host>.<tm_id>.<job_name>");
		configuration.setString(MetricOptions.SCOPE_DELIMITER, "_");

		MetricRegistryImpl metricRegistry = new MetricRegistryImpl(
			MetricRegistryConfiguration.fromConfiguration(configuration),
			Collections.singletonList(ReporterSetup.forReporter("test", new TestingStatsDReporter())));

		// the configured '_' delimiter joins the scope components below
		char delimiter = metricRegistry.getDelimiter();

		TaskManagerMetricGroup tmMetricGroup = new TaskManagerMetricGroup(metricRegistry, hostname, taskManagerId);
		TaskManagerJobMetricGroup tmJobMetricGroup = new TaskManagerJobMetricGroup(metricRegistry, tmMetricGroup, new JobID(), jobName);
		TaskMetricGroup taskMetricGroup = new TaskMetricGroup(metricRegistry, tmJobMetricGroup, new JobVertexID(), new AbstractID(), taskName, 0, 0);

		SimpleCounter myCounter = new SimpleCounter();
		taskMetricGroup.counter(counterName, myCounter);

		List<MetricReporter> reporters = metricRegistry.getReporters();

		assertTrue(reporters.size() == 1);

		MetricReporter metricReporter = reporters.get(0);

		assertTrue("Reporter should be of type StatsDReporter", metricReporter instanceof StatsDReporter);

		TestingStatsDReporter reporter = (TestingStatsDReporter) metricReporter;

		Map<Counter, String> counters = reporter.getCounters();

		assertTrue(counters.containsKey(myCounter));

		// each scope component must have had its ':' characters filtered
		String expectedCounterName = reporter.filterCharacters(hostname)
			+ delimiter
			+ reporter.filterCharacters(taskManagerId)
			+ delimiter
			+ reporter.filterCharacters(jobName)
			+ delimiter
			+ reporter.filterCharacters(counterName);

		assertEquals(expectedCounterName, counters.get(myCounter));

		metricRegistry.shutdown().get();
	}

	/**
	 * Tests that histograms are properly reported via the StatsD reporter.
	 */
	@Test
	public void testStatsDHistogramReporting() throws Exception {
		Set<String> expectedLines = new HashSet<>(6);
		// the values below mirror TestHistogram's fixed statistics
		// (count=1, mean=4.0, min=7, max=6, stddev=5.0, quantile p -> p)
		expectedLines.add("metric.count:1|g");
		expectedLines.add("metric.mean:4.0|g");
		expectedLines.add("metric.min:7|g");
		expectedLines.add("metric.max:6|g");
		expectedLines.add("metric.stddev:5.0|g");
		expectedLines.add("metric.p75:0.75|g");
		expectedLines.add("metric.p98:0.98|g");
		expectedLines.add("metric.p99:0.99|g");
		expectedLines.add("metric.p999:0.999|g");
		expectedLines.add("metric.p95:0.95|g");
		expectedLines.add("metric.p50:0.5|g");

		testMetricAndAssert(new TestHistogram(), "metric", expectedLines);
	}

	@Test
	public void testStatsDHistogramReportingOfNegativeValues() throws Exception {
		TestHistogram histogram = new TestHistogram();
		histogram.setCount(-101);
		histogram.setMean(-104);
		histogram.setMin(-107);
		histogram.setMax(-106);
		histogram.setStdDev(-105);

		Set<String> expectedLines = new HashSet<>();
		// two lines are expected per negative statistic: presumably the reporter
		// first resets the gauge to 0 before sending the signed value, since
		// StatsD treats signed gauge values as deltas - confirm against
		// StatsDReporter's gauge handling
		expectedLines.add("metric.count:0|g");
		expectedLines.add("metric.count:-101|g");
		expectedLines.add("metric.mean:0|g");
		expectedLines.add("metric.mean:-104.0|g");
		expectedLines.add("metric.min:0|g");
		expectedLines.add("metric.min:-107|g");
		expectedLines.add("metric.max:0|g");
		expectedLines.add("metric.max:-106|g");
		expectedLines.add("metric.stddev:0|g");
		expectedLines.add("metric.stddev:-105.0|g");
		expectedLines.add("metric.p75:0.75|g");
		expectedLines.add("metric.p98:0.98|g");
		expectedLines.add("metric.p99:0.99|g");
		expectedLines.add("metric.p999:0.999|g");
		expectedLines.add("metric.p95:0.95|g");
		expectedLines.add("metric.p50:0.5|g");

		testMetricAndAssert(histogram, "metric", expectedLines);
	}

	/**
	 * Tests that meters are properly reported via the StatsD reporter.
*/ @Test public void testStatsDMetersReporting() throws Exception { Set<String> expectedLines = new HashSet<>(4); expectedLines.add("metric.rate:5.0|g"); expectedLines.add("metric.count:100|g"); testMetricAndAssert(new TestMeter(), "metric", expectedLines); } @Test public void testStatsDMetersReportingOfNegativeValues() throws Exception { Set<String> expectedLines = new HashSet<>(); expectedLines.add("metric.rate:0|g"); expectedLines.add("metric.rate:-5.3|g"); expectedLines.add("metric.count:0|g"); expectedLines.add("metric.count:-50|g"); testMetricAndAssert(new TestMeter(-50, -5.3), "metric", expectedLines); } /** * Tests that counter are properly reported via the StatsD reporter. */ @Test public void testStatsDCountersReporting() throws Exception { Set<String> expectedLines = new HashSet<>(2); expectedLines.add("metric:100|g"); testMetricAndAssert(new TestCounter(100), "metric", expectedLines); } @Test public void testStatsDCountersReportingOfNegativeValues() throws Exception { Set<String> expectedLines = new HashSet<>(); expectedLines.add("metric:0|g"); expectedLines.add("metric:-51|g"); testMetricAndAssert(new TestCounter(-51), "metric", expectedLines); } @Test public void testStatsDGaugesReporting() throws Exception { Set<String> expectedLines = new HashSet<>(2); expectedLines.add("metric:75|g"); testMetricAndAssert((Gauge) () -> 75, "metric", expectedLines); } @Test public void testStatsDGaugesReportingOfNegativeValues() throws Exception { Set<String> expectedLines = new HashSet<>(); expectedLines.add("metric:0|g"); expectedLines.add("metric:-12345|g"); testMetricAndAssert((Gauge) () -> -12345, "metric", expectedLines); } private void testMetricAndAssert(Metric metric, String metricName, Set<String> expectation) throws Exception { StatsDReporter reporter = null; DatagramSocketReceiver receiver = null; Thread receiverThread = null; long timeout = 5000; long joinTimeout = 30000; try { receiver = new DatagramSocketReceiver(); receiverThread = new 
Thread(receiver); receiverThread.start(); int port = receiver.getPort(); MetricConfig config = new MetricConfig(); config.setProperty("host", "localhost"); config.setProperty("port", String.valueOf(port)); reporter = new StatsDReporter(); ReporterSetup.forReporter("test", config, reporter); MetricGroup metricGroup = new UnregisteredMetricsGroup(); reporter.notifyOfAddedMetric(metric, metricName, metricGroup); reporter.report(); receiver.waitUntilNumLines(expectation.size(), timeout); assertEquals(expectation, receiver.getLines()); } finally { if (reporter != null) { reporter.close(); } if (receiver != null) { receiver.stop(); } if (receiverThread != null) { receiverThread.join(joinTimeout); } } } /** * Testing StatsDReporter which disables the socket creation. */ public static class TestingStatsDReporter extends StatsDReporter { @Override public void open(MetricConfig configuration) { // disable the socket creation } public Map<Counter, String> getCounters() { return counters; } } private static class DatagramSocketReceiver implements Runnable { private static final Object obj = new Object(); private final DatagramSocket socket; private final ConcurrentHashMap<String, Object> lines; private boolean running = true; public DatagramSocketReceiver() throws SocketException { socket = new DatagramSocket(); lines = new ConcurrentHashMap<>(); } public int getPort() { return socket.getLocalPort(); } public void stop() { running = false; socket.close(); } public void waitUntilNumLines(int numberLines, long timeout) throws TimeoutException { long endTimeout = System.currentTimeMillis() + timeout; long remainingTimeout = timeout; synchronized (lines) { while (numberLines > lines.size() && remainingTimeout > 0) { try { lines.wait(remainingTimeout); } catch (InterruptedException e) { // ignore interruption exceptions } } remainingTimeout = endTimeout - System.currentTimeMillis(); } if (remainingTimeout <= 0) { throw new TimeoutException("Have not received " + numberLines + " in 
time."); } } public Set<String> getLines() { return lines.keySet(); } @Override public void run() { while (running) { try { byte[] buffer = new byte[1024]; DatagramPacket packet = new DatagramPacket(buffer, buffer.length); socket.receive(packet); String line = new String(packet.getData(), 0, packet.getLength(), ConfigConstants.DEFAULT_CHARSET); lines.put(line, obj); synchronized (lines) { lines.notifyAll(); } } catch (IOException ex) { // ignore the exceptions } } } } }
/*
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
package com.facebook.imagepipeline.producers;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CancellationException;
import java.util.concurrent.atomic.AtomicBoolean;

import com.facebook.cache.common.CacheKey;
import com.facebook.common.internal.ImmutableMap;
import com.facebook.common.internal.VisibleForTesting;
import com.facebook.imagepipeline.cache.BufferedDiskCache;
import com.facebook.imagepipeline.cache.CacheKeyFactory;
import com.facebook.imagepipeline.cache.DiskCachePolicy;
import com.facebook.imagepipeline.common.ResizeOptions;
import com.facebook.imagepipeline.image.EncodedImage;
import com.facebook.imagepipeline.request.ImageRequest;
import com.facebook.imagepipeline.request.MediaVariations;

import bolts.Continuation;
import bolts.Task;

/**
 * Disk cache read producer.
 *
 * <p>This producer looks in the disk cache for variations of the original image which hasn't been
 * found in cache itself.
 *
 * <p>If an alternative image is found, then it is passed to the consumer. If it's big enough for
 * the request's {@link ResizeOptions} then the request goes no further down the pipeline. If it's
 * smaller than required then it will be passed as a non-final response.
 *
 * <p>If the image is not found or is sent as non-final, then the request is passed to the next
 * producer in the sequence. Any results that the producer returns are passed to the consumer.
 *
 * <p>This producer is used only if the media variations experiment is turned on and does nothing
 * unless the image request includes defined {@link MediaVariations} and {@link ResizeOptions}.
 */
public class MediaVariationsFallbackProducer implements Producer<EncodedImage> {

  public static final String PRODUCER_NAME = "MediaVariationsFallbackProducer";
  public static final String EXTRA_CACHED_VALUE_FOUND = ProducerConstants.EXTRA_CACHED_VALUE_FOUND;
  public static final String EXTRA_CACHED_VALUE_USED_AS_LAST = "cached_value_used_as_last";

  private final BufferedDiskCache mDefaultBufferedDiskCache;
  private final BufferedDiskCache mSmallImageBufferedDiskCache;
  private final CacheKeyFactory mCacheKeyFactory;
  private final MediaVariationsIndex mMediaVariationsIndex;
  private final DiskCachePolicy mDiskCachePolicy;
  private final Producer<EncodedImage> mInputProducer;

  public MediaVariationsFallbackProducer(
      BufferedDiskCache defaultBufferedDiskCache,
      BufferedDiskCache smallImageBufferedDiskCache,
      CacheKeyFactory cacheKeyFactory,
      MediaVariationsIndex mediaVariationsIndex,
      DiskCachePolicy diskCachePolicy,
      Producer<EncodedImage> inputProducer) {
    mDefaultBufferedDiskCache = defaultBufferedDiskCache;
    mSmallImageBufferedDiskCache = smallImageBufferedDiskCache;
    mCacheKeyFactory = cacheKeyFactory;
    mMediaVariationsIndex = mediaVariationsIndex;
    mDiskCachePolicy = diskCachePolicy;
    mInputProducer = inputProducer;
  }

  @Override
  public void produceResults(
      final Consumer<EncodedImage> consumer,
      final ProducerContext producerContext) {
    final ImageRequest imageRequest = producerContext.getImageRequest();
    final ResizeOptions resizeOptions = imageRequest.getResizeOptions();
    final MediaVariations mediaVariations = imageRequest.getMediaVariations();

    // Without disk cache, resize options or media variations there is nothing to fall back to -
    // hand the request straight to the next producer.
    if (!imageRequest.isDiskCacheEnabled() ||
        resizeOptions == null ||
        resizeOptions.height <= 0 ||
        resizeOptions.width <= 0 ||
        mediaVariations == null) {
      startInputProducer(consumer, producerContext);
      return;
    }

    producerContext.getListener().onProducerStart(producerContext.getId(), PRODUCER_NAME);

    final AtomicBoolean isCancelled = new AtomicBoolean(false);

    if (mediaVariations.getVariants() != null) {
      // Variants were provided on the request itself - no index lookup needed.
      chooseFromVariants(
          consumer,
          producerContext,
          mediaVariations,
          mediaVariations.getVariants(),
          imageRequest,
          resizeOptions,
          isCancelled);
    } else {
      // Otherwise look up previously stored variants for this media ID asynchronously.
      Task<List<MediaVariations.Variant>> cachedVariantsTask =
          mMediaVariationsIndex.getCachedVariants(mediaVariations.getMediaId());
      cachedVariantsTask.continueWith(new Continuation<List<MediaVariations.Variant>, Object>() {
        @Override
        public Object then(Task<List<MediaVariations.Variant>> task) throws Exception {
          if (task.isCancelled() || task.isFaulted()) {
            return task;
          } else {
            try {
              if (task.getResult() == null || task.getResult().isEmpty()) {
                startInputProducer(consumer, producerContext);
                return null;
              } else {
                return chooseFromVariants(
                    consumer,
                    producerContext,
                    mediaVariations,
                    task.getResult(),
                    imageRequest,
                    resizeOptions,
                    isCancelled);
              }
            } catch (Exception e) {
              // NOTE(review): failures of the index lookup are deliberately swallowed here and
              // the request is dropped rather than forwarded - confirm this is intended.
              return null;
            }
          }
        }
      });
    }

    subscribeTaskForRequestCancellation(isCancelled, producerContext);
  }

  /**
   * Scans the given variants for cached entries, picks the one closest to the requested size and
   * kicks off the disk read (or a no-op task if none is cached).
   */
  private Task<Void> chooseFromVariants(
      final Consumer<EncodedImage> consumer,
      final ProducerContext producerContext,
      final MediaVariations mediaVariations,
      final List<MediaVariations.Variant> variants,
      final ImageRequest imageRequest,
      final ResizeOptions resizeOptions,
      final AtomicBoolean isCancelled) {
    final BufferedDiskCache preferredCache =
        imageRequest.getCacheChoice() == ImageRequest.CacheChoice.SMALL ?
            mSmallImageBufferedDiskCache : mDefaultBufferedDiskCache;
    final Object callerContext = producerContext.getCallerContext();

    MediaVariations.Variant preferredVariant = null;
    CacheKey preferredCacheKey = null;
    Task<EncodedImage> diskLookupTask;
    for (int i = 0; i < variants.size(); i++) {
      final MediaVariations.Variant variant = variants.get(i);
      final CacheKey cacheKey =
          mCacheKeyFactory.getEncodedCacheKey(imageRequest, variant.getUri(), callerContext);
      if (preferredCache.containsSync(cacheKey)) {
        if (isCloserToRequest(variant, preferredVariant, resizeOptions)) {
          preferredVariant = variant;
          preferredCacheKey = cacheKey;
        }
      }
    }

    final boolean useAsLastResult;
    if (preferredCacheKey == null) {
      diskLookupTask = Task.forResult(null);
      useAsLastResult = false;
    } else {
      diskLookupTask = preferredCache.get(preferredCacheKey, isCancelled);
      // The cached variant can terminate the request only if it's big enough and the request
      // doesn't force a fetch of the exact URI.
      useAsLastResult = !mediaVariations.shouldForceRequestForSpecifiedUri() &&
          isBigEnoughForRequestedSize(preferredVariant, resizeOptions);
    }
    Continuation<EncodedImage, Void> continuation =
        onFinishDiskReads(consumer, producerContext, useAsLastResult);
    return diskLookupTask.continueWith(continuation);
  }

  private static boolean isBigEnoughForRequestedSize(
      MediaVariations.Variant variant,
      ResizeOptions resizeOptions) {
    return variant.getWidth() >= resizeOptions.width &&
        variant.getHeight() >= resizeOptions.height;
  }

  /**
   * Returns true if {@code variant} is a better match for the requested size than the current
   * {@code comparison} (which may be null, in which case any variant wins).
   */
  private static boolean isCloserToRequest(
      MediaVariations.Variant variant,
      MediaVariations.Variant comparison,
      ResizeOptions resizeOptions) {
    if (comparison == null) {
      return true;
    }

    if (isBigEnoughForRequestedSize(comparison, resizeOptions)) {
      // The incumbent already satisfies the size - prefer the smallest variant that still does.
      return variant.getWidth() < comparison.getWidth() &&
          isBigEnoughForRequestedSize(variant, resizeOptions);
    } else {
      // Neither is big enough yet - prefer the bigger one.
      return variant.getWidth() > comparison.getWidth();
    }
  }

  /**
   * Builds the continuation that consumes the disk-read result: reports to the listener, forwards
   * any cached image and decides whether the full request must still go down the pipeline.
   */
  private Continuation<EncodedImage, Void> onFinishDiskReads(
      final Consumer<EncodedImage> consumer,
      final ProducerContext producerContext,
      final boolean useAsLastResult) {
    final String requestId = producerContext.getId();
    final ProducerListener listener = producerContext.getListener();
    return new Continuation<EncodedImage, Void>() {
      @Override
      public Void then(Task<EncodedImage> task) throws Exception {
        final boolean triggerNextProducer;
        if (isTaskCancelled(task)) {
          listener.onProducerFinishWithCancellation(requestId, PRODUCER_NAME, null);
          consumer.onCancellation();
          triggerNextProducer = false;
        } else if (task.isFaulted()) {
          listener.onProducerFinishWithFailure(requestId, PRODUCER_NAME, task.getError(), null);
          // Fall through to the full request. The input producer is started exactly once via
          // triggerNextProducer below; the previous version additionally called
          // startInputProducer() here, subscribing the consumer to the input producer twice.
          triggerNextProducer = true;
        } else {
          EncodedImage cachedReference = task.getResult();
          if (cachedReference != null) {
            listener.onProducerFinishWithSuccess(
                requestId,
                PRODUCER_NAME,
                getExtraMap(listener, requestId, true, useAsLastResult));

            if (useAsLastResult) {
              consumer.onProgressUpdate(1);
            }
            consumer.onNewResult(cachedReference, useAsLastResult);
            cachedReference.close();

            triggerNextProducer = !useAsLastResult;
          } else {
            listener.onProducerFinishWithSuccess(
                requestId,
                PRODUCER_NAME,
                getExtraMap(listener, requestId, false, false));

            triggerNextProducer = true;
          }
        }

        if (triggerNextProducer) {
          startInputProducer(consumer, producerContext);
        }
        return null;
      }
    };
  }

  private void startInputProducer(
      Consumer<EncodedImage> consumer,
      ProducerContext producerContext) {
    mInputProducer
        .produceResults(new MediaVariationsConsumer(consumer, producerContext), producerContext);
  }

  private static boolean isTaskCancelled(Task<?> task) {
    return task.isCancelled() ||
        (task.isFaulted() && task.getError() instanceof CancellationException);
  }

  @VisibleForTesting
  static Map<String, String> getExtraMap(
      final ProducerListener listener,
      final String requestId,
      final boolean valueFound,
      boolean useAsLastResult) {
    if (!listener.requiresExtraMap(requestId)) {
      return null;
    }
    if (valueFound) {
      return ImmutableMap.of(
          EXTRA_CACHED_VALUE_FOUND,
          String.valueOf(true),
          EXTRA_CACHED_VALUE_USED_AS_LAST,
          String.valueOf(useAsLastResult));
    } else {
      return ImmutableMap.of(EXTRA_CACHED_VALUE_FOUND, String.valueOf(false));
    }
  }

  private void subscribeTaskForRequestCancellation(
      final AtomicBoolean isCancelled,
      ProducerContext producerContext) {
    producerContext.addCallbacks(
        new BaseProducerContextCallbacks() {
          @Override
          public void onCancellationRequested() {
            isCancelled.set(true);
          }
        });
  }

  /**
   * Delegating consumer that records every final result in the variations index so future
   * requests for the same media ID can fall back to it.
   */
  @VisibleForTesting
  class MediaVariationsConsumer extends DelegatingConsumer<EncodedImage, EncodedImage> {
    private final ProducerContext mProducerContext;

    public MediaVariationsConsumer(
        Consumer<EncodedImage> consumer, ProducerContext producerContext) {
      super(consumer);
      mProducerContext = producerContext;
    }

    @Override
    protected void onNewResultImpl(EncodedImage newResult, boolean isLast) {
      if (isLast && newResult != null) {
        storeResultInDatabase(newResult);
      }
      getConsumer().onNewResult(newResult, isLast);
    }

    private void storeResultInDatabase(EncodedImage newResult) {
      final ImageRequest imageRequest = mProducerContext.getImageRequest();
      final MediaVariations mediaVariations = imageRequest.getMediaVariations();
      if (!imageRequest.isDiskCacheEnabled() || mediaVariations == null) {
        return;
      }

      final ImageRequest.CacheChoice cacheChoice =
          mDiskCachePolicy.getCacheChoiceForResult(imageRequest, newResult);
      final CacheKey cacheKey =
          mCacheKeyFactory.getEncodedCacheKey(imageRequest, mProducerContext.getCallerContext());

      mMediaVariationsIndex
          .saveCachedVariant(mediaVariations.getMediaId(), cacheChoice, cacheKey, newResult);
    }
  }
}
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.querytransform.test;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.common.collect.ImmutableList;
import com.yahoo.component.chain.Chain;
import com.yahoo.language.Language;
import com.yahoo.language.simple.SimpleLinguistics;
import com.yahoo.prelude.Index;
import com.yahoo.prelude.IndexFacts;
import com.yahoo.prelude.IndexModel;
import com.yahoo.prelude.SearchDefinition;
import com.yahoo.prelude.hitfield.JSONString;
import com.yahoo.prelude.hitfield.XMLString;
import com.yahoo.prelude.query.AndItem;
import com.yahoo.prelude.query.Item;
import com.yahoo.prelude.query.WordItem;
import com.yahoo.prelude.querytransform.CJKSearcher;
import com.yahoo.search.Query;
import com.yahoo.search.Result;
import com.yahoo.search.Searcher;
import com.yahoo.search.query.parser.Parsable;
import com.yahoo.search.query.parser.Parser;
import com.yahoo.search.query.parser.ParserEnvironment;
import com.yahoo.search.query.parser.ParserFactory;
import com.yahoo.search.querytransform.NGramSearcher;
import com.yahoo.search.result.Hit;
import com.yahoo.search.result.HitGroup;
import com.yahoo.search.searchchain.Execution;
import org.junit.Test;

import static com.yahoo.search.searchchain.Execution.Context.createContextStub;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Tests of the NGramSearcher: query-side gram splitting and result-side gram recombining.
 *
 * @author bratseth
 */
public class NGramSearcherTestCase {

    /** Builds a search definition with one regular index and gram indexes of sizes 2, 3 and 14. */
    public IndexFacts createIndexFacts() {
        SearchDefinition definition = new SearchDefinition("default");

        Index defaultIndex = new Index("default");
        defaultIndex.setNGram(true, 3);
        defaultIndex.setDynamicSummary(true);
        definition.addIndex(defaultIndex);

        Index test = new Index("test");
        test.setHighlightSummary(true);
        definition.addIndex(test);

        Index gram2 = new Index("gram2");
        gram2.setNGram(true, 2);
        gram2.setDynamicSummary(true);
        definition.addIndex(gram2);

        Index gram3 = new Index("gram3");
        gram3.setNGram(true, 3);
        gram3.setHighlightSummary(true);
        definition.addIndex(gram3);

        Index gram14 = new Index("gram14");
        gram14.setNGram(true, 14);
        gram14.setDynamicSummary(true);
        definition.addIndex(gram14);

        return new IndexFacts(new IndexModel(definition));
    }

    private Searcher createSearcher() {
        return new NGramSearcher(new SimpleLinguistics());
    }

    public Execution createExecution() {
        return new Execution(createSearcher(), Execution.Context.createContextStub(createIndexFacts()));
    }

    /**
     * Builds an execution over two document types: "music" (unigram default index) and
     * "song" (plain default index), plus clusters covering each combination of the two.
     */
    private Execution createMixedSetupExecution() {
        SearchDefinition music = new SearchDefinition("music");
        Index musicDefault = new Index("default");
        musicDefault.addCommand("ngram 1");
        music.addIndex(musicDefault);

        SearchDefinition song = new SearchDefinition("song");
        Index songDefault = new Index("default");
        song.addIndex(songDefault);

        Map<String, List<String>> clusters = new HashMap<>();
        clusters.put("musicOnly", Collections.singletonList(music.getName()));
        clusters.put("songOnly", Collections.singletonList(song.getName()));
        clusters.put("musicAndSong", Arrays.asList(music.getName(), song.getName()));

        IndexFacts indexFacts = new IndexFacts(new IndexModel(clusters, ImmutableList.of(music, song)));
        return new Execution(createSearcher(), Execution.Context.createContextStub(indexFacts));
    }

    @Test
    public void testMixedDocTypes() {
        {
            // restricted to the non-gram type: query left alone
            Query query = new Query("?query=abc&restrict=song");
            createMixedSetupExecution().search(query);
            assertEquals("abc", query.getModel().getQueryTree().toString());
        }
        {
            // restricted to the gram type: query is split
            Query query = new Query("?query=abc&restrict=music");
            createMixedSetupExecution().search(query);
            assertEquals("AND a b c", query.getModel().getQueryTree().toString());
        }
        {
            Query query = new Query("?query=abc&search=song");
            createMixedSetupExecution().search(query);
            assertEquals("abc", query.getModel().getQueryTree().toString());
        }
        {
            Query query = new Query("?query=abc&search=music");
            createMixedSetupExecution().search(query);
            assertEquals("AND a b c", query.getModel().getQueryTree().toString());
        }
    }

    @Test
    public void testMixedClusters() {
        {
            Query query = new Query("?query=abc&search=songOnly");
            createMixedSetupExecution().search(query);
            assertEquals("abc", query.getModel().getQueryTree().toString());
        }
        {
            Query query = new Query("?query=abc&search=musicOnly");
            createMixedSetupExecution().search(query);
            assertEquals("AND a b c", query.getModel().getQueryTree().toString());
        }
        {
            // a mixed cluster narrowed by restrict behaves like the restricted type
            Query query = new Query("?query=abc&search=musicAndSong&restrict=music");
            createMixedSetupExecution().search(query);
            assertEquals("AND a b c", query.getModel().getQueryTree().toString());
        }
        {
            Query query = new Query("?query=abc&search=musicAndSong&restrict=song");
            createMixedSetupExecution().search(query);
            assertEquals("abc", query.getModel().getQueryTree().toString());
        }
    }

    @Test
    public void testNGramRewritingMixedQuery() {
        Query query = new Query("?query=foo+gram3:engul+test:bar");
        createExecution().search(query);
        assertEquals("AND foo (AND gram3:eng gram3:ngu gram3:gul) test:bar",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewritingNGramOnly() {
        Query query = new Query("?query=gram3:engul");
        createExecution().search(query);
        assertEquals("AND gram3:eng gram3:ngu gram3:gul", query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewriting2NGramsOnly() {
        Query query = new Query("?query=gram3:engul+gram2:123");
        createExecution().search(query);
        assertEquals("AND (AND gram3:eng gram3:ngu gram3:gul) (AND gram2:12 gram2:23)",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewritingShortOnly() {
        // terms shorter than the gram size are left untouched
        Query query = new Query("?query=gram3:en");
        createExecution().search(query);
        assertEquals("gram3:en", query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewritingShortInMixes() {
        Query query = new Query("?query=test:a+gram3:en");
        createExecution().search(query);
        assertEquals("AND test:a gram3:en", query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewritingPhrase() {
        Query query = new Query("?query=gram3:%22engul+a+holi%22");
        createExecution().search(query);
        assertEquals("gram3:\"eng ngu gul a hol oli\"", query.getModel().getQueryTree().toString());
    }

    /**
     * Note that single-term phrases are simplified to just the term at parse time,
     * so the ngram rewriter cannot know to keep the grams as a phrase in this case.
     */
    @Test
    public void testNGramRewritingPhraseSingleTerm() {
        Query query = new Query("?query=gram3:%22engul%22");
        createExecution().search(query);
        assertEquals("AND gram3:eng gram3:ngu gram3:gul", query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewritingAdditionalTermInfo() {
        Query query = new Query("?query=gram3:engul!50+foo+gram2:123!150");
        createExecution().search(query);
        AndItem root = (AndItem) query.getModel().getQueryTree().getRoot();
        AndItem gram3And = (AndItem) root.getItem(0);
        AndItem gram2And = (AndItem) root.getItem(2);

        // each produced gram inherits the weight and origin of its source term
        assertExtraTermInfo(50, "engul", gram3And.getItem(0));
        assertExtraTermInfo(50, "engul", gram3And.getItem(1));
        assertExtraTermInfo(50, "engul", gram3And.getItem(2));
        assertExtraTermInfo(150, "123", gram2And.getItem(0));
        assertExtraTermInfo(150, "123", gram2And.getItem(1));
    }

    private void assertExtraTermInfo(int weight, String origin, Item g) {
        WordItem gram = (WordItem) g;
        assertEquals(weight, gram.getWeight());
        assertEquals(origin, gram.getOrigin().getValue());
        assertTrue(gram.isProtected());
        assertFalse(gram.isFromQuery());
    }

    @Test
    public void testNGramRewritingExplicitDefault() {
        Query query = new Query("?query=default:engul");
        createExecution().search(query);
        assertEquals("AND default:eng default:ngu default:gul",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRewritingImplicitDefault() {
        Query query = new Query("?query=engul");
        createExecution().search(query);
        assertEquals("AND eng ngu gul", query.getModel().getQueryTree().toString());
    }

    @Test
    public void testGramsWithSegmentation() {
        Searcher searcher = createSearcher();
        assertGramsWithSegmentation(new Chain<>(searcher));
        assertGramsWithSegmentation(new Chain<>(new CJKSearcher(), searcher));
        assertGramsWithSegmentation(new Chain<>(searcher, new CJKSearcher()));
    }

    public void assertGramsWithSegmentation(Chain<Searcher> chain) {
        // "first" "second" and "third" are segments in the "test" language
        Item root = parseQuery("gram14:firstsecondthird", Query.Type.ANY);
        Query query = new Query("?query=ignored");
        query.getModel().setLanguage(Language.UNKNOWN);
        query.getModel().getQueryTree().setRoot(root);
        new Execution(chain, createContextStub(createIndexFacts())).search(query);
        assertEquals("AND gram14:firstsecondthi gram14:irstsecondthir gram14:rstsecondthird",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testGramsWithSegmentationSingleSegment() {
        Searcher searcher = createSearcher();
        assertGramsWithSegmentationSingleSegment(new Chain<>(searcher));
        assertGramsWithSegmentationSingleSegment(new Chain<>(new CJKSearcher(), searcher));
        assertGramsWithSegmentationSingleSegment(new Chain<>(searcher, new CJKSearcher()));
    }

    public void assertGramsWithSegmentationSingleSegment(Chain<Searcher> chain) {
        // "first" "second" and "third" are segments in the "test" language
        Item root = parseQuery("gram14:first", Query.Type.ANY);
        Query query = new Query("?query=ignored");
        query.getModel().setLanguage(Language.UNKNOWN);
        query.getModel().getQueryTree().setRoot(root);
        new Execution(chain, createContextStub(createIndexFacts())).search(query);
        assertEquals("gram14:first", query.getModel().getQueryTree().toString());
    }

    @Test
    public void testGramsWithSegmentationSubstringSegmented() {
        Searcher searcher = createSearcher();
        assertGramsWithSegmentationSubstringSegmented(new Chain<>(searcher));
        assertGramsWithSegmentationSubstringSegmented(new Chain<>(new CJKSearcher(), searcher));
        assertGramsWithSegmentationSubstringSegmented(new Chain<>(searcher, new CJKSearcher()));
    }

    public void assertGramsWithSegmentationSubstringSegmented(Chain<Searcher> chain) {
        // "first" "second" and "third" are segments in the "test" language
        Item root = parseQuery("gram14:afirstsecondthirdo", Query.Type.ANY);
        Query query = new Query("?query=ignored");
        query.getModel().setLanguage(Language.UNKNOWN);
        query.getModel().getQueryTree().setRoot(root);
        new Execution(chain, createContextStub(createIndexFacts())).search(query);
        assertEquals("AND gram14:afirstsecondth gram14:firstsecondthi gram14:irstsecondthir gram14:rstsecondthird gram14:stsecondthirdo",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testGramsWithSegmentationMixed() {
        Searcher searcher = createSearcher();
        assertGramsWithSegmentationMixed(new Chain<>(searcher));
        assertGramsWithSegmentationMixed(new Chain<>(new CJKSearcher(), searcher));
        assertGramsWithSegmentationMixed(new Chain<>(searcher, new CJKSearcher()));
    }

    public void assertGramsWithSegmentationMixed(Chain<Searcher> chain) {
        // "first" "second" and "third" are segments in the "test" language
        Item root = parseQuery("a gram14:afirstsecondthird b gram14:hi", Query.Type.ALL);
        Query query = new Query("?query=ignored");
        query.getModel().setLanguage(Language.UNKNOWN);
        query.getModel().getQueryTree().setRoot(root);
        new Execution(chain, createContextStub(createIndexFacts())).search(query);
        assertEquals("AND a (AND gram14:afirstsecondth gram14:firstsecondthi gram14:irstsecondthir gram14:rstsecondthird) b gram14:hi",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testGramsWithSegmentationMixedAndPhrases() {
        Searcher searcher = createSearcher();
        assertGramsWithSegmentationMixedAndPhrases(new Chain<>(searcher));
        assertGramsWithSegmentationMixedAndPhrases(new Chain<>(new CJKSearcher(), searcher));
        assertGramsWithSegmentationMixedAndPhrases(new Chain<>(searcher, new CJKSearcher()));
    }

    public void assertGramsWithSegmentationMixedAndPhrases(Chain<Searcher> chain) {
        // "first" "second" and "third" are segments in the "test" language
        Item root = parseQuery("a gram14:\"afirstsecondthird b hi\"", Query.Type.ALL);
        Query query = new Query("?query=ignored");
        query.getModel().setLanguage(Language.UNKNOWN);
        query.getModel().getQueryTree().setRoot(root);
        new Execution(chain, createContextStub(createIndexFacts())).search(query);
        assertEquals("AND a gram14:\"afirstsecondth firstsecondthi irstsecondthir rstsecondthird b hi\"",
                     query.getModel().getQueryTree().toString());
    }

    @Test
    public void testNGramRecombining() {
        Query query = new Query("?query=ignored");
        Result result = new Execution(new Chain<>(createSearcher(), new MockBackend1()),
                                      createContextStub(createIndexFacts())).search(query);

        Hit h1 = result.hits().get("hit1");
        assertEquals("Should be untouched,\u001feven if containing \u001f", h1.getField("test").toString());
        assertTrue(h1.getField("test") instanceof String);

        assertEquals("Blue red Ed A", h1.getField("gram2").toString());
        assertTrue(h1.getField("gram2") instanceof XMLString);

        assertEquals("Separators on borders work", "Blue red ed a\u001f", h1.getField("gram3").toString());
        assertTrue(h1.getField("gram3") instanceof String);

        Hit h2 = result.hits().get("hit2");
        assertEquals("katt i...morgen", h2.getField("gram3").toString());
        assertTrue(h2.getField("gram3") instanceof JSONString);

        Hit h3 = result.hits().get("hit3");
        assertEquals("\u001ffin\u001f \u001fen\u001f \u001fa\u001f", h3.getField("gram2").toString());
        assertEquals("#Logging in #Java is like that \"Judean P\u001fopul\u001far Front\" scene from \"Life of Brian\".",
                     h3.getField("gram3").toString());
    }

    /** Parses the given query string with the "test" language parser environment. */
    private Item parseQuery(String query, Query.Type type) {
        Parser queryParser = ParserFactory.newInstance(
                type, new ParserEnvironment().setIndexFacts(createIndexFacts()));
        return queryParser.parse(new Parsable().setQuery(query).setLanguage(Language.UNKNOWN)).getRoot();
    }

    /** Backend stub returning hits whose gram fields must be recombined by the searcher. */
    private static class MockBackend1 extends Searcher {

        @Override
        public Result search(Query query, Execution execution) {
            Result result = new Result(query);
            HitGroup group = new HitGroup();
            result.hits().add(group);

            Hit h1 = new Hit("hit1");
            h1.setField(Hit.SDDOCNAME_FIELD, "default");
            h1.setField("test", "Should be untouched,\u001feven if containing \u001f");
            h1.setField("gram2", new XMLString("\uFFF9Bl\uFFFAbl\uFFFBluue reed \uFFF9Ed\uFFFAed\uFFFB \uFFF9A\uFFFAa\uFFFB"));
            h1.setField("gram3", "\uFFF9Blu\uFFFAblu\uFFFBlue red ed a\u001f"); // separator on borders should not trip anything
            group.add(h1);

            Hit h2 = new Hit("hit2");
            h2.setField(Hit.SDDOCNAME_FIELD, "default");
            h2.setField("gram3", new JSONString("katatt i...mororgrgegen"));
            result.hits().add(h2);

            // Test bolding
            Hit h3 = new Hit("hit3");
            h3.setField(Hit.SDDOCNAME_FIELD, "default");
            // the result of searching for "fin en a"
            h3.setField("gram2", "\u001ffi\u001f\u001fin\u001f \u001fen\u001f \u001fa\u001f");
            // the result from Juniper from of bolding the substring "opul":
            h3.setField("gram3", "#Logoggggigining in #Javava is likike thahat \"Jududedeaean Pop\u001fopu\u001f\u001fpul\u001fulalar Froronont\" scecenene frorom \"Lifife of Bririaian\".");
            result.hits().add(h3);

            return result;
        }

    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.entries; // DO NOT modify this class. It was generated from LeafRegionEntry.cpp import java.util.UUID; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicLongFieldUpdater; import org.apache.geode.internal.cache.InternalRegion; import org.apache.geode.internal.cache.RegionEntryContext; import org.apache.geode.internal.cache.Token; import org.apache.geode.internal.cache.eviction.EvictionController; import org.apache.geode.internal.cache.eviction.EvictionNode; import org.apache.geode.internal.cache.persistence.DiskRecoveryStore; import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper; import org.apache.geode.internal.offheap.annotations.Released; import org.apache.geode.internal.offheap.annotations.Retained; import org.apache.geode.internal.offheap.annotations.Unretained; import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry; /* * macros whose definition changes this class: * * disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP * * One of the following key macros must be defined: * * key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: 
KEY_UUID key string1: * KEY_STRING1 key string2: KEY_STRING2 */
/**
 * Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
 * ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
 */
// NOTE(review): comments added below will be lost on the next regeneration; any that should
// survive must be moved into LeafRegionEntry.cpp itself.
public class VMThinLRURegionEntryOffHeapUUIDKey extends VMThinLRURegionEntryOffHeap {
  // --------------------------------------- common fields ----------------------------------------
  // Atomic updater for lastModified: allows lock-free CAS on the volatile long field below.
  private static final AtomicLongFieldUpdater<VMThinLRURegionEntryOffHeapUUIDKey> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryOffHeapUUIDKey.class, "lastModified");
  // Cached hash of the key, used by the custom concurrent hash map (see getEntryHash/setEntryHash).
  protected int hash;
  // Next entry in the hash bucket chain of CustomEntryConcurrentHashMap.
  private HashEntry<Object, Object> nextEntry;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  // --------------------------------------- offheap fields ---------------------------------------
  /**
   * All access done using OFF_HEAP_ADDRESS_UPDATER so it is used even though the compiler can not
   * tell it is.
   */
  @SuppressWarnings("unused")
  @Retained
  @Released
  private volatile long offHeapAddress;
  /**
   * I needed to add this because I wanted clear to call setValue which normally can only be called
   * while the re is synced. But if I sync in that code it causes a lock ordering deadlock with the
   * disk regions because they also get a rw lock in clear. Some hardware platforms do not support
   * CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync on the
   * RegionEntry and we will once again be deadlocked. I don't know if we support any of the
   * hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on disk
   * regions.
   */
  private static final AtomicLongFieldUpdater<VMThinLRURegionEntryOffHeapUUIDKey> OFF_HEAP_ADDRESS_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryOffHeapUUIDKey.class, "offHeapAddress");
  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // The UUID key is stored inline as its two component longs instead of holding a UUID object;
  // getKey() rebuilds the UUID on demand and getKeyForSizing() reports null because of this.
  private final long keyMostSigBits;
  private final long keyLeastSigBits;

  /**
   * Creates an entry for the given UUID key and initial value.
   *
   * @param context the region context passed through to the superclass
   * @param key the UUID key; decomposed into two longs and stored inline
   * @param value the initial value (retained per off-heap reference-counting rules)
   */
  public VMThinLRURegionEntryOffHeapUUIDKey(final RegionEntryContext context, final UUID key,
      @Retained final Object value) {
    super(context, value);
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    this.keyMostSigBits = key.getMostSignificantBits();
    this.keyLeastSigBits = key.getLeastSignificantBits();
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Value accessors delegate to OffHeapRegionEntryHelper, which interprets the off-heap address.
  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }

  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  @Unretained
  protected void setValueField(@Unretained final Object value) {
    OffHeapRegionEntryHelper.setValue(this, value);
  }

  @Override
  @Retained
  public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }

  @Override
  public long getAddress() {
    return OFF_HEAP_ADDRESS_UPDATER.get(this);
  }

  @Override
  public boolean setAddress(final long expectedAddress, long newAddress) {
    return OFF_HEAP_ADDRESS_UPDATER.compareAndSet(this, expectedAddress, newAddress);
  }

  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }

  @Override
  public void returnToPool() {
    // never implemented
  }

  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }

  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }

  @Override
  public int getEntryHash() {
    return this.hash;
  }

  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }

  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }

  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }

  // --------------------------------------- eviction code ----------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) {
    // nothing needed for LRUs with no disk
  }

  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController) {
    // OFFHEAP: getValue ok w/o incing refcount because we are synced and only getting the size
    return updateEntrySize(evictionController, getValue());
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Recomputes this entry's size via the controller and returns the change (new - old).
  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController,
      final Object value) {
    int oldSize = getEntrySize();
    int newSize = evictionController.entrySize(getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }

  @Override
  public boolean isRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }

  @Override
  public void setRecentlyUsed(RegionEntryContext context) {
    if (!isRecentlyUsed()) {
      setBits(RECENTLY_USED);
      context.incRecentlyUsed();
    }
  }

  @Override
  public void unsetRecentlyUsed() {
    // NOTE(review): clearBits appears to take the complement mask (bits to keep); it is defined
    // in a superclass not visible here — confirm the contract there before changing.
    clearBits(~RECENTLY_USED);
  }

  @Override
  public boolean isEvicted() {
    return areAnyBitsSet(EVICTED);
  }

  @Override
  public void setEvicted() {
    setBits(EVICTED);
  }

  @Override
  public void unsetEvicted() {
    // Same complement-mask convention as unsetRecentlyUsed above.
    clearBits(~EVICTED);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Intrusive doubly-linked-list pointers used by the eviction (LRU) list, plus the cached size.
  private EvictionNode nextEvictionNode;
  private EvictionNode previousEvictionNode;
  private int size;

  @Override
  public void setNext(final EvictionNode nextEvictionNode) {
    this.nextEvictionNode = nextEvictionNode;
  }

  @Override
  public EvictionNode next() {
    return this.nextEvictionNode;
  }

  @Override
  public void setPrevious(final EvictionNode previousEvictionNode) {
    this.previousEvictionNode = previousEvictionNode;
  }

  @Override
  public EvictionNode previous() {
    return this.previousEvictionNode;
  }

  @Override
  public int getEntrySize() {
    return this.size;
  }

  protected void setEntrySize(final int size) {
    this.size = size;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }

  // ----------------------------------------- key code -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKey() {
    // Rebuild the UUID from the two inline longs stored by the constructor.
    return new UUID(this.keyMostSigBits, this.keyLeastSigBits);
  }

  @Override
  public boolean isKeyEqual(final Object key) {
    // Compares against the inline longs directly, avoiding a UUID allocation per lookup.
    if (key instanceof UUID) {
      UUID uuid = (UUID) key;
      return uuid.getLeastSignificantBits() == this.keyLeastSigBits
          && uuid.getMostSignificantBits() == this.keyMostSigBits;
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
package fi.csc.microarray.client.cli; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Scanner; import javax.jms.JMSException; import javax.swing.SwingUtilities; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.action.StoreTrueArgumentAction; import net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; import net.sourceforge.argparse4j.inf.Subparsers; import net.sourceforge.argparse4j.internal.HelpScreenException; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.text.WordUtils; import org.joda.time.DateTime; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.DumperOptions.FlowStyle; import org.yaml.snakeyaml.Yaml; import fi.csc.microarray.client.dataimport.ImportItem; import fi.csc.microarray.client.operation.Operation; import fi.csc.microarray.client.operation.OperationDefinition; import fi.csc.microarray.client.operation.OperationRecord; import fi.csc.microarray.client.operation.OperationRecord.InputRecord; import fi.csc.microarray.client.operation.OperationRecord.ParameterRecord; import fi.csc.microarray.client.operation.ToolCategory; import fi.csc.microarray.client.operation.ToolModule; import fi.csc.microarray.client.operation.parameter.DecimalParameter; import fi.csc.microarray.client.operation.parameter.EnumParameter; import fi.csc.microarray.client.operation.parameter.EnumParameter.SelectionOption; import fi.csc.microarray.client.operation.parameter.IntegerParameter; import fi.csc.microarray.client.operation.parameter.MetaColnameParameter; import 
fi.csc.microarray.client.operation.parameter.Parameter; import fi.csc.microarray.client.operation.parameter.PercentageParameter; import fi.csc.microarray.client.operation.parameter.StringParameter; import fi.csc.microarray.client.tasks.Task; import fi.csc.microarray.config.ConfigurationLoader.IllegalConfigurationException; import fi.csc.microarray.config.DirectoryLayout; import fi.csc.microarray.databeans.ContentType; import fi.csc.microarray.databeans.DataBean; import fi.csc.microarray.databeans.DataBean.DataNotAvailableHandling; import fi.csc.microarray.databeans.DataBean.Link; import fi.csc.microarray.exception.MicroarrayException; import fi.csc.microarray.filebroker.ChecksumInputStream; import fi.csc.microarray.filebroker.DbSession; import fi.csc.microarray.filebroker.FileBrokerException; import fi.csc.microarray.messaging.AuthCancelledException; import fi.csc.microarray.messaging.auth.SimpleAuthenticationRequestListener; import fi.csc.microarray.util.Strings; /** * Run in project root with command * * java -cp bin/eclipse:ext/lib/* fi.csc.microarray.client.cli.CliClient * * @author klemela * */ public class CliClient { private static final String KEY_INPUTS = "inputs"; private static final String KEY_NOTES = "notes"; private static final String KEY_SIZE = "size"; private static final String KEY_VALUE = "value"; private static final String KEY_DATE = "date"; private static final String KEY_PARAMETERS = "parameters"; private static final String KEY_TOOL = "tool"; private static final String KEY_HELP = "help"; private static final String KEY_DESCRIPTION = "description"; private static final String KEY_NAME = "name"; private static final String KEY_PARAM = "parameter"; private static final String KEY_OPTIONS = "options"; private static final String KEY_MAX = "max"; private static final String KEY_MIN = "min"; private static final String KEY_DEFAULT = "default"; private static final String KEY_TYPE = "type"; private static final String KEY_OPTION = "option"; 
private static final String KEY_INTEGER = "INTEGER"; private static final String KEY_DECIMAL = "DECIMAL"; private static final String KEY_PERCENT = "PERCENT"; private static final String KEY_STRING = "STRING"; private static final String KEY_ENUM = "ENUM"; private static final String KEY_METACOLUMN_SEL = "METACOLUMN_SEL "; private static final String OPT_CONFIG = "config"; private static final String OPT_USERNAME = "username"; private static final String OPT_PASSWORD = "password"; private static final String OPT_VERBOSE = "verbose"; private static final String OPT_QUIET = "quiet"; private static final String OPT_YAML = "yaml"; private static final String OPT_WORKING_COPY = "working-copy"; private static final String OPT_LOCAL = "local"; private static final String OPT_CLOUD = "cloud"; private static final String OPT_SHOW_ID = "show-id"; private static final String CMD_INTERACTIVE = "interactive"; private static final String CMD_EXIT = "exit"; private static final String CMD_LIST_DATASETS = "list-datasets"; private static final String CMD_DATASET = "dataset"; private static final String CMD_PRINT = "print"; private static final String CMD_HISTORY = "history"; private static final String CMD_RENAME = "rename"; private static final String CMD_DELETE = "delete"; private static final String CMD_IMPORT = "import"; private static final String CMD_EXPORT = "export"; private static final String CMD_LIST_TOOLS = "list-tools"; private static final String CMD_TOOL = "tool"; private static final String CMD_RUN = "run"; private static final String CMD_SAVE_WORKFLOW = "save-workflow"; private static final String CMD_RUN_WORKFLOW = "run-workflow"; private static final String ARG_SEARCH_TERM = "search-term"; private static final String ARG_TOOL_ID = "tool-id"; private static final String ARG_DATASET = "dataset"; private static final String ARG_FILE = "file"; private static final String ARG_OLD_NAME = "old-name"; private static final String ARG_NEW_NAME = "new-name"; private static 
final String ARG_PARAMETER = "parameter"; private static final String ARG_SESSION = "session"; private static final String CMD_OPEN_SESSION = "open-session"; private static final String CMD_SAVE_SESSION = "save-session"; private static final String CMD_LIST_SESSIONS = "list-sessions"; private static final String CMD_DELETE_SESSION = "delete-session"; private static final String CMD_CLEAR_SESSION = "clear-session"; private static final String DEFAULT_WORKING_COPY = "cli-working-copy.zip"; // without headless mode OSX will show this process in the dock and grab the focus static { System.setProperty("java.awt.headless", "true"); } public static void main(String[] args) throws InvocationTargetException, InterruptedException { new CliClient(args).runCliClient(); } private Namespace nameSpace; private CliClientApplication app; private String[] args; private ArgumentParser parser; public CliClient(String[] args) { this.args = args; } private void runCliClient() throws InvocationTargetException, InterruptedException { int exitValue = 1; try { this.parse(); exitValue = 0; } catch (UserErrorException e) { System.err.println(e.getMessage()); } catch (Exception e) { e.printStackTrace(); } // shutdown in EDT to prevent EDT callbacks from running code after // messaging is closed SwingUtilities.invokeAndWait(new ShutdownRunnable(exitValue)); } public class ShutdownRunnable implements Runnable { private int exitValue; public ShutdownRunnable(int exitValue) { this.exitValue = exitValue; } @Override public void run() { if (app != null) { app.quit(); } System.exit(exitValue); } } private void parse() throws JMSException, Exception { /* * Argument '@file' will read parameters from the file. From argparse4j manual: * * "The each line of the file is treated as one argument. Please be aware * that trailing empty lines or line with only white spaces are also considered * as arguments, although it is not readily noticeable to the user. The empty * line is treated as empty string." 
*/ parser = ArgumentParsers.newArgumentParser("Chipster command line client", true, "-", "@"); addStringOption(parser, "-c", OPT_CONFIG, "chipster client configuration file"); addStringOption(parser, "-u", OPT_USERNAME, "chipster username"); addStringOption(parser, "-p", OPT_PASSWORD, "chipster password"); addStringOption(parser, "-W", OPT_WORKING_COPY, "name of the working copy session, either zip or cloud session").setDefault(DEFAULT_WORKING_COPY); addBooleanOption(parser, "-v", OPT_VERBOSE, "more verbose output"); addBooleanOption(parser, "-q", OPT_QUIET, "uppress status messages and print only requested data"); addBooleanOption(parser, "-y", OPT_YAML, "output in yaml format for programmatical access"); Subparsers subparsers = parser.addSubparsers(); subparsers.title("commands"); addCommand(subparsers, CMD_LIST_DATASETS, "list datasets"); addCommand(subparsers, CMD_DATASET, "view dataset details", ARG_DATASET); addCommand(subparsers, CMD_PRINT, "output dataset contents", ARG_DATASET); addCommand(subparsers, CMD_HISTORY, "view history, set verbose to view also source codes", ARG_DATASET); addCommand(subparsers, CMD_RENAME, "rename dataset", ARG_OLD_NAME).addArgument(ARG_NEW_NAME); addCommand(subparsers, CMD_DELETE, "delete dataset", ARG_DATASET); addCommand(subparsers, CMD_IMPORT, "import file", ARG_FILE); addCommand(subparsers, CMD_EXPORT, "export dataset to file", ARG_DATASET); addCommand(subparsers, CMD_LIST_TOOLS, "list tools, search term is optional").addArgument(ARG_SEARCH_TERM).nargs("?"); addCommand(subparsers, CMD_TOOL, "show tool details, set verbose to view parameter help texts.", ARG_TOOL_ID); Subparser run = addCommand(subparsers, CMD_RUN, "run tool"); run.addArgument(ARG_TOOL_ID).required(true); // options instead of positional arguments because these are lists run.addArgument("--" + ARG_DATASET).nargs("*").help("input dataset(s) for a tool"); run.addArgument("--" + ARG_PARAMETER).nargs("*").help("set parameters for a tool, e.g. 
parameter=VALUE"); // default help has tool-id in the end, which won't work run.usage("run [-h] tool-id [--dataset [DATASET [DATASET ...]]] [--parameter [PARAMETER [PARAMETER ...]]]"); Subparser saveWorkflow = addCommand(subparsers, CMD_SAVE_WORKFLOW, "save workflow"); saveWorkflow.addArgument(ARG_FILE).help("save workflow to this file").required(true); saveWorkflow.addArgument(ARG_DATASET).help("start saving from this dataset").required(true); Subparser runWorkflow = addCommand(subparsers, CMD_RUN_WORKFLOW, "run workflow"); runWorkflow.addArgument(ARG_FILE).help("run workflow of this file").required(true); runWorkflow.addArgument(ARG_DATASET).help("start running from this dataset").required(true); addCommand(subparsers, CMD_OPEN_SESSION, "open zip session or cloud session", ARG_SESSION, OPT_CLOUD, OPT_LOCAL); addCommand(subparsers, CMD_SAVE_SESSION, "save zip session or cloud session", ARG_SESSION, OPT_CLOUD, OPT_LOCAL, OPT_SHOW_ID); addCommand(subparsers, CMD_CLEAR_SESSION, "delete all datasets of the working copy session"); addCommand(subparsers, CMD_LIST_SESSIONS, "list cloud sessions"); addCommand(subparsers, CMD_DELETE_SESSION, "delete cloud session", ARG_SESSION, OPT_CLOUD, OPT_LOCAL); addCommand(subparsers, CMD_INTERACTIVE, "enter interactive mode"); addCommand(subparsers, CMD_EXIT, "quit interactive mode").aliases("quit"); parser.epilog("use 'COMMAND -h' to show command arguments"); // parse the command line arguments try { parseArgs(); } catch (HelpScreenException e) { // parser has printed help text, we are done return; } initClient(); String workingCopy = openWorkingCopySession(); if (isCommand(CMD_INTERACTIVE)) { Scanner scanner = new Scanner(System.in); try { printlnStatus("Chipster command line client in interactive mode, type '-h' for help or 'exit' to quit"); while(!Thread.currentThread().isInterrupted()) { // process command line args on the first round try { parseArgs(); if (isCommand(CMD_EXIT)) { break; } execute(); 
saveWorkingCopySession(workingCopy); } catch (UserErrorException e) { System.err.println(e.getMessage()); } catch (HelpScreenException e) { // skip execute() when parser shows a help text, because // it doesn't update nameSpace } printStatus(">>>"); String lineString = scanner.nextLine(); args = Strings.splitConsideringQuotes(lineString, ' ').toArray(new String[0]); } } finally { scanner.close(); } } else { execute(); saveWorkingCopySession(workingCopy); } } private boolean isBooleanOption(String option) { Boolean value = nameSpace.getBoolean(option); if (value != null) { return value; } return false; } private boolean isStringOption(String option) { return nameSpace.getString(option) != null; } private boolean isCommand(String cmd) { return nameSpace.getAttrs().containsKey(cmd); } private Argument addBooleanOption(ArgumentParser parser, String shortOption, String longOption, String help) { return addStringOption(parser, shortOption, longOption, help).action(new StoreTrueArgumentAction()); } private Argument addStringOption(ArgumentParser parser, String shortOption, String longOption, String help) { return parser.addArgument(shortOption, "--" + longOption).dest(longOption).help(help); } private Subparser addCommand(Subparsers subparsers, String command, String help, String argument, String... 
options) { Subparser subparser = addCommand(subparsers, command, help); subparser.addArgument(argument); for (String option : options) { subparser.addArgument("--" + option).dest(option).action(new StoreTrueArgumentAction()); } return subparser; } private Subparser addCommand(Subparsers subparsers, String command, String help, String argument) { Subparser subparser = addCommand(subparsers, command, help); subparser.addArgument(argument); return subparser; } private Subparser addCommand(Subparsers subparsers, String command, String help) { return subparsers.addParser(command).help(help).setDefault(command, true); } /** * @throws HelpScreenException help text printed, nameSpace is not updated! * @throws UserErrorException */ private void parseArgs() throws HelpScreenException, UserErrorException { try { nameSpace = parser.parseArgs(args); } catch (ArgumentParserException e) { if (e instanceof HelpScreenException) { throw (HelpScreenException)e; } else { throw new UserErrorException(e.getMessage()); } } } private void execute() throws JMSException, Exception { boolean yaml = isBooleanOption(OPT_YAML); if (isCommand(CMD_LIST_DATASETS)) { listDatasets(yaml); } else if (isCommand(CMD_LIST_SESSIONS)) { listSessions(yaml); } else if (isCommand(CMD_DELETE_SESSION)) { deleteSession(nameSpace.getString(ARG_SESSION)); } else if (isCommand(CMD_PRINT)) { String dataset = nameSpace.getString(ARG_DATASET); printDataset(dataset); } else if (isCommand(CMD_LIST_TOOLS)) { tools(nameSpace.getString(ARG_SEARCH_TERM), yaml); } else if (isCommand(CMD_TOOL)) { String tool = nameSpace.getString(ARG_TOOL_ID); tool(tool, yaml); } else if (isCommand(CMD_EXPORT)) { String dataset = nameSpace.getString(ARG_DATASET); exportDataset(dataset); } else if (isCommand(CMD_IMPORT)) { String filename = nameSpace.getString(ARG_FILE); importDataset(filename); } else if (isCommand(CMD_RENAME)) { renameDataset(nameSpace.getString(ARG_OLD_NAME), nameSpace.getString(ARG_NEW_NAME)); } else if 
(isCommand(CMD_RUN)) { String tool = nameSpace.getString(ARG_TOOL_ID); List<String> datasets = nameSpace.<String> getList(ARG_DATASET); List<String> parameters = nameSpace.<String> getList(ARG_PARAMETER); run(tool, datasets, parameters); } else if (isCommand(CMD_CLEAR_SESSION)) { clearSession(); } else if (isCommand(CMD_SAVE_WORKFLOW)) { String data = nameSpace.getString(ARG_DATASET); String file = nameSpace.getString(ARG_FILE); saveWorkflow(data, file); } else if (isCommand(CMD_RUN_WORKFLOW)) { String data = nameSpace.getString(ARG_DATASET); String file = nameSpace.getString(ARG_FILE); runWorkflow(data, file); } else if (isCommand(CMD_DELETE)) { deleteDataset(nameSpace.getString(ARG_DATASET)); } else if (isCommand(CMD_HISTORY)) { String dataset = nameSpace.getString(ARG_DATASET); historyOfDataset(dataset, yaml); } else if (isCommand(CMD_OPEN_SESSION)) { openSession(nameSpace.getString(ARG_SESSION)); } else if (isCommand(CMD_SAVE_SESSION)) { saveSession(nameSpace.getString(ARG_SESSION)); } else if (isCommand(CMD_DATASET)) { /* this must be after all command having "dataset" argument, because * isCommand() can't tell the difference betweeen command and * argument. 
*/ String dataset = nameSpace.getString(ARG_DATASET); viewDataset(dataset, yaml); } } private void initClient() throws UserErrorException, IOException, IllegalConfigurationException, MicroarrayException { if (!isStringOption(OPT_CONFIG)) { throw new UserErrorException("config not set"); } if (!isStringOption(OPT_USERNAME)) { throw new UserErrorException("username not set"); } if (!isStringOption(OPT_PASSWORD)) { throw new UserErrorException("password not set"); } DirectoryLayout.initialiseClientLayout(nameSpace.getString(OPT_CONFIG)); SimpleAuthenticationRequestListener auth = new SimpleAuthenticationRequestListener(nameSpace.getString(OPT_USERNAME), nameSpace.getString(OPT_PASSWORD)); app = new CliClientApplication(auth, isBooleanOption(OPT_VERBOSE), isBooleanOption(OPT_QUIET)); app.initialiseApplication(true); } private String openWorkingCopySession() throws UserErrorException, JMSException, Exception { String sessionName = nameSpace.getString(OPT_WORKING_COPY); if (isLocalSession(sessionName, false)) { File session = new File(sessionName); if (session.exists()) { // dataless session app.getSessionManager().loadSessionAndWait(session, null, true, false, false); } } else { try { String sessionId = getSessionId(sessionName); // throws UserErrorException if not found app.getSessionManager().loadSessionAndWait(null, sessionId, true, false, false); } catch (UserErrorException e) { // working copy doesn't exist, will be created when saved } } return sessionName; } private void openSession(String sessionName) throws UserErrorException, JMSException, Exception { if (isLocalSession(sessionName, true)) { File session = new File(sessionName); if (session.exists()) { app.getSessionManager().loadSessionAndWait(session, null, false, false, false); } else { throw new UserErrorException("session not found: " + sessionName); } } else { String sessionId = getSessionId(sessionName); // throws UserErrorException if not found app.getSessionManager().loadSessionAndWait(null, 
sessionId, true, false, false); } } private void checkCloudConfiguration() throws UserErrorException { if (!app.getSessionManager().areCloudSessionsEnabled()) { throw new UserErrorException("cloud sessions are disabled on this server, use local .zip sessions instead"); } } private void deleteSession(String sessionName) throws JMSException, Exception { if (isLocalSession(sessionName, true)) { File session = new File(sessionName); session.delete(); } else { String sessionId = getSessionId(sessionName); app.getSessionManager().removeRemoteSession(sessionId); } } private void saveWorkingCopySession(String workingCopy) throws Exception { if (isLocalSession(workingCopy, false)) { app.getSessionManager().saveLightweightSession(new File(workingCopy)); } else { saveSession(workingCopy); } } private void saveSession(String workingCopy) throws Exception { if (isLocalSession(workingCopy, true)) { app.getSessionManager().saveSessionAndWait(false, new File(workingCopy), null); } else { checkCloudConfiguration(); app.getSessionManager().saveSessionAndWait(true, null, workingCopy); if (isBooleanOption(OPT_SHOW_ID)) { System.out.println(app.getSessionManager().getSessionId()); } } } private boolean isLocalSession(String sessionName, boolean enableOptions) { if (enableOptions) { if (isBooleanOption(OPT_CLOUD)) { return false; } else if (isBooleanOption(OPT_LOCAL)) { return true; } } return sessionName.endsWith(".zip"); } private void listSessions(boolean yaml) throws JMSException, Exception { List<DbSession> sessions = app.getSessionManager().listRemoteSessions(); ArrayList<String> list = new ArrayList<String>(); for (DbSession session : sessions) { list.add(session.getName()); } print(list, yaml); } private void deleteDataset(String name) throws UserErrorException { DataBean bean = getDataset(name); app.deleteDatasWithoutConfirming(bean); } private void clearSession() throws MalformedURLException, FileBrokerException, AuthCancelledException { 
app.getSessionManager().clearSessionWithoutConfirming(); } private void saveWorkflow(String dataset, String filename) throws IOException, UserErrorException { DataBean bean = getDataset(dataset); app.getSelectionManager().selectSingle(bean, this); app.saveWorkflow(new File(filename)); } private void runWorkflow(String dataset, String filename) throws IOException, UserErrorException, InterruptedException { DataBean bean = getDataset(dataset); printlnStatus("Running workflow..."); app.getSelectionManager().selectSingle(bean, this); app.runWorkflowAndWait(new File(filename).toURI().toURL()); } private void historyOfDataset(String dataset, boolean yaml) throws UserErrorException { DataBean bean = getDataset(dataset); if (yaml) { System.err.println("yaml output format isn't impelemented for history"); } System.out.println(app.getHistoryText(bean, true, true, true, true, true, isBooleanOption(OPT_VERBOSE), true, true)); } private void renameDataset(String oldName, String newName) throws UserErrorException { DataBean old = getDataset(oldName); app.renameDataItem(old, newName); } private void run(String toolId, List<String> datasets, List<String> parameters) throws MicroarrayException, UserErrorException { ArrayList<DataBean> inputs = new ArrayList<>(); if (datasets != null) { for(String name : datasets) { inputs.add(getDataset(name)); } } OperationDefinition tool = getTool(toolId); Operation operation = new Operation(tool, inputs.toArray(new DataBean[0])); if (parameters != null) { for (String param : parameters) { String[] nameAndValue = param.split("="); try { String name = nameAndValue[0]; String value = nameAndValue[1]; Object valueObj = getParameterValueObject(operation.getParameter(name), value); operation.setParameter(name, valueObj); } catch (Exception e) { throw new UserErrorException("illegal parameter: " + param + " (" + e.toString() + ")"); } } } Task task = app.executeOperation(operation); printStatus("Running..."); try { while 
(app.getTaskExecutor().getTasks(true, true).contains(task)) { printStatus("."); Thread.sleep(1000); } } catch (InterruptedException e) { } printlnStatus(""); } private void printStatus(String status) { if (!isBooleanOption(OPT_QUIET)) { System.out.print(status); } } private void printlnStatus(String status) { if (!isBooleanOption(OPT_QUIET)) { System.out.println(status); } } private Object getParameterValueObject(Parameter parameter, String value) { if (parameter instanceof IntegerParameter) { //IntegerParameter integer = (IntegerParameter) parameter; return Integer.parseInt(value); } else if (parameter instanceof DecimalParameter) { //DecimalParameter number = (DecimalParameter) parameter; return Float.parseFloat(value); } else if (parameter instanceof PercentageParameter) { //PercentageParameter number = (PercentageParameter) parameter; return Integer.parseInt(value); } else if (parameter instanceof EnumParameter) { // only single selection for now EnumParameter enumParam = (EnumParameter) parameter; for (SelectionOption opt : ((SelectionOption[])enumParam.getOptions())) { if (opt.getValue().equals(value)) { return opt; } } } else if (parameter instanceof StringParameter) { //StringParameter stringParam = (StringParameter) parameter; return value; } else { throw new IllegalArgumentException("The given Parameter object, " + parameter.getID() + ", was not of recognized type!"); } return null; } private void exportDataset(String dataset) throws UserErrorException { DataBean source = getDataset(dataset); File destination = new File(dataset); app.exportToFileAndWait(source, destination); } private void importDataset(String filename) { File file = new File(filename); ContentType type = app.getDataManager().guessContentType(file); ImportItem item = new ImportItem(file, file.getName(), type); ArrayList<ImportItem> group = new ArrayList<>(); group.add(item); app.importGroupAndWait(group, null); } private void tool(String tool, boolean yaml) throws UserErrorException { 
OperationDefinition oper = getTool(tool); HashMap<String, Object> map = new HashMap<String, Object>(); map.put(KEY_TOOL, oper.getID()); map.put(KEY_NAME, oper.getFullName()); map.put(KEY_DESCRIPTION, oper.getDescription()); map.put(KEY_HELP, oper.getHelpURL()); ArrayList<HashMap<String, Object>> parameters = getParameters(oper); map.put(KEY_PARAMETERS, parameters); if (yaml) { dumpYaml(map); } else { System.out.print(StringUtils.rightPad((String) map.get(KEY_TOOL), 50)); System.out.print(StringUtils.rightPad((String) map.get(KEY_NAME), 50)); System.out.println(); System.out.println(); wrapAndPrint(map.get(KEY_DESCRIPTION).toString()); System.out.println(map.get(KEY_HELP)); System.out.println("PARAMETERS"); System.out.println(); for (HashMap<String, Object> paramMap : parameters) { System.out.print(parameterToString(paramMap)); if (isBooleanOption(OPT_VERBOSE)) { System.out.println(); wrapAndPrint((String) paramMap.get(KEY_DESCRIPTION)); } System.out.println(); } } } private void wrapAndPrint(String text) { String wrapped = WordUtils.wrap(text, 60); wrapped = " " + wrapped.replace("\n", "\n "); System.out.println(wrapped); } private ArrayList<HashMap<String, Object>> getParameters(OperationDefinition oper) { ArrayList<HashMap<String, Object>> parameters = new ArrayList<>(); for (Parameter parameter : oper.getParameters()) { HashMap<String, Object> parameterMap = new HashMap<>(); parameterMap.put(KEY_PARAM, parameter.getID()); parameterMap.put(KEY_NAME, parameter.getDisplayName()); parameterMap.put(KEY_DESCRIPTION, parameter.getDescription()); HashMap<String, Object> type = parameterToYaml(parameter); parameterMap.putAll(type); parameters.add(parameterMap); } return parameters; } private ArrayList<HashMap<String, String>> getParameters(OperationRecord oper) { ArrayList<HashMap<String, String>> parameters = new ArrayList<>(); for (ParameterRecord parameter : oper.getParameters()) { HashMap<String, String> parameterMap = new HashMap<String, String>(); 
parameterMap.put(KEY_PARAM, parameter.getNameID().getID()); parameterMap.put(KEY_NAME, parameter.getNameID().getDisplayName()); parameterMap.put(KEY_DESCRIPTION, parameter.getNameID().getDescription()); parameterMap.put(KEY_VALUE, parameter.getValue()); parameters.add(parameterMap); } return parameters; } private void tools(String searchTerm, boolean yaml) { HashMap<String, YamlModule> yamlModules = new HashMap<>(); for (ToolModule chipsterModule : app.getToolModules()) { YamlModule yamlModule = new YamlModule(); for (ToolCategory chipsterCategory : chipsterModule.getVisibleCategories()) { YamlCategory yamlCategory = new YamlCategory(); for (OperationDefinition chipsterTool : chipsterCategory.getToolList()) { YamlTool yamlTool = new YamlTool(); String toolId = chipsterTool.getID(); String toolName = chipsterTool.getDisplayName(); yamlTool.put(KEY_TOOL, toolId); yamlTool.put(KEY_NAME, toolName); if (searchTerm == null || toolId.toLowerCase().contains(searchTerm.toLowerCase()) || toolName.toLowerCase().contains(searchTerm.toLowerCase())) { yamlCategory.add(yamlTool); } } yamlModule.put(chipsterCategory.getName(), yamlCategory); } yamlModules.put(chipsterModule.getModuleName(), yamlModule); } if (yaml) { dumpYaml(yamlModules); } else { for (String yamlModuleName : yamlModules.keySet()) { YamlModule yamlModule = yamlModules.get(yamlModuleName); for (String yamlCategoryName : yamlModule.keySet()) { YamlCategory yamlCategory = yamlModule.get(yamlCategoryName); for (YamlTool yamlTool : yamlCategory) { System.out.print(StringUtils.rightPad(yamlModuleName, 20)); System.out.print(StringUtils.rightPad(yamlCategoryName, 40)); System.out.print(StringUtils.rightPad(yamlTool.get(CMD_TOOL), 50)); System.out.print(StringUtils.rightPad(yamlTool.get(KEY_NAME), 40)); System.out.println(); } } } } } // unwind nested generics private static class YamlTool extends HashMap<String, String> {} private static class YamlCategory extends ArrayList<YamlTool> {} private static class YamlModule 
extends HashMap<String, YamlCategory> {} private void printDataset(String dataset) throws IOException, UserErrorException { DataBean bean = getDataset(dataset); ChecksumInputStream stream = app.getDataManager().getContentStream(bean, DataNotAvailableHandling.EXCEPTION_ON_NA); try { IOUtils.copy(stream, System.out); } finally { stream.close(); } } private void viewDataset(String dataset, boolean yaml) throws UserErrorException { DataBean bean = getDataset(dataset); HashMap<String, Object> map = new HashMap<String, Object>(); map.put(KEY_NAME, bean.getName()); map.put(KEY_DATE, new DateTime(bean.getDate()).toString()); map.put(KEY_SIZE, bean.getSize()); // Long map.put(KEY_NOTES, bean.getNotes()); OperationRecord oper = bean.getOperationRecord(); map.put(KEY_TOOL, oper.getNameID().getID()); ArrayList<String> inputs = new ArrayList<>(); for (InputRecord input : oper.getInputRecords()) { String inputName = ""; if (input.getValue() != null) { inputName = input.getValue().getName(); } else { inputName = input.getDataId(); } inputs.add(inputName); } map.put(KEY_INPUTS, inputs); ArrayList<HashMap<String, String>> params = getParameters(oper); map.put(KEY_PARAMETERS, params); HashMap<String, ArrayList<String>> outputTools = new HashMap<>(); // group output datasets by operation for (DataBean result : bean.getLinkSources(Link.derivationalTypes())) { String outputTool = result.getOperationRecord().getNameID().getID(); if (!outputTools.containsKey(outputTool)) { outputTools.put(outputTool, new ArrayList<String>()); } outputTools.get(outputTool).add(result.getName()); } map.put("input of", outputTools); if (yaml) { dumpYaml(map); } else { System.out.println("Dataset " + map.get(KEY_NAME)); System.out.println("Date " + map.get(KEY_DATE)); System.out.println("Size " + map.get(KEY_SIZE) + " bytes"); System.out.println("Notes " + map.get(KEY_NOTES)); System.out.println("Produced by tool " + map.get(CMD_TOOL)); System.out.print( "Using inputs "); 
System.out.println(Strings.delimit(inputs, " ")); System.out.print( "Parameters "); for (HashMap<String, String> param : params) { System.out.print(param.get(KEY_PARAM) + "=" + param.get(KEY_VALUE) + " "); } System.out.println(); // new line after loop System.out.println(); System.out.print(StringUtils.rightPad("INPUT OF", 50)); System.out.print("OUTPUT DATASETS"); System.out.println(); for (String outputTool : outputTools.keySet()) { System.out.print(StringUtils.rightPad(outputTool, 50)); ArrayList<String> outputDatasets = outputTools.get(outputTool); System.out.println(Strings.delimit(outputDatasets, " ")); } } } private void listDatasets(boolean yaml) { ArrayList<String> list = new ArrayList<String>(); for (DataBean bean : app.getDataManager().databeans()) { list.add(bean.getName()); } print(list, yaml); } private void print(ArrayList<String> list, boolean yaml) { if (yaml) { dumpYaml(list); } else { for (String item : list) { System.out.println(item); } } } private void dumpYaml(Object yaml) { DumperOptions options = new DumperOptions(); options.setDefaultFlowStyle(FlowStyle.BLOCK); Yaml yamlLib = new Yaml(options); System.out.print(yamlLib.dump(yaml)); } @SuppressWarnings("unchecked") private String parameterToString(HashMap<String, Object> map) { String str = ""; str += StringUtils.rightPad((String) map.get(KEY_PARAM), 30); String type = ""; switch ((String)map.get(KEY_TYPE)) { case KEY_INTEGER: type = "integer" + minMaxDefaultToString(map); break; case KEY_DECIMAL: type = "decimal" + minMaxDefaultToString(map); break; case KEY_PERCENT: type = "percent" + minMaxDefaultToString(map); break; case KEY_STRING: type = "string" + minMaxDefaultToString(map); break; case KEY_METACOLUMN_SEL: type = "phenodata column selection" + minMaxDefaultToString(map); break; case KEY_ENUM: if (map.containsKey(KEY_MAX) && ((Integer)map.get(KEY_MAX)) > 1) { type = "multiple selection"; } else { type = "single selection"; } if (map.containsKey(KEY_DEFAULT)) { type += ", default " + 
map.get(KEY_DEFAULT); } break; } str += StringUtils.rightPad(type, 40); str += "\n"; str += " " + (String) map.get(KEY_NAME) + "\n"; if (map.containsKey(KEY_OPTIONS)) { str += "\n"; str += " " + StringUtils.rightPad("OPTION", 60) + "NAME\n"; for (HashMap<String, String> option: (ArrayList<HashMap<String, String>>)map.get(KEY_OPTIONS)) { str += " "; str += StringUtils.rightPad(option.get(KEY_OPTION), 60); str += option.get(KEY_NAME) + "\n"; //str += " " + option.get(KEY_OPTION) + "\t\t\t\t\t\t\t" + option.get(KEY_NAME) + "\n"; } } return str; } private String minMaxDefaultToString(HashMap<String, Object> map) { String str = ""; if (map.containsKey(KEY_MIN) || map.containsKey(KEY_MAX)) { str += " " + map.get(KEY_MIN) + "-" + map.get(KEY_MAX); } if (map.containsKey(KEY_DEFAULT)) { str += ", default " + map.get(KEY_DEFAULT); } return str; } private HashMap<String, Object> parameterToYaml(Parameter parameter) { HashMap<String, Object> map = new HashMap<>(); if (parameter instanceof IntegerParameter) { map.put(KEY_TYPE, KEY_INTEGER); IntegerParameter number = (IntegerParameter) parameter; map.put(KEY_DEFAULT, number.getIntegerValue()); if (number.getMinValue() != Integer.MIN_VALUE) { map.put(KEY_MIN, number.getMinValue()); } if (number.getMaxValue() != Integer.MAX_VALUE) { map.put(KEY_MAX, number.getMaxValue()); } return map; } else if (parameter instanceof DecimalParameter) { map.put(KEY_TYPE, KEY_DECIMAL); DecimalParameter number = (DecimalParameter) parameter; map.put(KEY_DEFAULT, number.getDecimalValue()); // Float if (number.getMinValue() != Float.MIN_VALUE) { map.put(KEY_MIN, number.getMinValue()); } if (number.getMaxValue() != Float.MAX_VALUE) { map.put(KEY_MAX, number.getMaxValue()); } return map; } else if (parameter instanceof PercentageParameter) { map.put(KEY_TYPE, KEY_PERCENT); PercentageParameter number = (PercentageParameter) parameter; map.put(KEY_DEFAULT, number.getIntegerValue()); if (number.getMinValue() != Integer.MIN_VALUE) { map.put(KEY_MIN, 
number.getMinValue()); } if (number.getMaxValue() != Integer.MAX_VALUE) { map.put(KEY_MAX, number.getMaxValue()); } return map; } else if (parameter instanceof StringParameter) { map.put(KEY_TYPE, KEY_STRING); map.put(KEY_DEFAULT, parameter.getValueAsString()); return map; // how about COLUMN_SEL? } else if (parameter instanceof MetaColnameParameter) { map.put(KEY_TYPE, KEY_METACOLUMN_SEL); map.put(KEY_DEFAULT, parameter.getValueAsString()); return map; } else if (parameter instanceof EnumParameter) { //also DataSelectionParameter EnumParameter enumParam = (EnumParameter) parameter; map.put(KEY_TYPE, KEY_ENUM); map.put(KEY_DEFAULT, enumParam.getValueAsString()); map.put(KEY_MIN, enumParam.getMinCount()); map.put(KEY_MAX, enumParam.getMaxCount()); map.put(KEY_OPTIONS, enumOptionsToYaml(enumParam)); return map; } else { throw new IllegalArgumentException("The given Parameter object, " + parameter.getID() + ", was not of recognized type!"); } } private ArrayList<HashMap<String, String>> enumOptionsToYaml(EnumParameter enumParam) { ArrayList<HashMap<String, String>> options = new ArrayList<>(); if (enumParam.getOptions() != null ) { for (SelectionOption opt : ((SelectionOption[])enumParam.getOptions())) { HashMap<String, String> map = new HashMap<>(); map.put(KEY_OPTION, opt.getValue()); map.put(KEY_NAME, opt.toString()); options.add(map); } } return options; } private DataBean getDataset(String name) throws UserErrorException { DataBean bean = app.getDataManager().getDataBean(name); if (bean == null) { throw new UserErrorException("dataset not found: " + name); } return bean; } private String getSessionId(String sessionName) throws JMSException, Exception { List<DbSession> sessions = app.getSessionManager().listRemoteSessions(); for (DbSession session : sessions) { if (sessionName.equals(session.getName())) { return session.getDataId(); } } throw new UserErrorException("session not found: " + sessionName); } private OperationDefinition getTool(String name) throws 
UserErrorException { OperationDefinition tool = app.getOperationDefinition(name); if (tool == null) { throw new UserErrorException("tool not found: " + name); } return tool; } }
/* The following code was generated by JFlex 1.7.0-SNAPSHOT tweaked for IntelliJ platform */ package claims.bold.intellij.avro.idl; import com.intellij.lexer.FlexLexer; import com.intellij.psi.tree.IElementType; import static com.intellij.psi.TokenType.BAD_CHARACTER; import static com.intellij.psi.TokenType.WHITE_SPACE; import static claims.bold.intellij.avro.idl.psi.AvroIdlTypes.*; /** * This class is a scanner generated by * <a href="http://www.jflex.de/">JFlex</a> 1.7.0-SNAPSHOT * from the specification file <tt>_AvroIdlLexer.flex</tt> */ public class _AvroIdlLexer implements FlexLexer { /** This character denotes the end of file */ public static final int YYEOF = -1; /** initial size of the lookahead buffer */ private static final int ZZ_BUFFERSIZE = 16384; /** lexical states */ public static final int YYINITIAL = 0; /** * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l * at the beginning of a line * l is of the form l = 2*k, k a non negative integer */ private static final int ZZ_LEXSTATE[] = { 0, 0 }; /** * Translates characters to character classes * Chosen bits are [12, 6, 3] * Total runtime size is 15488 bytes */ public static int ZZ_CMAP(int ch) { return ZZ_CMAP_A[(ZZ_CMAP_Y[(ZZ_CMAP_Z[ch>>9]<<6)|((ch>>3)&0x3f)]<<3)|(ch&0x7)]; } /* The ZZ_CMAP_Z table has 2176 entries */ static final char ZZ_CMAP_Z[] = zzUnpackCMap( "\1\0\1\1\1\2\1\3\1\4\1\5\1\6\1\7\1\10\1\11\1\12\1\13\1\14\1\15\1\16\1\17\1"+ "\20\5\21\1\22\1\23\1\24\1\21\14\25\1\26\50\25\1\27\2\25\1\30\1\31\1\32\1\33"+ "\25\25\1\34\20\21\1\35\1\36\1\37\1\40\1\41\1\42\1\43\1\21\1\44\1\45\1\46\1"+ "\21\1\47\1\50\1\51\1\52\1\53\3\21\1\25\1\54\1\55\5\21\2\25\1\56\31\21\1\25"+ "\1\57\1\21\1\60\40\21\1\61\17\21\1\62\1\63\1\64\1\65\13\21\1\66\10\21\123"+ "\25\1\67\7\25\1\70\1\71\37\21\1\25\1\71\u0582\21\1\72\u017f\21"); /* The ZZ_CMAP_Y table has 3776 entries */ static final char ZZ_CMAP_Y[] = zzUnpackCMap( 
"\1\0\1\1\1\0\1\2\1\3\1\4\1\5\1\6\1\7\1\10\1\11\1\12\1\13\1\14\1\15\1\16\1"+ "\17\3\0\1\20\1\21\1\22\1\23\2\11\1\24\3\11\1\24\71\11\1\25\1\11\1\26\1\27"+ "\1\30\1\31\2\27\16\0\1\32\1\33\1\34\1\35\2\11\1\36\11\11\1\37\21\11\1\40\1"+ "\41\23\11\1\27\1\42\3\11\1\24\1\43\1\42\4\11\1\44\1\45\4\0\1\46\1\47\1\27"+ "\3\11\2\50\1\27\1\51\1\52\1\0\1\53\5\11\1\54\2\0\1\55\1\56\1\57\13\11\1\60"+ "\1\46\1\61\1\62\1\55\1\63\1\27\1\64\1\65\3\11\3\0\1\66\12\11\1\67\1\0\1\70"+ "\1\27\1\55\1\71\3\11\1\54\1\72\1\23\2\11\1\67\1\73\1\74\1\75\2\27\3\11\1\76"+ "\10\27\1\77\1\30\6\27\1\100\2\0\1\101\1\102\6\11\1\103\2\0\1\104\1\11\1\105"+ "\1\55\2\42\1\106\1\107\1\110\2\11\1\77\1\111\1\112\1\113\1\114\1\64\1\115"+ "\1\105\1\55\1\116\1\52\1\106\1\117\1\110\2\11\1\77\1\120\1\121\1\122\1\123"+ "\1\124\1\125\1\126\1\55\1\127\1\27\1\106\1\37\1\36\2\11\1\77\1\130\1\112\1"+ "\46\1\131\1\132\1\27\1\105\1\55\1\43\1\27\1\106\1\107\1\110\2\11\1\77\1\130"+ "\1\112\1\113\1\123\1\133\1\115\1\105\1\55\1\43\1\27\1\134\1\135\1\136\1\137"+ "\1\140\1\135\1\11\1\141\1\142\1\143\1\144\1\27\1\126\1\55\1\27\1\43\1\106"+ "\1\32\1\77\2\11\1\77\1\145\1\146\1\147\1\143\1\150\1\26\1\105\1\55\2\27\1"+ "\151\1\32\1\77\2\11\1\77\1\145\1\112\1\147\1\143\1\150\1\34\1\105\1\55\1\152"+ "\1\27\1\151\1\32\1\77\4\11\1\153\1\147\1\154\1\64\1\27\1\105\1\55\1\27\1\41"+ "\1\151\1\11\1\24\1\41\2\11\1\36\1\155\1\24\1\156\1\157\1\0\1\160\1\161\1\162"+ "\1\27\1\42\5\11\1\163\1\164\1\165\1\101\1\55\1\166\4\27\1\167\1\170\1\171"+ "\1\42\1\172\1\173\1\163\1\174\1\175\1\176\1\55\1\177\4\27\1\132\2\27\1\200"+ "\1\55\1\166\1\201\1\202\1\11\1\42\3\11\1\30\1\45\1\0\1\147\1\203\1\0\1\45"+ "\3\0\1\51\1\204\7\27\5\11\1\54\1\0\1\205\1\55\1\166\1\67\1\206\1\207\1\210"+ "\1\211\1\11\1\212\1\213\1\55\1\214\4\11\1\37\1\22\5\11\1\215\51\11\1\136\1"+ "\24\1\136\5\11\1\136\4\11\1\136\1\24\1\136\1\11\1\24\7\11\1\136\10\11\1\216"+ "\4\27\2\11\2\27\12\11\1\30\1\27\1\42\114\11\1\107\2\11\1\217\2\11\1\50\11"+ 
"\11\1\135\1\132\1\27\1\11\1\32\1\220\1\27\2\11\1\220\1\27\2\11\1\221\1\27"+ "\1\11\1\32\1\222\1\27\6\11\1\223\3\0\1\224\1\225\1\55\1\166\3\27\1\226\1\55"+ "\1\166\13\11\1\27\5\11\1\227\10\11\1\230\1\27\3\11\1\30\1\0\1\2\1\0\1\2\1"+ "\126\1\55\3\11\1\230\1\30\1\27\5\11\1\116\2\0\1\57\1\200\1\55\1\166\4\27\2"+ "\11\1\165\1\2\6\11\1\203\1\101\3\0\1\113\1\55\1\166\1\55\1\166\1\44\13\27"+ "\1\231\5\11\1\223\1\0\1\231\1\116\1\55\1\166\1\27\1\232\1\2\1\27\1\233\3\11"+ "\1\104\1\210\1\55\1\71\4\11\1\67\1\0\1\2\1\27\4\11\1\223\2\0\1\27\1\55\1\234"+ "\1\55\1\71\3\11\1\230\12\27\1\235\2\0\1\236\1\237\1\27\30\11\4\0\1\101\2\27"+ "\1\100\42\11\2\230\4\11\2\230\1\11\1\240\3\11\1\230\6\11\1\32\1\175\1\241"+ "\1\30\1\242\1\116\1\11\1\30\1\241\1\30\1\243\1\244\3\27\1\245\1\27\1\44\1"+ "\132\1\27\1\246\1\247\1\51\1\250\1\43\1\44\2\27\1\11\1\30\3\11\1\50\2\27\1"+ "\0\1\51\1\251\1\0\1\252\1\27\1\253\1\41\1\155\1\254\1\31\1\255\1\11\1\256"+ "\1\257\1\260\2\27\5\11\1\132\116\27\5\11\1\24\5\11\1\24\20\11\1\30\1\261\1"+ "\262\1\27\4\11\1\37\1\22\7\11\1\44\1\27\1\64\2\11\1\24\1\27\10\24\4\0\5\27"+ "\1\44\72\27\1\263\3\27\1\42\1\212\1\254\1\30\1\42\11\11\1\24\1\264\1\42\12"+ "\11\1\215\1\257\4\11\1\230\1\42\12\11\1\24\2\27\3\11\1\50\6\27\170\11\1\230"+ "\11\27\71\11\1\30\6\27\21\11\1\30\10\27\5\11\1\230\41\11\1\30\2\11\1\55\1"+ "\265\2\27\5\11\1\165\1\100\1\266\3\11\1\64\12\11\1\200\3\27\1\44\1\11\1\41"+ "\14\11\1\267\1\116\1\27\1\11\1\50\11\27\1\11\1\270\1\271\2\11\1\54\2\27\1"+ "\132\6\11\1\116\1\27\1\272\5\11\1\223\1\0\1\51\1\27\1\55\1\166\2\0\1\272\1"+ "\52\1\55\1\71\2\11\1\67\1\176\2\11\1\165\1\0\1\2\1\27\3\11\1\30\1\102\5\11"+ "\1\54\1\0\1\252\1\44\1\55\1\166\2\27\1\161\1\273\5\11\1\104\1\101\1\27\1\271"+ "\1\274\1\55\1\166\2\11\1\24\1\275\6\11\1\207\1\276\1\227\2\27\1\277\1\11\1"+ "\54\1\300\1\27\3\301\1\27\2\24\22\27\4\11\1\54\1\302\1\55\1\166\64\11\1\116"+ "\1\27\2\11\1\24\1\303\5\11\1\116\40\27\55\11\1\230\15\11\1\26\4\27\1\24\1"+ 
"\27\1\303\1\304\1\11\1\77\1\24\1\175\1\305\15\11\1\26\3\27\1\303\54\11\1\230"+ "\2\27\10\11\1\41\6\11\5\27\1\11\1\30\2\0\2\27\1\101\1\27\1\140\2\27\1\257"+ "\3\27\1\43\1\32\20\11\1\306\1\246\1\27\1\55\1\166\1\42\2\11\1\117\1\42\2\11"+ "\1\50\1\307\12\11\1\24\3\41\1\310\1\311\2\27\1\312\1\11\1\145\2\11\1\24\2"+ "\11\1\313\1\11\1\230\1\11\1\230\4\27\17\11\1\50\10\27\6\11\1\30\20\27\1\314"+ "\20\27\3\11\1\30\6\11\1\132\5\27\3\11\1\24\2\27\3\11\1\50\6\27\3\11\1\230"+ "\4\11\1\116\1\11\1\254\5\27\23\11\1\230\1\55\1\166\52\27\1\230\1\77\4\11\1"+ "\37\1\315\2\11\1\230\25\27\2\11\1\230\1\27\3\11\1\26\10\27\7\11\1\307\10\27"+ "\1\316\1\100\1\145\1\42\2\11\1\116\1\122\4\27\3\11\1\30\20\27\6\11\1\230\1"+ "\27\2\11\1\230\1\27\2\11\1\50\21\27\11\11\1\132\66\27\1\233\6\11\1\0\1\101"+ "\3\27\1\126\1\55\2\27\1\233\5\11\1\0\1\317\2\27\3\11\1\132\1\55\1\166\1\233"+ "\3\11\1\165\1\0\1\320\1\55\10\27\1\233\5\11\1\54\1\0\1\321\1\27\1\55\1\166"+ "\42\27\1\161\1\273\72\27\1\161\1\273\56\27\1\161\1\273\4\27\5\11\1\54\1\0"+ "\1\27\1\55\1\166\14\27\1\161\1\273\64\27\1\161\1\273\42\27\55\11\1\24\22\27"+ "\14\11\1\50\63\27\5\11\1\24\72\27\7\11\1\132\4\27\1\161\1\273\34\27\1\161"+ "\1\273\64\27\10\11\1\30\1\27\1\104\4\0\1\101\1\27\1\64\1\233\1\11\14\27\1"+ "\26\153\27\1\322\1\323\2\0\1\324\1\2\3\27\1\325\22\27\1\326\67\27\12\11\1"+ "\32\10\11\1\32\1\327\1\330\1\11\1\331\1\145\7\11\1\37\1\332\2\32\3\11\1\333"+ "\1\175\1\41\1\77\51\11\1\230\3\11\1\77\2\11\1\215\3\11\1\215\2\11\1\32\3\11"+ "\1\32\2\11\1\24\3\11\1\24\3\11\1\77\3\11\1\77\2\11\1\215\1\334\6\55\1\145"+ "\3\11\1\167\1\42\1\215\1\335\1\253\1\336\1\167\1\240\1\167\2\215\1\125\1\11"+ "\1\36\1\11\1\116\1\337\1\36\1\11\1\116\50\27\32\11\1\24\5\27\106\11\1\30\1"+ "\27\33\11\1\230\74\27\1\124\3\27\14\0\20\27\36\0\2\27"); /* The ZZ_CMAP_A table has 1792 entries */ static final char ZZ_CMAP_A[] = zzUnpackCMap( "\11\10\1\2\4\1\6\10\4\0\1\2\1\0\1\11\1\0\1\7\3\0\1\35\1\36\1\5\1\32\1\44\1"+ 
"\13\1\30\1\3\1\14\11\15\1\46\1\45\1\41\1\47\1\42\1\0\1\43\4\7\1\31\3\7\1\22"+ "\4\7\1\20\14\7\1\37\1\12\1\40\1\0\1\70\1\6\1\21\1\51\1\65\1\55\1\54\1\24\1"+ "\63\1\66\1\25\2\7\1\53\1\57\1\23\1\52\1\62\1\7\1\50\1\60\1\26\1\56\1\67\1"+ "\64\1\61\1\27\1\7\1\33\1\0\1\34\1\0\6\10\1\4\2\10\1\2\1\0\4\7\4\0\1\7\2\0"+ "\1\10\7\0\1\7\4\0\1\7\5\0\7\7\1\0\2\7\4\0\4\7\16\0\5\7\7\0\1\7\1\0\1\7\1\0"+ "\5\7\1\0\2\7\2\0\4\7\10\0\1\7\1\0\3\7\1\0\1\7\1\0\4\7\1\0\13\7\1\0\3\7\1\0"+ "\5\10\2\0\6\7\1\0\7\7\1\0\1\7\15\0\1\7\1\0\15\10\1\0\1\10\1\0\2\10\1\0\2\10"+ "\1\0\1\10\3\7\5\0\5\10\6\0\1\7\4\0\3\10\5\0\3\7\5\10\12\17\4\0\2\7\1\10\13"+ "\7\1\0\1\7\7\10\2\7\2\10\1\0\4\10\2\7\2\17\3\7\2\0\1\7\7\0\1\10\1\7\1\10\6"+ "\7\3\10\2\0\11\7\3\10\1\7\6\0\2\17\6\7\4\10\2\7\2\0\2\10\1\7\11\10\1\7\3\10"+ "\1\7\5\10\2\0\1\7\3\10\4\0\1\7\1\0\6\7\4\0\13\10\1\0\4\10\6\7\3\10\1\7\2\10"+ "\1\7\7\10\2\7\2\10\2\0\2\17\1\0\3\10\1\0\10\7\2\0\2\7\2\0\6\7\1\0\1\7\3\0"+ "\4\7\2\0\1\10\1\7\7\10\2\0\2\10\2\0\3\10\1\7\5\0\2\7\1\0\5\7\4\0\3\7\4\0\2"+ "\7\1\0\2\7\1\0\2\7\1\0\2\7\2\0\1\10\1\0\5\10\4\0\2\10\2\0\3\10\3\0\1\10\7"+ "\0\4\7\1\0\1\7\7\0\2\17\2\10\3\7\1\10\2\0\1\7\1\0\2\7\1\0\3\7\2\10\1\0\3\10"+ "\2\0\1\7\15\0\2\10\2\0\1\10\1\7\1\0\6\7\3\0\3\7\1\0\4\7\3\0\2\7\1\0\1\7\1"+ "\0\2\7\3\0\2\7\3\0\2\7\4\0\5\10\3\0\3\10\1\0\4\10\2\0\1\7\6\0\1\10\4\7\1\0"+ "\5\7\3\0\1\7\7\10\1\0\2\10\5\0\2\10\3\0\2\10\1\0\3\7\1\0\2\7\5\0\3\7\2\0\1"+ "\7\3\10\1\0\4\10\1\7\1\0\4\7\1\0\1\7\4\0\1\10\4\0\6\10\1\0\1\10\7\0\12\16"+ "\2\0\2\10\4\0\1\7\1\10\2\7\7\10\4\0\10\7\1\10\2\17\7\0\2\7\1\0\1\7\2\0\2\7"+ "\1\0\1\7\2\0\1\7\6\0\4\7\1\0\3\7\1\0\1\7\1\0\1\7\2\0\2\7\1\0\3\7\2\10\1\0"+ "\2\10\1\7\2\0\5\7\1\0\1\7\1\0\6\10\2\0\2\17\2\0\4\7\2\10\13\0\1\10\1\0\1\10"+ "\1\0\1\10\4\0\2\10\5\7\3\10\6\0\1\10\1\0\7\10\1\7\2\10\4\7\3\10\1\7\3\10\2"+ "\7\7\10\3\7\4\10\5\7\14\10\1\7\1\10\2\17\4\10\2\0\3\7\1\0\7\7\2\0\3\10\1\2"+ "\11\7\3\10\3\0\2\7\2\10\4\0\1\7\1\0\2\10\4\0\4\7\10\10\3\0\1\7\3\0\2\7\1\10"+ 
"\5\0\3\10\2\0\1\7\1\10\1\7\5\0\6\7\2\0\5\10\3\7\3\0\10\10\5\7\2\17\3\0\3\7"+ "\3\10\1\0\5\10\4\7\1\10\4\7\3\10\2\7\2\0\1\7\1\0\1\7\1\0\1\7\1\0\1\7\2\0\3"+ "\7\1\0\6\7\2\0\2\7\13\2\5\10\2\1\5\10\1\2\4\0\1\7\12\0\1\2\2\0\6\10\1\0\1"+ "\10\3\0\4\10\11\0\1\7\4\0\1\7\1\0\5\7\2\0\1\7\1\0\4\7\1\0\3\7\2\0\4\7\5\0"+ "\5\7\4\0\1\7\4\0\4\7\3\10\2\7\4\0\1\2\4\0\3\7\1\0\2\10\2\0\3\7\2\17\2\7\4"+ "\0\6\10\1\0\2\7\2\0\4\7\1\0\2\7\1\10\3\7\1\10\4\7\1\10\4\7\2\10\6\7\2\16\6"+ "\0\4\7\2\10\4\0\1\7\1\10\4\0\1\10\5\7\2\10\3\0\3\7\4\0\3\7\2\10\2\0\6\7\1"+ "\0\3\10\1\0\2\10\5\0\5\7\5\0\1\7\1\10\3\7\1\0\2\7\1\0\7\7\2\0\1\10\6\0\2\7"+ "\2\0\3\7\3\0\2\7\3\0\2\7\2\0\3\10\4\0\3\7\1\0\2\7\1\0\1\7\5\0\1\10\2\0\1\7"+ "\3\0\1\7\2\0\2\7\3\10\1\0\2\10\1\0\3\10\2\0\1\10\2\0\5\10\1\0\2\17\1\10\4"+ "\7\10\0\5\10\3\0\6\10\2\0\3\10\2\0\4\10\4\0\3\10\5\0\1\7\2\0\2\7\2\0\4\7\1"+ "\0\4\7\1\0\1\7\1\0\6\7\2\0\5\7\1\0\4\7\1\0\4\7\2\0\2\17\1\0\1\7\1\0\1\7\5"+ "\0\1\7\1\0\1\7\1\0\3\7\1\0\3\7\1\0\3\7"); /** * Translates DFA states to action switch labels. 
*/ private static final int [] ZZ_ACTION = zzUnpackAction(); private static final String ZZ_ACTION_PACKED_0 = "\1\0\1\1\1\2\2\1\1\3\2\1\2\4\5\3"+ "\1\5\1\6\1\7\1\10\1\11\1\12\1\13\1\14"+ "\1\15\1\16\1\17\1\20\1\21\13\3\1\22\2\0"+ "\1\3\1\0\1\23\6\0\33\3\2\0\1\23\2\24"+ "\2\0\1\24\1\0\1\24\5\3\1\25\1\26\17\3"+ "\1\27\4\3\2\0\1\30\1\0\1\24\1\0\1\3"+ "\1\31\5\3\1\32\5\3\1\33\1\34\1\3\1\35"+ "\6\3\1\36\1\30\1\37\1\0\1\40\1\41\1\42"+ "\1\43\5\3\1\44\2\3\1\45\2\3\1\46\3\3"+ "\1\0\1\47\2\3\1\50\1\51\1\3\1\52\1\53"+ "\1\3\1\54\1\55\1\3\1\0\1\3\1\56\1\57"+ "\1\60\1\3\1\0\1\3\1\61\3\3\1\62"; private static int [] zzUnpackAction() { int [] result = new int[193]; int offset = 0; offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); return result; } private static int zzUnpackAction(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); do result[j++] = value; while (--count > 0); } return j; } /** * Translates a state to a row index in the transition table */ private static final int [] ZZ_ROWMAP = zzUnpackRowMap(); private static final String ZZ_ROWMAP_PACKED_0 = "\0\0\0\71\0\162\0\253\0\344\0\u011d\0\u0156\0\u018f"+ "\0\u01c8\0\u0201\0\u023a\0\u0273\0\u02ac\0\u02e5\0\u031e\0\71"+ "\0\71\0\71\0\71\0\71\0\71\0\71\0\71\0\71"+ "\0\71\0\71\0\71\0\71\0\u0357\0\u0390\0\u03c9\0\u0402"+ "\0\u043b\0\u0474\0\u04ad\0\u04e6\0\u051f\0\u0558\0\u0591\0\u05ca"+ "\0\u0603\0\344\0\71\0\u0156\0\71\0\u063c\0\u0675\0\u06ae"+ "\0\u06e7\0\u0720\0\u0759\0\u0792\0\u07cb\0\u0804\0\u083d\0\u0876"+ "\0\u08af\0\u08e8\0\u0921\0\u095a\0\u0993\0\u09cc\0\u0a05\0\u0a3e"+ "\0\u0a77\0\u0ab0\0\u0ae9\0\u0b22\0\u0b5b\0\u0b94\0\u0bcd\0\u0c06"+ "\0\u0c3f\0\u0c78\0\u0cb1\0\u0cea\0\u0d23\0\u0d5c\0\u0d95\0\u0dce"+ "\0\u0156\0\u01c8\0\u0e07\0\u0e40\0\u0e79\0\u0eb2\0\u0eeb\0\u0eeb"+ 
"\0\u0f24\0\u0f5d\0\u0f96\0\u0fcf\0\u1008\0\u011d\0\u011d\0\u1041"+ "\0\u107a\0\u10b3\0\u10ec\0\u1125\0\u115e\0\u1197\0\u11d0\0\u1209"+ "\0\u1242\0\u127b\0\u12b4\0\u12ed\0\u1326\0\u135f\0\u011d\0\u1398"+ "\0\u13d1\0\u140a\0\u1443\0\u147c\0\u14b5\0\u14b5\0\u14ee\0\71"+ "\0\u1527\0\u1560\0\u011d\0\u1599\0\u15d2\0\u160b\0\u1644\0\u167d"+ "\0\u011d\0\u16b6\0\u16ef\0\u1728\0\u1761\0\u179a\0\u011d\0\u011d"+ "\0\u17d3\0\u011d\0\u180c\0\u1845\0\u187e\0\u18b7\0\u18f0\0\u1929"+ "\0\u011d\0\71\0\71\0\u1962\0\u011d\0\u011d\0\u011d\0\u011d"+ "\0\u199b\0\u19d4\0\u1a0d\0\u1a46\0\u1a7f\0\u011d\0\u1ab8\0\u1af1"+ "\0\u011d\0\u1b2a\0\u1b63\0\u011d\0\u1b9c\0\u1bd5\0\u1c0e\0\u1c47"+ "\0\u011d\0\u1c80\0\u1cb9\0\u011d\0\u011d\0\u1cf2\0\u011d\0\u011d"+ "\0\u1d2b\0\u011d\0\u011d\0\u1d64\0\u1d9d\0\u1dd6\0\u011d\0\u011d"+ "\0\u011d\0\u1e0f\0\u1e48\0\u1e81\0\u011d\0\u1eba\0\u1ef3\0\u1f2c"+ "\0\u011d"; private static int [] zzUnpackRowMap() { int [] result = new int[193]; int offset = 0; offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); return result; } private static int zzUnpackRowMap(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int high = packed.charAt(i++) << 16; result[j++] = high | packed.charAt(i++); } return j; } /** * The transition table of the DFA */ private static final int [] ZZ_TRANS = zzUnpackTrans(); private static final String ZZ_TRANS_PACKED_0 = "\1\2\2\3\1\4\1\3\1\2\1\5\1\6\1\2"+ "\1\7\1\2\1\10\1\11\1\12\2\2\1\6\1\13"+ "\1\6\1\14\1\15\1\16\1\17\1\6\1\2\1\6"+ "\1\2\1\20\1\21\1\22\1\23\1\24\1\25\1\26"+ "\1\27\1\30\1\31\1\32\1\33\1\34\1\35\1\36"+ "\1\37\1\40\1\41\1\42\1\43\1\44\1\45\1\6"+ "\1\46\4\6\1\47\1\6\72\0\2\3\1\0\1\3"+ "\67\0\1\50\1\0\1\51\63\0\6\52\1\53\62\52"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ "\16\0\21\6\11\54\1\55\1\56\56\54\13\0\1\57"+ "\1\11\1\12\2\0\1\60\1\0\1\61\76\0\1\62"+ "\1\63\22\0\1\63\30\0\4\12\10\0\1\62\1\63"+ 
"\22\0\1\63\20\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\13\6\16\0\1\64\20\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\6\6\1\65"+ "\12\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\2\6\1\66\3\6\1\67\4\6\16\0\3\6\1\70"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\4\6\1\71\6\6\16\0\5\6\1\72\1\6\1\73"+ "\11\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\6\6\1\74\4\6\16\0\1\75\15\6\1\76\2\6"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ "\16\0\4\6\1\77\14\6\4\0\1\6\2\0\2\6"+ "\2\0\3\6\1\0\10\6\1\100\2\6\16\0\2\6"+ "\1\101\16\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\4\6\1\102\6\6\16\0\21\6\4\0\1\6"+ "\2\0\2\6\2\0\3\6\1\0\13\6\16\0\2\6"+ "\1\103\16\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\4\6\1\104\6\6\16\0\1\105\20\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\2\6\1\106"+ "\10\6\16\0\2\6\1\107\1\6\1\110\14\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\4\6\1\111"+ "\6\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\2\6\1\112\10\6\16\0\21\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\7\6\1\113"+ "\3\6\16\0\15\6\1\114\3\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\1\115\20\6"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ "\16\0\2\6\1\116\16\6\1\50\1\0\2\50\1\0"+ "\64\50\5\117\1\120\63\117\11\54\1\121\1\56\56\54"+ "\14\0\1\122\1\123\74\0\1\124\72\0\1\125\61\0"+ "\4\126\64\0\1\127\4\130\12\0\1\127\42\0\1\6"+ "\2\0\2\6\2\0\3\6\1\0\13\6\16\0\1\131"+ "\20\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\3\6\1\132\15\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\3\6\1\133"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\11\6\1\134\7\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\2\6\1\135"+ "\16\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\7\6\1\136\3\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\3\6\1\137"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\12\6\1\140\6\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\7\6\1\141"+ "\11\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\6\6\1\142\12\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\1\143\20\6"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ "\16\0\15\6\1\144\3\6\4\0\1\6\2\0\2\6"+ "\2\0\3\6\1\0\7\6\1\145\3\6\16\0\21\6"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ 
"\16\0\2\6\1\146\16\6\4\0\1\6\2\0\2\6"+ "\2\0\3\6\1\0\13\6\16\0\4\6\1\147\14\6"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\4\6"+ "\1\150\6\6\16\0\21\6\4\0\1\6\2\0\2\6"+ "\2\0\3\6\1\0\13\6\16\0\6\6\1\151\12\6"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ "\16\0\1\152\20\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\7\6\1\153\3\6\16\0\21\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\6\6\1\154\12\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\15\6\1\155\3\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\6\6\1\156"+ "\4\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\12\6\1\157\6\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\1\160\20\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\13\6\16\0\16\6\1\161\2\6\4\0\1\6"+ "\2\0\2\6\2\0\3\6\1\0\13\6\16\0\2\6"+ "\1\162\16\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\6\6\1\163\4\6\16\0\21\6\5\117\1\164"+ "\63\117\3\165\1\166\1\165\1\167\63\165\14\0\4\123"+ "\10\0\1\62\1\63\22\0\1\63\34\0\1\170\74\0"+ "\1\171\60\0\4\126\11\0\1\63\22\0\1\63\30\0"+ "\4\130\55\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\2\6\1\172\10\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\3\6\1\173"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\10\6\1\174\10\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\4\6\1\175"+ "\14\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\2\6\1\176\10\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\2\6\1\177"+ "\16\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\4\6\1\200\14\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\4\6\1\201"+ "\14\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\2\6\1\202\16\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\2\6\1\203"+ "\16\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\4\6\1\204\14\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\3\6\1\205"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\14\6\1\206\4\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\13\6\1\207"+ "\5\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\7\6\1\210\11\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\2\6\1\211"+ "\16\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\4\6\1\212\14\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\1\6\1\213"+ 
"\17\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\6\6\1\214\4\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\2\6\1\215"+ "\16\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\6\6\1\216\4\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\4\6\1\217"+ "\14\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\7\6\1\220\3\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\5\6\1\221"+ "\13\6\3\117\1\222\1\117\1\164\63\117\5\165\1\167"+ "\66\165\1\223\1\165\1\167\63\165\25\0\1\224\47\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\10\6\1\225"+ "\2\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\4\6\1\226\14\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\5\6\1\227\13\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\7\6\1\230\3\6\16\0\21\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\1\231\20\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\13\6\16\0\10\6\1\232\7\6\1\233\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\14\6\1\234\4\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\1\235\20\6\4\0\1\6"+ "\2\0\2\6\2\0\3\6\1\0\13\6\16\0\10\6"+ "\1\236\10\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\13\6\16\0\4\6\1\237\14\6\4\0\1\6"+ "\2\0\2\6\2\0\3\6\1\0\2\6\1\240\10\6"+ "\16\0\21\6\4\0\1\6\2\0\2\6\2\0\3\6"+ "\1\0\13\6\16\0\1\241\20\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\3\6\1\242"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\7\6\1\243\11\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\4\6\1\244\6\6\16\0"+ "\21\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\4\6\1\245\6\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\7\6\1\246"+ "\11\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\2\6\1\247\16\6\23\0\1\250\51\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\7\6\1\251"+ "\3\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\7\6\1\252\3\6\16\0\21\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\7\6\1\253\11\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\10\6\1\254\10\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\5\6\1\255\13\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\2\6\1\256\10\6\16\0\21\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\10\6\1\257"+ "\2\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\4\6\1\260\14\6\4\0"+ 
"\1\6\2\0\2\6\2\0\3\6\1\0\2\6\1\261"+ "\10\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\13\6\1\262\5\6\4\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\2\6\1\263"+ "\10\6\16\0\21\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\15\6\1\264\3\6\25\0"+ "\1\265\47\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\2\6\1\266\10\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\10\6\1\267"+ "\10\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\4\6\1\270\6\6\16\0\21\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\3\6\1\271"+ "\15\6\4\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\2\6\1\272\16\6\26\0\1\273\46\0"+ "\1\6\2\0\2\6\2\0\3\6\1\0\13\6\16\0"+ "\7\6\1\274\11\6\4\0\1\6\2\0\2\6\2\0"+ "\3\6\1\0\13\6\16\0\3\6\1\275\15\6\27\0"+ "\1\170\45\0\1\6\2\0\2\6\2\0\3\6\1\0"+ "\13\6\16\0\12\6\1\276\6\6\4\0\1\6\2\0"+ "\2\6\2\0\3\6\1\0\13\6\16\0\20\6\1\277"+ "\4\0\1\6\2\0\2\6\2\0\3\6\1\0\13\6"+ "\16\0\7\6\1\300\11\6\4\0\1\6\2\0\2\6"+ "\2\0\3\6\1\0\13\6\16\0\10\6\1\301\10\6"; private static int [] zzUnpackTrans() { int [] result = new int[8037]; int offset = 0; offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); return result; } private static int zzUnpackTrans(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); value--; do result[j++] = value; while (--count > 0); } return j; } /* error codes */ private static final int ZZ_UNKNOWN_ERROR = 0; private static final int ZZ_NO_MATCH = 1; private static final int ZZ_PUSHBACK_2BIG = 2; /* error messages for the codes above */ private static final String[] ZZ_ERROR_MSG = { "Unknown internal scanner error", "Error: could not match input", "Error: pushback value was too large" }; /** * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code> */ private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute(); private static final String ZZ_ATTRIBUTE_PACKED_0 = 
"\1\0\1\11\15\1\15\11\14\1\2\0\1\11\1\0"+ "\1\11\6\0\33\1\2\0\3\1\2\0\1\1\1\0"+ "\34\1\2\0\1\1\1\0\1\11\1\0\30\1\2\11"+ "\1\0\23\1\1\0\14\1\1\0\5\1\1\0\6\1"; private static int [] zzUnpackAttribute() { int [] result = new int[193]; int offset = 0; offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); return result; } private static int zzUnpackAttribute(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); do result[j++] = value; while (--count > 0); } return j; } /** the input device */ private java.io.Reader zzReader; /** the current state of the DFA */ private int zzState; /** the current lexical state */ private int zzLexicalState = YYINITIAL; /** this buffer contains the current text to be matched and is the source of the yytext() string */ private CharSequence zzBuffer = ""; /** the textposition at the last accepting state */ private int zzMarkedPos; /** the current text position in the buffer */ private int zzCurrentPos; /** startRead marks the beginning of the yytext() string in the buffer */ private int zzStartRead; /** endRead marks the last character in the buffer, that has been read from input */ private int zzEndRead; /** * zzAtBOL == true <=> the scanner is currently at the beginning of a line */ private boolean zzAtBOL = true; /** zzAtEOF == true <=> the scanner is at the EOF */ private boolean zzAtEOF; /** denotes if the user-EOF-code has already been executed */ private boolean zzEOFDone; /* user code: */ public _AvroIdlLexer() { this((java.io.Reader)null); } /** * Creates a new scanner * * @param in the java.io.Reader to read input from. */ public _AvroIdlLexer(java.io.Reader in) { this.zzReader = in; } /** * Unpacks the compressed character translation table. 
* * @param packed the packed character translation table * @return the unpacked character translation table */ private static char [] zzUnpackCMap(String packed) { int size = 0; for (int i = 0, length = packed.length(); i < length; i += 2) { size += packed.charAt(i); } char[] map = new char[size]; int i = 0; /* index in packed string */ int j = 0; /* index in unpacked array */ while (i < packed.length()) { int count = packed.charAt(i++); char value = packed.charAt(i++); do map[j++] = value; while (--count > 0); } return map; } public final int getTokenStart() { return zzStartRead; } public final int getTokenEnd() { return getTokenStart() + yylength(); } public void reset(CharSequence buffer, int start, int end, int initialState) { zzBuffer = buffer; zzCurrentPos = zzMarkedPos = zzStartRead = start; zzAtEOF = false; zzAtBOL = true; zzEndRead = end; yybegin(initialState); } /** * Refills the input buffer. * * @return <code>false</code>, iff there was new input. * * @exception java.io.IOException if any I/O-Error occurs */ private boolean zzRefill() throws java.io.IOException { return true; } /** * Returns the current lexical state. */ public final int yystate() { return zzLexicalState; } /** * Enters a new lexical state * * @param newState the new lexical state */ public final void yybegin(int newState) { zzLexicalState = newState; } /** * Returns the text matched by the current regular expression. */ public final CharSequence yytext() { return zzBuffer.subSequence(zzStartRead, zzMarkedPos); } /** * Returns the character at position <tt>pos</tt> from the * matched text. * * It is equivalent to yytext().charAt(pos), but faster * * @param pos the position of the character to fetch. * A value from 0 to yylength()-1. * * @return the character at position pos */ public final char yycharat(int pos) { return zzBuffer.charAt(zzStartRead+pos); } /** * Returns the length of the matched text region. 
*/ public final int yylength() { return zzMarkedPos-zzStartRead; } /** * Reports an error that occured while scanning. * * In a wellformed scanner (no or only correct usage of * yypushback(int) and a match-all fallback rule) this method * will only be called with things that "Can't Possibly Happen". * If this method is called, something is seriously wrong * (e.g. a JFlex bug producing a faulty scanner etc.). * * Usual syntax/scanner level error handling should be done * in error fallback rules. * * @param errorCode the code of the errormessage to display */ private void zzScanError(int errorCode) { String message; try { message = ZZ_ERROR_MSG[errorCode]; } catch (ArrayIndexOutOfBoundsException e) { message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR]; } throw new Error(message); } /** * Pushes the specified amount of characters back into the input stream. * * They will be read again by then next call of the scanning method * * @param number the number of characters to be read again. * This number must not be greater than yylength()! */ public void yypushback(int number) { if ( number > yylength() ) zzScanError(ZZ_PUSHBACK_2BIG); zzMarkedPos -= number; } /** * Resumes scanning until the next regular expression is matched, * the end of input is encountered or an I/O-Error occurs. 
* * @return the next token * @exception java.io.IOException if any I/O-Error occurs */ public IElementType advance() throws java.io.IOException { int zzInput; int zzAction; // cached fields: int zzCurrentPosL; int zzMarkedPosL; int zzEndReadL = zzEndRead; CharSequence zzBufferL = zzBuffer; int [] zzTransL = ZZ_TRANS; int [] zzRowMapL = ZZ_ROWMAP; int [] zzAttrL = ZZ_ATTRIBUTE; while (true) { zzMarkedPosL = zzMarkedPos; zzAction = -1; zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL; zzState = ZZ_LEXSTATE[zzLexicalState]; // set up zzAction for empty match case: int zzAttributes = zzAttrL[zzState]; if ( (zzAttributes & 1) == 1 ) { zzAction = zzState; } zzForAction: { while (true) { if (zzCurrentPosL < zzEndReadL) { zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL/*, zzEndReadL*/); zzCurrentPosL += Character.charCount(zzInput); } else if (zzAtEOF) { zzInput = YYEOF; break zzForAction; } else { // store back cached positions zzCurrentPos = zzCurrentPosL; zzMarkedPos = zzMarkedPosL; boolean eof = zzRefill(); // get translated positions and possibly new buffer zzCurrentPosL = zzCurrentPos; zzMarkedPosL = zzMarkedPos; zzBufferL = zzBuffer; zzEndReadL = zzEndRead; if (eof) { zzInput = YYEOF; break zzForAction; } else { zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL/*, zzEndReadL*/); zzCurrentPosL += Character.charCount(zzInput); } } int zzNext = zzTransL[ zzRowMapL[zzState] + ZZ_CMAP(zzInput) ]; if (zzNext == -1) break zzForAction; zzState = zzNext; zzAttributes = zzAttrL[zzState]; if ( (zzAttributes & 1) == 1 ) { zzAction = zzState; zzMarkedPosL = zzCurrentPosL; if ( (zzAttributes & 8) == 8 ) break zzForAction; } } } // store back cached position zzMarkedPos = zzMarkedPosL; if (zzInput == YYEOF && zzStartRead == zzCurrentPos) { zzAtEOF = true; return null; } else { switch (zzAction < 0 ? 
zzAction : ZZ_ACTION[zzAction]) { case 1: { return BAD_CHARACTER; } case 51: break; case 2: { return WHITE_SPACE; } case 52: break; case 3: { return IDENTIFIER; } case 53: break; case 4: { return INT_LITERAL; } case 54: break; case 5: { return LEFT_BRACE; } case 55: break; case 6: { return RIGHT_BRACE; } case 56: break; case 7: { return LEFT_PAREN; } case 57: break; case 8: { return RIGHT_PAREN; } case 58: break; case 9: { return LEFT_BRACKET; } case 59: break; case 10: { return RIGHT_BRACKET; } case 60: break; case 11: { return LEFT_ANGLE; } case 61: break; case 12: { return RIGHT_ANGLE; } case 62: break; case 13: { return AT; } case 63: break; case 14: { return COMMA; } case 64: break; case 15: { return SEMICOLON; } case 65: break; case 16: { return COLON; } case 66: break; case 17: { return EQUALS; } case 67: break; case 18: { return LINE_COMMENT; } case 68: break; case 19: { return STRING_LITERAL; } case 69: break; case 20: { return FLOAT_LITERAL; } case 70: break; case 21: { return INT; } case 71: break; case 22: { return IDL; } case 72: break; case 23: { return MAP; } case 73: break; case 24: { return BLOCK_COMMENT; } case 74: break; case 25: { return NULL; } case 75: break; case 26: { return TRUE; } case 76: break; case 27: { return LONG; } case 77: break; case 28: { return ENUM; } case 78: break; case 29: { return DATE; } case 79: break; case 30: { return VOID; } case 80: break; case 31: { return DOC_COMMENT; } case 81: break; case 32: { return ARRAY; } case 82: break; case 33: { return FALSE; } case 83: break; case 34: { return FIXED; } case 84: break; case 35: { return FLOAT; } case 85: break; case 36: { return BYTES; } case 86: break; case 37: { return ERROR; } case 87: break; case 38: { return UNION; } case 88: break; case 39: { return IMPORT; } case 89: break; case 40: { return THROWS; } case 90: break; case 41: { return RECORD; } case 91: break; case 42: { return ONEWAY; } case 92: break; case 43: { return DOUBLE; } case 93: break; case 44: { return 
STRING; } case 94: break; case 45: { return SCHEMA; } case 95: break; case 46: { return TIME; } case 96: break; case 47: { return BOOLEAN; } case 97: break; case 48: { return DECIMAL; } case 98: break; case 49: { return PROTOCOL; } case 99: break; case 50: { return TIMESTAMP; } case 100: break; default: zzScanError(ZZ_NO_MATCH); } } } } }
package jas.arith;

import jas.structure.RingElem;

import java.math.BigInteger;

/**
 * ModLong class with RingElem interface. Objects of this class are immutable.
 *
 * @author Heinz Kredel
 * @see ModInteger
 */
public final class ModLong implements RingElem<ModLong>, Modular {
    /**
     * ModLongRing reference.
     */
    public final ModLongRing ring;

    /**
     * Value part of the element data structure.
     * Always normalized into the range [0, ring.modul).
     */
    public final long val;

    /**
     * The constructor creates a ModLong object from a ModLongRing and a value
     * part.
     *
     * @param m ModLongRing.
     * @param a java.math.BigInteger.
     */
    public ModLong(ModLongRing m, BigInteger a) {
        this(m, a.mod(m.getModul()).longValue());
    }

    /**
     * The constructor creates a ModLong object from a ModLongRing and a long
     * value part.
     *
     * @param m ModLongRing.
     * @param a long.
     */
    public ModLong(ModLongRing m, long a) {
        ring = m;
        long v = a % ring.modul;
        // Java's % yields a negative remainder for negative a; shift into [0, modul).
        val = (v >= 0L ? v : v + ring.modul);
    }

    /**
     * The constructor creates a ModLong object from a ModLongRing and a String
     * value part.
     *
     * @param m ModLongRing.
     * @param s String, a decimal long value (surrounding whitespace is ignored).
     */
    public ModLong(ModLongRing m, String s) {
        // Long.parseLong replaces the deprecated Long(String) boxing constructor
        // (and the now-unneeded private Long-valued delegate constructor).
        this(m, Long.parseLong(s.trim()));
    }

    /**
     * Get the corresponding element factory.
     *
     * @return factory for this Element.
     */
    public ModLongRing factory() {
        return ring;
    }

    /**
     * Return a symmetric JasBigInteger from this Element, i.e. a representative
     * in (-modul/2, modul/2].
     *
     * @return a symmetric JasBigInteger of this.
     */
    public JasBigInteger getSymmetricInteger() {
        long v = val;
        // Test 2*val > modul without the long overflow of (val + val):
        // since 0 <= val < modul, (modul - val) never overflows.
        if (val > ring.modul - val) {
            // val > modul/2, make symmetric to 0
            v = val - ring.modul;
        }
        return new JasBigInteger(v);
    }

    /**
     * Is ModLong number zero.
     *
     * @return If this is 0 then true is returned, else false.
     * @see jas.structure.RingElem#isZERO()
     */
    public boolean isZERO() {
        return val == 0L;
    }

    /**
     * Is ModLong number one.
     *
     * @return If this is 1 then true is returned, else false.
     * @see jas.structure.RingElem#isONE()
     */
    public boolean isONE() {
        return val == 1L;
    }

    /**
     * Is ModLong number a unit.
     *
     * @return If this is a unit then true is returned, else false.
     * @see jas.structure.RingElem#isUnit()
     */
    public boolean isUnit() {
        if (isZERO()) {
            return false;
        }
        // In a field every non-zero element is invertible.
        if (ring.isField()) {
            return true;
        }
        long g = gcd(ring.modul, val);
        return (g == 1L || g == -1L);
    }

    /**
     * ModLong comparison.
     *
     * @param b ModLong.
     * @return sign(this - b); if the rings differ, b's value is first reduced
     *         modulo this ring's modulus.
     */
    @Override
    public int compareTo(ModLong b) {
        long v = b.val;
        if (ring != b.ring) {
            v = v % ring.modul;
        }
        if (val > v) {
            return 1;
        }
        return (val < v ? -1 : 0);
    }

    /**
     * Comparison with any other object.
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object b) {
        return b instanceof ModLong && (0 == compareTo((ModLong) b));
    }

    /**
     * Hash code for this ModLong.
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        return (int) val;
    }

    /**
     * ModLong absolute value.
     *
     * @return the absolute value of this.
     * @see jas.structure.RingElem#abs()
     */
    public ModLong abs() {
        // val is normalized non-negative in the constructor, so the negative
        // branch is defensive only.
        return new ModLong(ring, (val < 0 ? -val : val));
    }

    /**
     * ModLong negative.
     *
     * @return -this.
     * @see jas.structure.RingElem#negate()
     */
    public ModLong negate() {
        return new ModLong(ring, -val);
    }

    /**
     * ModLong signum.
     *
     * @return signum(this).
     * @see jas.structure.RingElem#signum()
     */
    public int signum() {
        if (val > 0L) {
            return 1;
        }
        return (val < 0L ? -1 : 0);
    }

    /**
     * ModLong subtraction.
     *
     * @param S ModLong.
     * @return this-S.
     */
    public ModLong subtract(ModLong S) {
        return new ModLong(ring, val - S.val);
    }

    /**
     * ModLong divide.
     *
     * @param S ModLong.
     * @return this/S.
     * @throws ArithmeticException if S is not invertible and does not divide this.
     */
    public ModLong divide(ModLong S) {
        try {
            return multiply(S.inverse());
        } catch (ArithmeticException e) {
            try {
                // S is not invertible; exact long division may still succeed.
                if ((val % S.val) == 0L) {
                    return new ModLong(ring, val / S.val);
                }
            } catch (ArithmeticException a) {
                // S.val == 0: fall through to the common error below.
            }
            // Keep the exception type callers already catch, but attach a
            // message and the original cause instead of a bare exception.
            ArithmeticException ex = new ArithmeticException(
                    "not divisible: " + val + " / " + S.val + " mod " + ring.modul);
            ex.initCause(e);
            throw ex;
        }
    }

    /**
     * ModLong inverse.
     *
     * @return S with S*this = 1 mod modul, if defined.
     * @throws ArithmeticException if this is zero or not coprime to the modulus
     *         (propagated from modInverse).
     * @see jas.structure.RingElem#inverse()
     */
    public ModLong inverse() /*throws NotInvertibleException*/ {
        // The former catch block recomputed gcd(val, ring.modul) and discarded
        // the result before rethrowing; modInverse already throws a descriptive
        // ArithmeticException, so we simply let it propagate.
        return new ModLong(ring, modInverse(val, ring.modul));
    }

    /**
     * ModLong remainder.
     *
     * @param S ModLong.
     * @return remainder(this, S).
     * @throws ArithmeticException if S is null or zero.
     */
    public ModLong remainder(ModLong S) {
        if (S == null || S.isZERO()) {
            throw new ArithmeticException("division by zero");
        }
        // isUnit() subsumes the former separate isONE() check: division by a
        // unit is exact, so the remainder is zero.
        if (S.isUnit()) {
            return ring.getZERO();
        }
        return new ModLong(ring, val % S.val);
    }

    /**
     * ModLong multiply.
     *
     * <p>NOTE(review): val * S.val is computed in long arithmetic; presumably
     * ModLongRing bounds the modulus so this cannot overflow — confirm.
     *
     * @param S ModLong.
     * @return this*S.
     */
    public ModLong multiply(ModLong S) {
        return new ModLong(ring, val * S.val);
    }

    /**
     * ModLong summation.
     *
     * @param S ModLong.
     * @return this+S.
     */
    public ModLong sum(ModLong S) {
        return new ModLong(ring, val + S.val);
    }

    /**
     * ModLong greatest common divisor.
     *
     * @param S ModLong.
     * @return gcd(this, S).
     */
    public ModLong gcd(ModLong S) {
        if (S.isZERO()) {
            return this;
        }
        if (isZERO()) {
            return S;
        }
        if (isUnit() || S.isUnit()) {
            return ring.getONE();
        }
        return new ModLong(ring, gcd(val, S.val));
    }

    /**
     * ModLong extended greatest common divisor.
     *
     * @param S ModLong.
     * @return [ gcd(this,S), a, b ] with a*this + b*S = gcd(this,S).
     */
    public ModLong[] egcd(ModLong S) {
        ModLong[] ret = new ModLong[3];
        ret[0] = null;
        ret[1] = null;
        ret[2] = null;
        if (S == null || S.isZERO()) {
            ret[0] = this;
            return ret;
        }
        if (isZERO()) {
            ret[0] = S;
            return ret;
        }
        if (isUnit() || S.isUnit()) {
            ret[0] = ring.getONE();
            if (isUnit() && S.isUnit()) {
                // Choose a = 1 and solve (1 - 1*this)/S for b.
                ret[1] = ring.getONE();
                ModLong x = ret[0].subtract(ret[1].multiply(this));
                ret[2] = x.divide(S);
                return ret;
            }
            if (isUnit()) {
                ret[1] = this.inverse();
                ret[2] = ring.getZERO();
                return ret;
            }
            // S.isUnit() holds here.
            ret[1] = ring.getZERO();
            ret[2] = S.inverse();
            return ret;
        }
        // Iterative extended Euclid on the long values, tracking both cofactors.
        long q = this.val;
        long r = S.val;
        long c1 = 1L;
        long d1 = 0L;
        long c2 = 0L;
        long d2 = 1L;
        long x1;
        long x2;
        while (r != 0L) {
            long a = q / r;
            long b = q % r;
            q = a;
            x1 = c1 - q * d1;
            x2 = c2 - q * d2;
            c1 = d1;
            c2 = d2;
            d1 = x1;
            d2 = x2;
            q = r;
            r = b;
        }
        ret[0] = new ModLong(ring, q);
        ret[1] = new ModLong(ring, c1);
        ret[2] = new ModLong(ring, c2);
        return ret;
    }

    /**
     * Long greatest common divisor.
     *
     * @param T long.
     * @param S long.
     * @return gcd(T, S).
     */
    long gcd(long T, long S) {
        if (S == 0L) {
            return T;
        }
        if (T == 0L) {
            return S;
        }
        long a = T;
        long b = S;
        while (b != 0L) {
            long r = a % b;
            a = b;
            b = r;
        }
        return a;
    }

    /**
     * Long half extended greatest common divisor.
     *
     * @param T long.
     * @param S long.
     * @return [ gcd(T,S), a ] with a*T congruent to gcd(T,S) mod S.
     */
    long[] hegcd(long T, long S) {
        long[] ret = new long[2];
        if (S == 0L) {
            ret[0] = T;
            ret[1] = 1L;
            return ret;
        }
        if (T == 0L) {
            ret[0] = S;
            ret[1] = 0L;
            return ret;
        }
        // Track only the T-cofactor; the S-cofactor is not needed for modInverse.
        long a = T;
        long b = S;
        long a1 = 1L;
        long b1 = 0L;
        while (b != 0L) {
            long q = a / b;
            long r = a % b;
            a = b;
            b = r;
            long r1 = a1 - q * b1;
            a1 = b1;
            b1 = r1;
        }
        if (a1 < 0L) {
            // Normalize the cofactor into [0, S).
            a1 += S;
        }
        ret[0] = a;
        ret[1] = a1;
        return ret;
    }

    /**
     * Long modular inverse.
     *
     * @param T long.
     * @param m long.
     * @return a with a*T = 1 mod m.
     * @throws ArithmeticException if T is zero, not coprime to m, or divisible by m.
     */
    long modInverse(long T, long m) {
        if (T == 0L) {
            throw new ArithmeticException("zero is not invertible");
        }
        long[] hegcd = hegcd(T, m);
        long a = hegcd[0];
        if (!(a == 1L || a == -1L)) { // gcd != 1
            throw new ArithmeticException("element not invertible, gcd != 1");
        }
        long b = hegcd[1];
        if (b == 0L) { // when m divides this, e.g. m.isUnit()
            throw new ArithmeticException("element not invertible, divisible by modul");
        }
        if (b < 0L) {
            b += m;
        }
        return b;
    }
}
// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.firstrun; import android.os.SystemClock; import android.text.TextUtils; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import org.chromium.base.Callback; import org.chromium.base.CallbackController; import org.chromium.base.Log; import org.chromium.base.metrics.RecordHistogram; import org.chromium.base.supplier.OneshotSupplier; import org.chromium.base.supplier.OneshotSupplierImpl; import org.chromium.chrome.browser.enterprise.util.EnterpriseInfo; import org.chromium.components.policy.PolicyService; /** * Class that listens to signals related to ToSDialogBehavior. Supplies whether ToS dialog should be * skipped given policy settings. * * To be more specific: * - Supplies [True] if the ToS dialog is not enabled by policy while device is fully managed; * - Supplies [False] otherwise. */ public class SkipTosDialogPolicyListener implements OneshotSupplier<Boolean> { private static final String TAG = "SkipTosPolicy"; /** * Interface that provides histogram to be recorded when signals are available in this listener. */ interface HistogramNameProvider { /** * Name of time histogram to be recorded when signal "whether device is fully managed" is * available. The duration between creation of {@link SkipTosDialogPolicyListener} and * signal received will be recorded. * * @return Name of histogram to be recorded when signal is available. */ String getOnDeviceOwnedDetectedTimeHistogramName(); /** * Name of time histogram to be recorded when signal "whether the Tos dialog is enabled on * the device" is available. This histogram is not recorded when the value of policy * TosDialogBehavior is not used to determine the output of this listener. 
* * The duration between creation of {@link SkipTosDialogPolicyListener} and signal received * will be recorded. * * @return Name of histogram to be recorded when signal is available. */ String getOnPolicyAvailableTimeHistogramName(); } private final CallbackController mCallbackController = new CallbackController(); private final OneshotSupplierImpl<Boolean> mSkipTosDialogPolicySupplier = new OneshotSupplierImpl<>(); private final long mObjectCreatedTimeMs; private final @Nullable HistogramNameProvider mHistNameProvider; /** * This could be null when the policy load listener is provided and owned by other components. */ private @Nullable PolicyLoadListener mPolicyLoadListener; /** * The value of whether the ToS dialog is enabled on the device. If the value is false, it means * TosDialogBehavior policy is found and set to SKIP. This can be null when this information * is not ready yet. */ private @Nullable Boolean mTosDialogEnabled; /** * Whether the current device is organization owned. This will start null before the check * occurs. The FRE can only be skipped if the device is organization owned. */ private @Nullable Boolean mIsDeviceOwned; /** * @param firstRunAppRestrictionInfo Source that providers app restriction information. * @param policyServiceSupplier Supplier that providers PolicyService when native initialized. * @param enterpriseInfo Source that provides whether device is managed. * @param histogramNameProvider Provider that provides histogram names when signals are * available. 
*/ public SkipTosDialogPolicyListener(FirstRunAppRestrictionInfo firstRunAppRestrictionInfo, OneshotSupplier<PolicyService> policyServiceSupplier, EnterpriseInfo enterpriseInfo, @Nullable HistogramNameProvider histogramNameProvider) { mObjectCreatedTimeMs = SystemClock.elapsedRealtime(); mHistNameProvider = histogramNameProvider; mPolicyLoadListener = new PolicyLoadListener(firstRunAppRestrictionInfo, policyServiceSupplier); initInternally(enterpriseInfo, mPolicyLoadListener); } /** * @param policyLoadListener Supplier that provides a boolean value *whether reading policy from * policy service is necessary*. See {@link PolicyLoadListener} for more information. * @param enterpriseInfo Source that provides whether device is managed. * @param histogramNameProvider Provider that provides histogram names when signals are * available. */ public SkipTosDialogPolicyListener(OneshotSupplier<Boolean> policyLoadListener, EnterpriseInfo enterpriseInfo, @Nullable HistogramNameProvider histogramNameProvider) { mObjectCreatedTimeMs = SystemClock.elapsedRealtime(); mHistNameProvider = histogramNameProvider; initInternally(enterpriseInfo, policyLoadListener); } private void initInternally( EnterpriseInfo enterpriseInfo, OneshotSupplier<Boolean> policyLoadListener) { Boolean hasPolicy = policyLoadListener.onAvailable( mCallbackController.makeCancelable(this::onPolicyLoadListenerAvailable)); if (hasPolicy != null) { onPolicyLoadListenerAvailable(hasPolicy); } // Check EnterpriseInfo if still needed. if (mSkipTosDialogPolicySupplier.get() == null) { enterpriseInfo.getDeviceEnterpriseInfo( mCallbackController.makeCancelable(this::onIsDeviceOwnedDetected)); } } /** * Destroy the instance and remove all its dependencies. 
*/ public void destroy() { mCallbackController.destroy(); if (mPolicyLoadListener != null) { mPolicyLoadListener.destroy(); mPolicyLoadListener = null; } } @Override public Boolean onAvailable(Callback<Boolean> callback) { // This supplier posts callbacks to an inner Handler to avoid reentrancy, but this opens the // possibility of set -> destroy -> callback run, which would violate our public interface. // Wrapping incoming callback to ensure it cannot be run after destroy(). return mSkipTosDialogPolicySupplier.onAvailable( mCallbackController.makeCancelable(callback)); } /** * @return Whether the ToS dialog should be skipped given settings on device. */ @Override public Boolean get() { return mSkipTosDialogPolicySupplier.get(); } private void onPolicyLoadListenerAvailable(boolean mightHavePolicy) { if (mTosDialogEnabled != null) return; if (!mightHavePolicy) { mTosDialogEnabled = true; } else { mTosDialogEnabled = FirstRunUtils.isCctTosDialogEnabled(); if (mHistNameProvider != null) { String histogramOnPolicyLoaded = mHistNameProvider.getOnPolicyAvailableTimeHistogramName(); assert !TextUtils.isEmpty(histogramOnPolicyLoaded); RecordHistogram.recordTimesHistogram(histogramOnPolicyLoaded, SystemClock.elapsedRealtime() - mObjectCreatedTimeMs); } } setSupplierIfDecidable(); } private void onIsDeviceOwnedDetected(EnterpriseInfo.OwnedState ownedState) { if (mIsDeviceOwned != null) return; mIsDeviceOwned = ownedState != null && ownedState.mDeviceOwned; if (mHistNameProvider != null) { String histogramOnEnterpriseInfoLoaded = mHistNameProvider.getOnDeviceOwnedDetectedTimeHistogramName(); assert !TextUtils.isEmpty(histogramOnEnterpriseInfoLoaded); RecordHistogram.recordTimesHistogram(histogramOnEnterpriseInfoLoaded, SystemClock.elapsedRealtime() - mObjectCreatedTimeMs); } setSupplierIfDecidable(); } private void setSupplierIfDecidable() { if (mSkipTosDialogPolicySupplier.get() != null) return; boolean confirmedDeviceNotOwned = mIsDeviceOwned != null && !mIsDeviceOwned; 
boolean confirmedTosDialogEnabled = mTosDialogEnabled != null && mTosDialogEnabled; boolean hasOutstandingSignal = mIsDeviceOwned == null || mTosDialogEnabled == null; if (!hasOutstandingSignal) { Log.i(TAG, "Supplier available, <TosDialogEnabled>=" + mTosDialogEnabled + " <IsDeviceOwned>=" + mIsDeviceOwned); mSkipTosDialogPolicySupplier.set(!mTosDialogEnabled && mIsDeviceOwned); } else if (confirmedTosDialogEnabled || confirmedDeviceNotOwned) { Log.i(TAG, "Supplier early out, <confirmedTosDialogEnabled>=" + confirmedTosDialogEnabled + " <confirmedDeviceNotOwned>=" + confirmedDeviceNotOwned); mSkipTosDialogPolicySupplier.set(false); } } @VisibleForTesting public PolicyLoadListener getPolicyLoadListenerForTesting() { return mPolicyLoadListener; } }
/*
 * Copyright 2014 Ben Manes. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.benmanes.caffeine;

import static com.github.benmanes.caffeine.IsValidSingleConsumerQueue.validate;
import static com.github.benmanes.caffeine.testing.IsEmptyIterable.deeplyEmpty;
import static com.google.common.collect.Iterators.elementsEqual;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.emptyArray;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;

import org.testng.IInvokedMethod;
import org.testng.IInvokedMethodListener;
import org.testng.ITestResult;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;

import com.github.benmanes.caffeine.SingleConsumerQueue.LinearizableNode;
import com.github.benmanes.caffeine.SingleConsumerQueueTest.ValidatingQueueListener;
import com.github.benmanes.caffeine.testing.Awaits;
import com.github.benmanes.caffeine.testing.ConcurrentTestHarness;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.testing.SerializableTester;

/**
 * Exercises {@link SingleConsumerQueue} through the {@link Queue} contract in both its
 * optimistic and linearizable node-allocation modes (see the data providers at the bottom).
 * Each test runs once per mode, and {@link ValidatingQueueListener} checks the queue's
 * internal structure after every successful test.
 *
 * @author [email protected] (Ben Manes)
 */
@Listeners(ValidatingQueueListener.class)
public class SingleConsumerQueueTest {
  /** Number of elements each producer inserts in the concurrency tests. */
  private static final int PRODUCE = 10_000;
  /** Number of concurrent producer threads. */
  private static final int NUM_PRODUCERS = 10;
  /** Size of queues supplied by the "populated" data provider (elements 0..size-1). */
  private static final int POPULATED_SIZE = 10;

  @Test(dataProvider = "empty")
  public void clear_whenEmpty(Queue<?> queue) {
    queue.clear();
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "populated")
  public void clear_whenPopulated(Queue<?> queue) {
    queue.clear();
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "empty")
  public void isEmpty_whenEmpty(Queue<?> queue) {
    assertThat(queue.isEmpty(), is(true));
  }

  @Test(dataProvider = "populated")
  public void isEmpty_whenPopulated(Queue<?> queue) {
    assertThat(queue.isEmpty(), is(false));
  }

  @Test(dataProvider = "empty")
  public void size_whenEmpty(Queue<?> queue) {
    assertThat(queue.size(), is(0));
    // size() must agree with the count produced by traversal
    assertThat(queue.size(), is(equalTo(Iterables.size(queue))));
  }

  @Test(dataProvider = "populated")
  public void size_whenPopulated(Queue<?> queue) {
    assertThat(queue.size(), is(POPULATED_SIZE));
    assertThat(Iterables.size(queue), is(POPULATED_SIZE));
    assertThat(queue.size(), is(equalTo(Iterables.size(queue))));
  }

  /* ---------------- Contains -------------- */

  @Test(dataProvider = "empty")
  public void contains_withNull(Queue<?> queue) {
    assertThat(queue.contains(null), is(false));
  }

  @Test(dataProvider = "populated")
  public void contains_whenFound(Queue<Integer> queue) {
    assertThat(queue.contains(Iterables.get(queue, POPULATED_SIZE / 2)), is(true));
  }

  @Test(dataProvider = "populated")
  public void contains_whenNotFound(Queue<Integer> queue) {
    assertThat(queue.contains(-1), is(false));
  }

  @Test(dataProvider = "empty", expectedExceptions = NullPointerException.class)
  public void containsAll_withNull(Queue<?> queue) {
    queue.containsAll(null);
  }

  @Test(dataProvider = "populated")
  public void containsAll_whenFound(Queue<Integer> queue) {
    assertThat(queue.containsAll(
        ImmutableList.of(0, POPULATED_SIZE / 2, POPULATED_SIZE - 1)), is(true));
    assertThat(queue.containsAll(queue), is(true));
  }

  @Test(dataProvider = "populated")
  public void containsAll_whenNotFound(Queue<Integer> queue) {
    assertThat(queue.containsAll(
        ImmutableList.of(-1, -(POPULATED_SIZE / 2), -POPULATED_SIZE)), is(false));
  }

  /* ---------------- Peek -------------- */

  @Test(dataProvider = "empty")
  public void peek_whenEmpty(Queue<Integer> queue) {
    assertThat(queue.peek(), is(nullValue()));
  }

  @Test(dataProvider = "populated")
  public void peek_whenPopulated(SingleConsumerQueue<Integer> queue) {
    // head is a sentinel; head.next holds the first element
    Integer first = queue.head.next.value;
    assertThat(queue.peek(), is(first));
    assertThat(queue, hasSize(POPULATED_SIZE));
    assertThat(queue.contains(first), is(true));
  }

  /* ---------------- Element -------------- */

  @Test(dataProvider = "empty", expectedExceptions = NoSuchElementException.class)
  public void element_whenEmpty(Queue<Integer> queue) {
    queue.element();
  }

  @Test(dataProvider = "populated")
  public void element_whenPopulated(SingleConsumerQueue<Integer> queue) {
    Integer first = queue.head.next.value;
    assertThat(queue.element(), is(first));
    assertThat(queue, hasSize(POPULATED_SIZE));
    assertThat(queue.contains(first), is(true));
  }

  /* ---------------- Offer -------------- */

  @Test(dataProvider = "empty")
  public void offer_whenEmpty(Queue<Integer> queue) {
    assertThat(queue.offer(1), is(true));
    assertThat(queue, hasSize(1));
  }

  @Test(dataProvider = "populated")
  public void offer_whenPopulated(Queue<Integer> queue) {
    assertThat(queue.offer(1), is(true));
    assertThat(queue, hasSize(POPULATED_SIZE + 1));
  }

  /* ---------------- Add -------------- */

  @Test(dataProvider = "empty")
  public void add_whenEmpty(Queue<Integer> queue) {
    assertThat(queue.add(1), is(true));
    assertThat(queue.peek(), is(1));
    assertThat(Iterables.getLast(queue), is(1));
    assertThat(queue, hasSize(1));
    assertThat(queue.size(), is(equalTo(Iterables.size(queue))));
  }

  @Test(dataProvider = "populated")
  public void add_whenPopulated(Queue<Integer> queue) {
    assertThat(queue.add(-1), is(true));
    assertThat(queue.peek(), is(not(-1)));
    assertThat(Iterables.getLast(queue), is(-1));
    assertThat(queue, hasSize(POPULATED_SIZE + 1));
    assertThat(queue.size(), is(equalTo(Iterables.size(queue))));
  }

  @Test(dataProvider = "empty")
  public void addAll_whenEmpty(Queue<Integer> queue) {
    List<Integer> list = new ArrayList<>();
    populate(list, POPULATED_SIZE);
    assertThat(queue.addAll(list), is(true));
    assertThat(queue.peek(), is(0));
    assertThat(Iterables.getLast(queue), is(POPULATED_SIZE - 1));
    assertThat(String.format("%nExpected: %s%n     but: %s", queue, list),
        elementsEqual(queue.iterator(), list.iterator()));
  }

  @Test(dataProvider = "singleton,populated")
  public void addAll_whenPopulated(Queue<Integer> queue) {
    List<Integer> list = ImmutableList.of(POPULATED_SIZE, POPULATED_SIZE + 1, POPULATED_SIZE + 2);
    List<Integer> expect = ImmutableList.copyOf(Iterables.concat(queue, list));
    assertThat(queue.addAll(list), is(true));
    assertThat(queue.peek(), is(0));
    assertThat(Iterables.getLast(queue), is(POPULATED_SIZE + 2));
    assertThat(String.format("%nExpected: %s%n     but: %s", queue, expect),
        elementsEqual(queue.iterator(), expect.iterator()));
  }

  /* ---------------- Poll -------------- */

  @Test(dataProvider = "empty")
  public void poll_whenEmpty(Queue<Integer> queue) {
    assertThat(queue.poll(), is(nullValue()));
  }

  @Test(dataProvider = "populated")
  public void poll_whenPopulated(Queue<Integer> queue) {
    Integer first = queue.peek();
    assertThat(queue.poll(), is(first));
    assertThat(queue, hasSize(POPULATED_SIZE - 1));
    assertThat(queue.contains(first), is(false));
  }

  @Test(dataProvider = "populated")
  public void poll_toEmpty(Queue<Integer> queue) {
    Integer value;
    while ((value = queue.poll()) != null) {
      assertThat(queue.contains(value), is(false));
    }
    assertThat(queue, is(deeplyEmpty()));
  }

  /* ---------------- Remove -------------- */

  @Test(dataProvider = "empty", expectedExceptions = NoSuchElementException.class)
  public void remove_whenEmpty(Queue<Integer> queue) {
    queue.remove();
  }

  @Test(dataProvider = "populated")
  public void remove_whenPopulated(Queue<Integer> queue) {
    Integer first = queue.peek();
    assertThat(queue.remove(), is(first));
    assertThat(queue, hasSize(POPULATED_SIZE - 1));
    assertThat(queue.contains(first), is(false));
  }

  @Test(dataProvider = "populated")
  public void remove_toEmpty(Queue<Integer> queue) {
    while (!queue.isEmpty()) {
      Integer value = queue.remove();
      assertThat(queue.contains(value), is(false));
    }
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "empty,singleton,populated")
  public void removeElement_notFound(Queue<Integer> queue) {
    assertThat(queue.remove(-1), is(false));
  }

  @Test(dataProvider = "populated")
  public void removeElement_whenFound(Queue<Integer> queue) {
    Integer first = queue.peek();
    assertThat(queue.remove(first), is(true));
    assertThat(queue, hasSize(POPULATED_SIZE - 1));
    assertThat(queue.contains(first), is(false));
  }

  @Test(dataProvider = "populated")
  public void removeElement_toEmpty(Queue<Integer> queue) {
    while (!queue.isEmpty()) {
      Integer value = queue.peek();
      assertThat(queue.remove(value), is(true));
      assertThat(queue.contains(value), is(false));
    }
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "empty")
  public void removeAll_withEmpty(Queue<Integer> queue) {
    assertThat(queue.removeAll(ImmutableList.of()), is(false));
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "populated")
  public void removeAll_withPopulated(Queue<Integer> queue) {
    Integer first = queue.peek();
    assertThat(queue.removeAll(ImmutableList.of(first)), is(true));
    assertThat(queue, hasSize(POPULATED_SIZE - 1));
    assertThat(queue.contains(first), is(false));
  }

  @Test(dataProvider = "populated")
  public void removeAll_toEmpty(Queue<Integer> queue) {
    assertThat(queue.removeAll(ImmutableList.copyOf(queue)), is(true));
    assertThat(queue, is(deeplyEmpty()));
  }

  /* ---------------- Retain -------------- */

  @Test(dataProvider = "empty")
  public void retainAll_withEmpty(Queue<Integer> queue) {
    assertThat(queue.retainAll(ImmutableList.of()), is(false));
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "populated")
  public void retainAll_withPopulated(Queue<Integer> queue) {
    Integer first = queue.peek();
    assertThat(queue.retainAll(ImmutableList.of(first)), is(true));
    assertThat(queue, hasSize(1));
    assertThat(queue.contains(first), is(true));
  }

  @Test(dataProvider = "populated")
  public void retainAll_toEmpty(Queue<Integer> queue) {
    assertThat(queue.retainAll(ImmutableList.of()), is(true));
    assertThat(queue, is(deeplyEmpty()));
  }

  /* ---------------- Iterators -------------- */

  @Test(dataProvider = "empty", expectedExceptions = NoSuchElementException.class)
  public void iterator_noMoreElements(Queue<Integer> queue) {
    queue.iterator().next();
  }

  @Test(dataProvider = "empty")
  public void iterator_whenEmpty(Queue<Integer> queue) {
    assertThat(queue.iterator().hasNext(), is(false));
  }

  @Test(dataProvider = "singleton,populated")
  public void iterator_whenPopulated(Queue<Integer> queue) {
    List<Integer> copy = new ArrayList<>();
    populate(copy, queue.size());
    // FIX: was "\nExpected: ..." — mixed a literal '\n' with '%n'; every other
    // message in this file uses the platform-independent '%n' conversion.
    assertThat(String.format("%nExpected: %s%n     but: %s", queue, copy),
        elementsEqual(queue.iterator(), copy.iterator()));
  }

  @Test(dataProvider = "populated", expectedExceptions = IllegalStateException.class)
  public void iterator_removal_unread(Queue<Integer> queue) {
    queue.iterator().remove();
  }

  @Test(dataProvider = "populated", expectedExceptions = IllegalStateException.class)
  public void iterator_removal_duplicate(Queue<Integer> queue) {
    Iterator<Integer> it = queue.iterator();
    it.next();
    it.remove();
    it.remove();
  }

  @Test(dataProvider = "populated")
  public void iterator_removal(Queue<Integer> queue) {
    Iterator<Integer> it = queue.iterator();
    it.next();
    it.remove();
  }

  @Test(dataProvider = "populated")
  public void iterator_removal_toEmpty(Queue<Integer> queue) {
    for (Iterator<Integer> it = queue.iterator(); it.hasNext();) {
      it.next();
      it.remove();
    }
    assertThat(queue, is(deeplyEmpty()));
  }

  /* ---------------- toArray -------------- */

  @Test(dataProvider = "empty,singleton,populated")
  public void toArray(Queue<Integer> queue) {
    Object[] expect = new ArrayList<>(queue).toArray();
    Object[] actual = queue.toArray();
    assertThat(actual, queue.isEmpty() ? emptyArray() : arrayContaining(expect));
  }

  @Test(dataProvider = "empty,singleton,populated")
  public void toTypedArray(Queue<Integer> queue) {
    Integer[] expect = new ArrayList<>(queue).toArray(new Integer[] {});
    Integer[] actual = queue.toArray(new Integer[] {});
    assertThat(actual, queue.isEmpty() ? emptyArray() : arrayContaining(expect));
  }

  /* ---------------- toString -------------- */

  @Test(dataProvider = "empty,singleton,populated")
  public void toString(Queue<Integer> queue) {
    List<Integer> list = new ArrayList<>();
    populate(list, queue.size());
    assertThat(queue, hasToString(list.toString()));
  }

  /* ---------------- Serialization -------------- */

  @Test(dataProvider = "empty,singleton,populated")
  public void serializable(Queue<Integer> queue) {
    Queue<Integer> copy = SerializableTester.reserialize(queue);
    assertThat(String.format("%nExpected: %s%n     but: %s", queue, copy),
        elementsEqual(queue.iterator(), copy.iterator()));
  }

  /* ---------------- Concurrency -------------- */

  @Test(dataProvider = "empty")
  public void oneProducer_oneConsumer(Queue<Integer> queue) {
    AtomicInteger started = new AtomicInteger();
    AtomicInteger finished = new AtomicInteger();
    // Producer: both threads rendezvous on 'started' before working
    ConcurrentTestHarness.execute(() -> {
      started.incrementAndGet();
      Awaits.await().untilAtomic(started, is(2));
      for (int i = 0; i < PRODUCE; i++) {
        queue.add(i);
      }
      finished.incrementAndGet();
    });
    // Consumer: spin-polls until each of the PRODUCE elements is drained
    ConcurrentTestHarness.execute(() -> {
      started.incrementAndGet();
      Awaits.await().untilAtomic(started, is(2));
      for (int i = 0; i < PRODUCE; i++) {
        while (queue.poll() == null) {}
      }
      finished.incrementAndGet();
    });
    Awaits.await().untilAtomic(finished, is(2));
    assertThat(queue, is(deeplyEmpty()));
  }

  @Test(dataProvider = "empty")
  public void manyProducers_noConsumer(Queue<Integer> queue) {
    ConcurrentTestHarness.timeTasks(NUM_PRODUCERS, () -> {
      for (int i = 0; i < PRODUCE; i++) {
        queue.add(i);
      }
    });
    assertThat(queue, hasSize(NUM_PRODUCERS * PRODUCE));
    assertThat(queue.size(), is(equalTo(Iterables.size(queue))));
  }

  @Test(dataProvider = "empty")
  public void manyProducers_oneConsumer(Queue<Integer> queue) {
    AtomicInteger started = new AtomicInteger();
    AtomicInteger finished = new AtomicInteger();
    // Single consumer drains everything the NUM_PRODUCERS producers insert
    ConcurrentTestHarness.execute(() -> {
      started.incrementAndGet();
      Awaits.await().untilAtomic(started, is(NUM_PRODUCERS + 1));
      for (int i = 0; i < (NUM_PRODUCERS * PRODUCE); i++) {
        while (queue.poll() == null) {}
      }
      finished.incrementAndGet();
    });
    ConcurrentTestHarness.timeTasks(NUM_PRODUCERS, () -> {
      started.incrementAndGet();
      Awaits.await().untilAtomic(started, is(NUM_PRODUCERS + 1));
      for (int i = 0; i < PRODUCE; i++) {
        queue.add(i);
      }
      finished.incrementAndGet();
    });
    Awaits.await().untilAtomic(finished, is(NUM_PRODUCERS + 1));
    assertThat(queue, is(deeplyEmpty()));
  }

  /* ---------------- Queue providers -------------- */

  @DataProvider(name = "empty")
  public Object[][] providesEmpty() {
    return new Object[][] {{ makePopulated(0, true) }, { makePopulated(0, false) }};
  }

  @DataProvider(name = "singleton")
  public Object[][] providesSingleton() {
    return new Object[][] {{ makePopulated(1, true) }, { makePopulated(1, false) }};
  }

  @DataProvider(name = "populated")
  public Object[][] providesPopulated() {
    return new Object[][] {
        { makePopulated(POPULATED_SIZE, true) },
        { makePopulated(POPULATED_SIZE, false) }};
  }

  @DataProvider(name = "singleton,populated")
  public Object[][] providesSingletonAndPopulated() {
    return new Object[][] {
        { makePopulated(1, true) }, { makePopulated(1, false) },
        { makePopulated(POPULATED_SIZE, true) }, { makePopulated(POPULATED_SIZE, false) }};
  }

  @DataProvider(name = "empty,singleton,populated")
  public Object[][] providesEmptyAndSingletonAndPopulated() {
    return new Object[][] {
        { makePopulated(0, true) }, { makePopulated(0, false) },
        { makePopulated(1, true) }, { makePopulated(1, false) },
        { makePopulated(POPULATED_SIZE, true) }, { makePopulated(POPULATED_SIZE, false) }};
  }

  /** Creates a queue in the requested mode holding the elements {@code 0..size-1}. */
  static SingleConsumerQueue<Integer> makePopulated(int size, boolean optimistic) {
    SingleConsumerQueue<Integer> queue = optimistic
        ? SingleConsumerQueue.optimistic()
        : SingleConsumerQueue.linearizable();
    populate(queue, size);
    return queue;
  }

  /** Adds the elements {@code 0..count-1} to the collection. */
  static void populate(Collection<Integer> collection, int count) {
    for (int i = 0; i < count; i++) {
      collection.add(i);
    }
  }

  /** A listener that validates the internal structure after a successful test execution. */
  public static final class ValidatingQueueListener implements IInvokedMethodListener {
    @Override
    public void beforeInvocation(IInvokedMethod method, ITestResult testResult) {}

    @Override
    public void afterInvocation(IInvokedMethod method, ITestResult testResult) {
      try {
        if (testResult.isSuccess()) {
          for (Object param : testResult.getParameters()) {
            if (param instanceof SingleConsumerQueue<?>) {
              assertThat((SingleConsumerQueue<?>) param, is(validate()));
            }
          }
        }
      } catch (AssertionError caught) {
        // Downgrade the passing result: the queue's invariants were violated
        testResult.setStatus(ITestResult.FAILURE);
        testResult.setThrowable(caught);
      } finally {
        cleanUp(testResult);
      }
    }
  }

  /** Free memory by clearing unused resources after test execution. */
  static void cleanUp(ITestResult testResult) {
    Object[] params = testResult.getParameters();
    for (int i = 0; i < params.length; i++) {
      Object param = params[i];
      if ((param instanceof SingleConsumerQueue<?>)) {
        // Replace the queue with a descriptive label so the report stays readable
        // while the (possibly large) queue becomes garbage-collectible.
        boolean linearizable = (((SingleConsumerQueue<?>) param).factory.apply(null)
            instanceof LinearizableNode<?>);
        params[i] = param.getClass().getSimpleName() + "_"
            + (linearizable ? "linearizable" : "optimistic");
      } else {
        params[i] = Objects.toString(param);
      }
    }
  }
}
package hex.svd;

import hex.DataInfo;
import hex.SplitFrame;
import hex.svd.SVDModel.SVDParameters;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Ignore;
import water.DKV;
import water.Key;
import water.Scope;
import water.TestUtil;
import water.fvec.Frame;
import water.util.FrameUtils;
import water.util.Log;

import java.util.Arrays;
import java.util.concurrent.ExecutionException;

/**
 * Tests for the SVD model builder. Expected singular values/vectors were produced with R's
 * {@code svd()}/{@code prcomp()} on the same datasets; {@link TestUtil#checkEigvec} compares
 * eigenvectors up to a sign flip. All frames/models are deleted in {@code finally} blocks so
 * the DKV stays clean across tests.
 */
public class SVDTest extends TestUtil {
  public static final double TOLERANCE = 1e-6;

  @BeforeClass
  public static void setup() {
    stall_till_cloudsize(1);
  }

  /** GramSVD on USArrests: checks both singular values and right singular vectors. */
  @Test
  public void testArrests() throws InterruptedException, ExecutionException {
    // Expected right singular values and vectors
    double[] d_expected = new double[] {1419.06139510, 194.82584611, 45.66133763, 18.06955662};
    double[][] v_expected = ard(ard(-0.04239181, 0.01616262, -0.06588426, 0.99679535),
                                ard(-0.94395706, 0.32068580, 0.06655170, -0.04094568),
                                ard(-0.30842767, -0.93845891, 0.15496743, 0.01234261),
                                ard(-0.10963744, -0.12725666, -0.98347101, -0.06760284));
    SVDModel model = null;
    Frame train = null;
    try {
      train = parse_test_file(Key.make("arrests.hex"), "smalldata/pca_test/USArrests.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 4;
      parms._seed = 1234;
      parms._only_v = false;
      parms._transform = DataInfo.TransformType.NONE;
      parms._svd_method = SVDParameters.Method.GramSVD;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        TestUtil.checkEigvec(v_expected, model._output._v, TOLERANCE);
        Assert.assertArrayEquals(d_expected, model._output._d, TOLERANCE);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (model != null) model.delete();
    }
  }

  /** Power method with {@code _only_v}: V must match and the singular values must be absent. */
  @Test
  public void testArrestsOnlyV() throws InterruptedException, ExecutionException {
    // Expected right singular vectors
    double[][] svec = ard(ard(-0.04239181, 0.01616262, -0.06588426, 0.99679535),
                          ard(-0.94395706, 0.32068580, 0.06655170, -0.04094568),
                          ard(-0.30842767, -0.93845891, 0.15496743, 0.01234261),
                          ard(-0.10963744, -0.12725666, -0.98347101, -0.06760284));
    SVDModel model = null;
    Frame train = null;
    try {
      train = parse_test_file(Key.make("arrests.hex"), "smalldata/pca_test/USArrests.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 4;
      parms._seed = 1234;
      parms._only_v = true;
      parms._transform = DataInfo.TransformType.NONE;
      parms._svd_method = SVDParameters.Method.Power;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        TestUtil.checkEigvec(svec, model._output._v, TOLERANCE);
        // FIX: was a bare Java 'assert', which is a no-op unless the JVM runs with -ea;
        // use a JUnit assertion so the check always executes.
        Assert.assertNull(model._output._d);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (model != null) model.delete();
    }
  }

  /** Scores USArrests and compares projections against R's PCA scores (sign-flip aware). */
  @Test
  public void testArrestsScoring() throws InterruptedException, ExecutionException {
    double[] stddev = new double[] {202.7230564, 27.8322637, 6.5230482, 2.5813652};
    double[][] eigvec = ard(ard(-0.04239181, 0.01616262, -0.06588426, 0.99679535),
                            ard(-0.94395706, 0.32068580, 0.06655170, -0.04094568),
                            ard(-0.30842767, -0.93845891, 0.15496743, 0.01234261),
                            ard(-0.10963744, -0.12725666, -0.98347101, -0.06760284));
    SVD job = null;
    SVDModel model = null;
    Frame train = null, score = null, scoreR = null;
    try {
      train = parse_test_file(Key.make("arrests.hex"), "smalldata/pca_test/USArrests.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 4;
      parms._transform = DataInfo.TransformType.NONE;
      parms._svd_method = SVDParameters.Method.Power;
      parms._only_v = false;
      parms._keep_u = false;
      parms._save_v_frame = false;

      try {
        job = new SVD(parms);
        model = job.trainModel().get();
        boolean[] flippedEig = TestUtil.checkEigvec(eigvec, model._output._v, TOLERANCE);

        score = model.score(train);
        scoreR = parse_test_file(Key.make("scoreR.hex"), "smalldata/pca_test/USArrests_PCAscore.csv");
        // Flipped cols must match those from eigenvectors
        TestUtil.checkProjection(scoreR, score, TOLERANCE, flippedEig);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        if (job != null) job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (score != null) score.delete();
      if (scoreR != null) scoreR.delete();
      if (model != null) model.delete();
    }
  }

  // TODO: This fails GramSVD since JAMA can't handle NaNs in input matrix
  @Test
  @Ignore
  public void testArrestsMissing() throws InterruptedException, ExecutionException {
    SVDModel model = null;
    SVDParameters parms = null;
    Frame train = null;
    long seed = 1234;

    for (double missing_fraction : new double[]{0, 0.1, 0.25, 0.5, 0.75, 0.9}) {
      try {
        Scope.enter();
        train = parse_test_file(Key.make("arrests.hex"), "smalldata/pca_test/USArrests.csv");

        // Add missing values to the training data
        if (missing_fraction > 0) {
          Frame frtmp = new Frame(Key.make(), train.names(), train.vecs());
          DKV.put(frtmp._key, frtmp);  // Need to put the frame (to be modified) into DKV for MissingInserter to pick up
          FrameUtils.MissingInserter j = new FrameUtils.MissingInserter(frtmp._key, seed, missing_fraction);
          j.execImpl();
          j.get();  // MissingInserter is non-blocking, must block here explicitly
          DKV.remove(frtmp._key);  // Delete the frame header (not the data)
        }

        parms = new SVDParameters();
        parms._train = train._key;
        parms._nv = train.numCols();
        parms._transform = DataInfo.TransformType.STANDARDIZE;
        parms._svd_method = SVDParameters.Method.Power;
        parms._max_iterations = 1000;
        parms._seed = seed;
        parms._save_v_frame = false;

        SVD job = new SVD(parms);
        try {
          model = job.trainModel().get();
          Log.info(100 * missing_fraction + "% missing values: Singular values = "
              + Arrays.toString(model._output._d));
        } catch (Throwable t) {
          t.printStackTrace();
          throw new RuntimeException(t);
        } finally {
          job.remove();
        }
        Scope.exit();
      } catch(Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        if (train != null) train.delete();
        if (model != null) model.delete();
      }
    }
  }

  /** Randomized (probabilistic) SVD on standardized USArrests, keeping U. */
  @Test
  public void testArrestsProb() throws InterruptedException, ExecutionException {
    // Expected right singular values and vectors
    double[] d_expected = new double[] {11.024148, 6.964086, 4.179904, 2.915146};
    double[][] v_expected = ard(ard(-0.5358995, 0.4181809, -0.3412327, 0.64922780),
                                ard(-0.5831836, 0.1879856, -0.2681484, -0.74340748),
                                ard(-0.2781909, -0.8728062, -0.3780158, 0.13387773),
                                ard(-0.5434321, -0.1673186, 0.8177779, 0.08902432));
    SVDModel model = null;
    Frame train = null, score = null;
    try {
      train = parse_test_file(Key.make("arrests.hex"), "smalldata/pca_test/USArrests.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 4;
      parms._keep_u = true;
      parms._transform = DataInfo.TransformType.STANDARDIZE;
      parms._svd_method = SVDParameters.Method.Randomized;
      parms._max_iterations = 4;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        Assert.assertArrayEquals(d_expected, model._output._d, TOLERANCE);
        TestUtil.checkEigvec(v_expected, model._output._v, TOLERANCE);
        score = model.score(train);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (score != null) score.delete();
      if (model != null) model.delete();
    }
  }

  /** GramSVD on iris (with categorical expansion via all factor levels). */
  @Test
  public void testIrisGram() throws InterruptedException, ExecutionException {
    // Expected right singular values and vectors
    double[] d_expected = new double[]
        {96.2090445, 19.0425654, 7.2250378, 3.1636131, 1.8816739, 1.1451307, 0.5820806};
    double[][] v_expected = ard(
        ard(-0.03169051, -0.32305860, 0.185100382, -0.12336685, -0.14867156, 0.75932119, -0.496462912),
        ard(-0.04289677, 0.04037565, -0.780961964, 0.19727933, 0.07251338, -0.12216945, -0.572298338),
        ard(-0.05019689, 0.16836717, 0.551432201, -0.07122329, 0.08454116, -0.48327010, -0.647522462),
        ard(-0.74915107, -0.26629420, -0.101102186, -0.48920057, 0.32458460, -0.09176909, 0.067412858),
        ard(-0.37877011, -0.50636060, 0.142219195, 0.69081642, -0.26312992, -0.17811871, 0.041411296),
        ard(-0.51177078, 0.65945159, -0.005079934, 0.04881900, -0.52128288, 0.17038367, 0.006223427),
        ard(-0.16742875, 0.32166036, 0.145893901, 0.47102115, 0.72052968, 0.32523458, 0.020389463));
    SVDModel model = null;
    Frame train = null;
    try {
      train = parse_test_file(Key.make("iris.hex"), "smalldata/iris/iris_wheader.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 7;
      parms._use_all_factor_levels = true;
      parms._keep_u = true;
      parms._transform = DataInfo.TransformType.NONE;
      parms._svd_method = SVDParameters.Method.GramSVD;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        TestUtil.checkEigvec(v_expected, model._output._v, TOLERANCE);
        Assert.assertArrayEquals(d_expected, model._output._d, TOLERANCE);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch(Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (model != null) model.delete();
    }
  }

  /** Power-method SVD on iris, then scores the training frame. */
  @Test
  public void testIrisSVDScore() throws InterruptedException, ExecutionException {
    // Expected right singular values and vectors
    double[] d_expected = new double[]
        {96.2090445, 19.0425654, 7.2250378, 3.1636131, 1.8816739, 1.1451307, 0.5820806};
    double[][] v_expected = ard(
        ard(-0.03169051, -0.32305860, 0.185100382, -0.12336685, -0.14867156, 0.75932119, -0.496462912),
        ard(-0.04289677, 0.04037565, -0.780961964, 0.19727933, 0.07251338, -0.12216945, -0.572298338),
        ard(-0.05019689, 0.16836717, 0.551432201, -0.07122329, 0.08454116, -0.48327010, -0.647522462),
        ard(-0.74915107, -0.26629420, -0.101102186, -0.48920057, 0.32458460, -0.09176909, 0.067412858),
        ard(-0.37877011, -0.50636060, 0.142219195, 0.69081642, -0.26312992, -0.17811871, 0.041411296),
        ard(-0.51177078, 0.65945159, -0.005079934, 0.04881900, -0.52128288, 0.17038367, 0.006223427),
        ard(-0.16742875, 0.32166036, 0.145893901, 0.47102115, 0.72052968, 0.32523458, 0.020389463));
    SVDModel model = null;
    Frame train = null, score = null;
    try {
      train = parse_test_file(Key.make("iris.hex"), "smalldata/iris/iris_wheader.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 7;
      parms._use_all_factor_levels = true;
      parms._transform = DataInfo.TransformType.NONE;
      parms._svd_method = SVDParameters.Method.Power;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        TestUtil.checkEigvec(v_expected, model._output._v, TOLERANCE);
        Assert.assertArrayEquals(d_expected, model._output._d, TOLERANCE);
        score = model.score(train);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (score != null) score.delete();
      if (model != null) model.delete();
    }
  }

  /** Trains on one half of iris, scores the other, and verifies the POJO scoring path. */
  @Test
  public void testIrisSplitScoring() throws InterruptedException, ExecutionException {
    SVD job = null;
    SVDModel model = null;
    Frame fr = null, fr2 = null;
    Frame tr = null, te = null;
    try {
      fr = parse_test_file("smalldata/iris/iris_wheader.csv");
      SplitFrame sf = new SplitFrame();
      sf.dataset = fr;
      sf.ratios = new double[] { 0.5, 0.5 };
      sf.destination_frames = new Key[] { Key.make("train.hex"), Key.make("test.hex") };

      // Invoke the job
      sf.exec().get();
      Key[] ksplits = sf.destination_frames;
      tr = DKV.get(ksplits[0]).get();
      te = DKV.get(ksplits[1]).get();

      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = ksplits[0];
      parms._valid = ksplits[1];
      parms._nv = 4;
      parms._max_iterations = 1000;
      parms._svd_method = SVDParameters.Method.Power;
      parms._save_v_frame = false;

      try {
        job = new SVD(parms);
        model = job.trainModel().get();
      } finally {
        if (job != null) job.remove();
      }

      // Done building model; produce a score column with cluster choices
      fr2 = model.score(te);
      Assert.assertTrue(model.testJavaScoring(te, fr2, 1e-5));
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (fr != null) fr.delete();
      if (fr2 != null) fr2.delete();
      if (tr != null) tr.delete();
      if (te != null) te.delete();
      if (model != null) model.delete();
    }
  }

  /** Randomized SVD on iris without U, then scores the training frame. */
  @Test
  public void testIrisProb() throws InterruptedException, ExecutionException {
    // Expected right singular values and vectors
    double[] d_expected = new double[]
        {96.2090445, 19.0425654, 7.2250378, 3.1636131, 1.8816739, 1.1451307, 0.5820806};
    double[][] v_expected = ard(
        ard(-0.03169051, -0.32305860, 0.185100382, -0.12336685, -0.14867156, 0.75932119, -0.496462912),
        ard(-0.04289677, 0.04037565, -0.780961964, 0.19727933, 0.07251338, -0.12216945, -0.572298338),
        ard(-0.05019689, 0.16836717, 0.551432201, -0.07122329, 0.08454116, -0.48327010, -0.647522462),
        ard(-0.74915107, -0.26629420, -0.101102186, -0.48920057, 0.32458460, -0.09176909, 0.067412858),
        ard(-0.37877011, -0.50636060, 0.142219195, 0.69081642, -0.26312992, -0.17811871, 0.041411296),
        ard(-0.51177078, 0.65945159, -0.005079934, 0.04881900, -0.52128288, 0.17038367, 0.006223427),
        ard(-0.16742875, 0.32166036, 0.145893901, 0.47102115, 0.72052968, 0.32523458, 0.020389463));
    SVDModel model = null;
    Frame train = null, score = null;
    try {
      train = parse_test_file(Key.make("iris.hex"), "smalldata/iris/iris_wheader.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 7;
      parms._use_all_factor_levels = true;
      parms._keep_u = false;
      parms._transform = DataInfo.TransformType.NONE;
      parms._svd_method = SVDParameters.Method.Randomized;
      parms._max_iterations = 7;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        TestUtil.checkEigvec(v_expected, model._output._v, TOLERANCE);
        Assert.assertArrayEquals(d_expected, model._output._d, TOLERANCE);
        score = model.score(train);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (score != null) score.delete();
      if (model != null) model.delete();
    }
  }

  /** Randomized SVD on the demeaned benign dataset; just verifies train + score complete. */
  @Test
  public void testBenignProb() throws InterruptedException, ExecutionException {
    // Expected singular values (currently not asserted — see commented check below)
    double[] d_expected = new double[] {450.809529, 212.934956, 155.608260, 64.528823, 52.334624};
    SVDModel model = null;
    Frame train = null, score = null;
    try {
      train = parse_test_file(Key.make("benign.hex"), "smalldata/logreg/benign.csv");
      SVDModel.SVDParameters parms = new SVDModel.SVDParameters();
      parms._train = train._key;
      parms._nv = 5;
      parms._keep_u = true;
      parms._transform = DataInfo.TransformType.DEMEAN;
      parms._svd_method = SVDParameters.Method.Randomized;
      parms._impute_missing = true;
      parms._max_iterations = 20;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        // Assert.assertArrayEquals(d_expected, model._output._d, 1e-4);
        score = model.score(train);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch (Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (score != null) score.delete();
      if (model != null) model.delete();
    }
  }

  /** Randomized SVD with imputation on prostate data containing 25% injected missing values. */
  @Test
  public void testProstateMissingProb() throws InterruptedException, ExecutionException {
    long seed = 1234;
    Frame train = null, score = null;
    SVDModel model = null;
    try {
      train = parse_test_file(Key.make("prostate.hex"), "smalldata/prostate/prostate_cat.csv");

      // Add missing values to the training data
      Frame frtmp = new Frame(Key.make(), train.names(), train.vecs());
      DKV.put(frtmp._key, frtmp);  // Need to put the frame (to be modified) into DKV for MissingInserter to pick up
      FrameUtils.MissingInserter j = new FrameUtils.MissingInserter(frtmp._key, seed, 0.25);
      j.execImpl();
      j.get();  // MissingInserter is non-blocking, must block here explicitly
      DKV.remove(frtmp._key);  // Delete the frame header (not the data)

      SVDParameters parms = new SVDParameters();
      parms._train = train._key;
      parms._nv = 8;
      parms._only_v = false;
      parms._keep_u = true;
      parms._svd_method = SVDParameters.Method.Randomized;
      parms._impute_missing = true;
      parms._max_iterations = 20;
      parms._save_v_frame = false;

      SVD job = new SVD(parms);
      try {
        model = job.trainModel().get();
        score = model.score(train);
      } catch (Throwable t) {
        t.printStackTrace();
        throw new RuntimeException(t);
      } finally {
        job.remove();
      }
    } catch(Throwable t) {
      t.printStackTrace();
      throw new RuntimeException(t);
    } finally {
      if (train != null) train.delete();
      if (score != null) score.delete();
      if (model != null) model.delete();
    }
  }

  /** Unit test of the in-place update res := res - v*v' performed by {@link SVD#updateIVVSum}. */
  @Test
  public void testIVVSum() {
    double[][] res = ard(ard(1, 2, 3), ard(2, 5, 6), ard(3, 6, 9));
    double[] v = new double[] {7, 8, 9};
    double[][] xvv = ard(ard(-48, -54, -60), ard(-54, -59, -66), ard(-60, -66, -72));
    SVD.updateIVVSum(res, v);
    Assert.assertArrayEquals(xvv, res);
  }
}
/* * Copyright 2013 Google Inc. * Copyright 2013-2014 Ronald W Hoffman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ScripterRon.BitcoinCore; import java.util.HashMap; import java.util.Map; /** * Various constants that define the assembly-like scripting language that forms part of the Bitcoin protocol. */ public class ScriptOpCodes { /** Standard signature types */ public static final int PAY_TO_PUBKEY_HASH = 1; public static final int PAY_TO_PUBKEY = 2; public static final int PAY_TO_SCRIPT_HASH = 3; public static final int PAY_TO_MULTISIG = 4; public static final int PAY_TO_NOBODY = 5; /** Signature hash types */ public static final int SIGHASH_ALL = 1; public static final int SIGHASH_NONE = 2; public static final int SIGHASH_SINGLE = 3; public static final int SIGHASH_ANYONE_CAN_PAY = 128; // Push value public static final int OP_0 = 0x00; public static final int OP_FALSE = OP_0; public static final int OP_PUSHDATA1 = 0x4c; public static final int OP_PUSHDATA2 = 0x4d; public static final int OP_PUSHDATA4 = 0x4e; public static final int OP_1NEGATE = 0x4f; public static final int OP_RESERVED = 0x50; public static final int OP_1 = 0x51; public static final int OP_TRUE = OP_1; public static final int OP_2 = 0x52; public static final int OP_3 = 0x53; public static final int OP_4 = 0x54; public static final int OP_5 = 0x55; public static final int OP_6 = 0x56; public static final int OP_7 = 0x57; public static final int OP_8 = 0x58; public static final int OP_9 = 
// NOTE(review): this chunk opens mid-declaration — the leading "0x59;" completes a
// constant begun on the preceding (unseen) line. The values follow Bitcoin-Script-style
// opcode numbering; line breaks below were restored so the inline section comments
// no longer sit mid-line.
0x59; public static final int OP_10 = 0x5a; public static final int OP_11 = 0x5b; public static final int OP_12 = 0x5c; public static final int OP_13 = 0x5d; public static final int OP_14 = 0x5e; public static final int OP_15 = 0x5f; public static final int OP_16 = 0x60;
// Control
public static final int OP_NOP = 0x61; public static final int OP_VER = 0x62; public static final int OP_IF = 0x63; public static final int OP_NOTIF = 0x64; public static final int OP_VERIF = 0x65; public static final int OP_VERNOTIF = 0x66; public static final int OP_ELSE = 0x67; public static final int OP_ENDIF = 0x68; public static final int OP_VERIFY = 0x69; public static final int OP_RETURN = 0x6a;
// Stack ops
public static final int OP_TOALTSTACK = 0x6b; public static final int OP_FROMALTSTACK = 0x6c; public static final int OP_2DROP = 0x6d; public static final int OP_2DUP = 0x6e; public static final int OP_3DUP = 0x6f; public static final int OP_2OVER = 0x70; public static final int OP_2ROT = 0x71; public static final int OP_2SWAP = 0x72; public static final int OP_IFDUP = 0x73; public static final int OP_DEPTH = 0x74; public static final int OP_DROP = 0x75; public static final int OP_DUP = 0x76; public static final int OP_NIP = 0x77; public static final int OP_OVER = 0x78; public static final int OP_PICK = 0x79; public static final int OP_ROLL = 0x7a; public static final int OP_ROT = 0x7b; public static final int OP_SWAP = 0x7c; public static final int OP_TUCK = 0x7d;
// Splice ops
public static final int OP_CAT = 0x7e; public static final int OP_SUBSTR = 0x7f; public static final int OP_LEFT = 0x80; public static final int OP_RIGHT = 0x81; public static final int OP_SIZE = 0x82;
// Bit logic
public static final int OP_INVERT = 0x83; public static final int OP_AND = 0x84; public static final int OP_OR = 0x85; public static final int OP_XOR = 0x86; public static final int OP_EQUAL = 0x87; public static final int OP_EQUALVERIFY = 0x88; public static final int OP_RESERVED1 = 0x89; public static final int OP_RESERVED2 = 0x8a;
// Numeric
public static final int OP_1ADD = 0x8b; public static final int OP_1SUB = 0x8c; public static final int OP_2MUL = 0x8d; public static final int OP_2DIV = 0x8e; public static final int OP_NEGATE = 0x8f; public static final int OP_ABS = 0x90; public static final int OP_NOT = 0x91; public static final int OP_0NOTEQUAL = 0x92; public static final int OP_ADD = 0x93; public static final int OP_SUB = 0x94; public static final int OP_MUL = 0x95; public static final int OP_DIV = 0x96; public static final int OP_MOD = 0x97; public static final int OP_LSHIFT = 0x98; public static final int OP_RSHIFT = 0x99; public static final int OP_BOOLAND = 0x9a; public static final int OP_BOOLOR = 0x9b; public static final int OP_NUMEQUAL = 0x9c; public static final int OP_NUMEQUALVERIFY = 0x9d; public static final int OP_NUMNOTEQUAL = 0x9e; public static final int OP_LESSTHAN = 0x9f; public static final int OP_GREATERTHAN = 0xa0; public static final int OP_LESSTHANOREQUAL = 0xa1; public static final int OP_GREATERTHANOREQUAL = 0xa2; public static final int OP_MIN = 0xa3; public static final int OP_MAX = 0xa4; public static final int OP_WITHIN = 0xa5;
// Crypto
public static final int OP_RIPEMD160 = 0xa6; public static final int OP_SHA1 = 0xa7; public static final int OP_SHA256 = 0xa8; public static final int OP_HASH160 = 0xa9; public static final int OP_HASH256 = 0xaa; public static final int OP_CODESEPARATOR = 0xab; public static final int OP_CHECKSIG = 0xac; public static final int OP_CHECKSIGVERIFY = 0xad; public static final int OP_CHECKMULTISIG = 0xae; public static final int OP_CHECKMULTISIGVERIFY = 0xaf;
// Expansion
public static final int OP_NOP1 = 0xb0; public static final int OP_NOP2 = 0xb1; public static final int OP_NOP3 = 0xb2; public static final int OP_NOP4 = 0xb3; public static final int OP_NOP5 = 0xb4; public static final int OP_NOP6 = 0xb5; public static final int OP_NOP7 = 0xb6; public static final int OP_NOP8 = 0xb7; public static final int OP_NOP9 = 0xb8; public static final int OP_NOP10 = 0xb9; public static final int OP_INVALIDOPCODE = 0xff;
// Lookup table mapping each opcode value to its display name; populated once in the
// static initializer below and read by getOpCodeName(). The initializer continues on
// the following line (NOP8..NOP10 entries and the closing brace).
private static final Map<Integer, String> opCodeMap = new HashMap<>(125);
static {
// Push-value and control opcodes.
opCodeMap.put(OP_0, "0"); opCodeMap.put(OP_PUSHDATA1, "PUSHDATA1"); opCodeMap.put(OP_PUSHDATA2, "PUSHDATA2"); opCodeMap.put(OP_PUSHDATA4, "PUSHDATA4"); opCodeMap.put(OP_1NEGATE, "1NEGATE"); opCodeMap.put(OP_RESERVED, "RESERVED"); opCodeMap.put(OP_1, "1"); opCodeMap.put(OP_2, "2"); opCodeMap.put(OP_3, "3"); opCodeMap.put(OP_4, "4"); opCodeMap.put(OP_5, "5"); opCodeMap.put(OP_6, "6"); opCodeMap.put(OP_7, "7"); opCodeMap.put(OP_8, "8"); opCodeMap.put(OP_9, "9"); opCodeMap.put(OP_10, "10"); opCodeMap.put(OP_11, "11"); opCodeMap.put(OP_12, "12"); opCodeMap.put(OP_13, "13"); opCodeMap.put(OP_14, "14"); opCodeMap.put(OP_15, "15"); opCodeMap.put(OP_16, "16"); opCodeMap.put(OP_NOP, "NOP"); opCodeMap.put(OP_VER, "VER"); opCodeMap.put(OP_IF, "IF"); opCodeMap.put(OP_NOTIF, "NOTIF"); opCodeMap.put(OP_VERIF, "VERIF"); opCodeMap.put(OP_VERNOTIF, "VERNOTIF"); opCodeMap.put(OP_ELSE, "ELSE"); opCodeMap.put(OP_ENDIF, "ENDIF"); opCodeMap.put(OP_VERIFY, "VERIFY"); opCodeMap.put(OP_RETURN, "RETURN");
// Stack and splice opcodes.
opCodeMap.put(OP_TOALTSTACK, "TOALTSTACK"); opCodeMap.put(OP_FROMALTSTACK, "FROMALTSTACK"); opCodeMap.put(OP_2DROP, "2DROP"); opCodeMap.put(OP_2DUP, "2DUP"); opCodeMap.put(OP_3DUP, "3DUP"); opCodeMap.put(OP_2OVER, "2OVER"); opCodeMap.put(OP_2ROT, "2ROT"); opCodeMap.put(OP_2SWAP, "2SWAP"); opCodeMap.put(OP_IFDUP, "IFDUP"); opCodeMap.put(OP_DEPTH, "DEPTH"); opCodeMap.put(OP_DROP, "DROP"); opCodeMap.put(OP_DUP, "DUP"); opCodeMap.put(OP_NIP, "NIP"); opCodeMap.put(OP_OVER, "OVER"); opCodeMap.put(OP_PICK, "PICK"); opCodeMap.put(OP_ROLL, "ROLL"); opCodeMap.put(OP_ROT, "ROT"); opCodeMap.put(OP_SWAP, "SWAP"); opCodeMap.put(OP_TUCK, "TUCK"); opCodeMap.put(OP_CAT, "CAT"); opCodeMap.put(OP_SUBSTR, "SUBSTR"); opCodeMap.put(OP_LEFT, "LEFT"); opCodeMap.put(OP_RIGHT, "RIGHT");
// Bit-logic, numeric, crypto and expansion opcodes.
opCodeMap.put(OP_SIZE, "SIZE"); opCodeMap.put(OP_INVERT, "INVERT"); opCodeMap.put(OP_AND, "AND"); opCodeMap.put(OP_OR, "OR"); opCodeMap.put(OP_XOR, "XOR"); opCodeMap.put(OP_EQUAL, "EQUAL"); opCodeMap.put(OP_EQUALVERIFY, "EQUALVERIFY"); opCodeMap.put(OP_RESERVED1, "RESERVED1"); opCodeMap.put(OP_RESERVED2, "RESERVED2"); opCodeMap.put(OP_1ADD, "1ADD"); opCodeMap.put(OP_1SUB, "1SUB"); opCodeMap.put(OP_2MUL, "2MUL"); opCodeMap.put(OP_2DIV, "2DIV"); opCodeMap.put(OP_NEGATE, "NEGATE"); opCodeMap.put(OP_ABS, "ABS"); opCodeMap.put(OP_NOT, "NOT"); opCodeMap.put(OP_0NOTEQUAL, "0NOTEQUAL"); opCodeMap.put(OP_ADD, "ADD"); opCodeMap.put(OP_SUB, "SUB"); opCodeMap.put(OP_MUL, "MUL"); opCodeMap.put(OP_DIV, "DIV"); opCodeMap.put(OP_MOD, "MOD"); opCodeMap.put(OP_LSHIFT, "LSHIFT"); opCodeMap.put(OP_RSHIFT, "RSHIFT"); opCodeMap.put(OP_BOOLAND, "BOOLAND"); opCodeMap.put(OP_BOOLOR, "BOOLOR"); opCodeMap.put(OP_NUMEQUAL, "NUMEQUAL"); opCodeMap.put(OP_NUMEQUALVERIFY, "NUMEQUALVERIFY"); opCodeMap.put(OP_NUMNOTEQUAL, "NUMNOTEQUAL"); opCodeMap.put(OP_LESSTHAN, "LESSTHAN"); opCodeMap.put(OP_GREATERTHAN, "GREATERTHAN"); opCodeMap.put(OP_LESSTHANOREQUAL, "LESSTHANOREQUAL"); opCodeMap.put(OP_GREATERTHANOREQUAL, "GREATERTHANOREQUAL"); opCodeMap.put(OP_MIN, "MIN"); opCodeMap.put(OP_MAX, "MAX"); opCodeMap.put(OP_WITHIN, "WITHIN"); opCodeMap.put(OP_RIPEMD160, "RIPEMD160"); opCodeMap.put(OP_SHA1, "SHA1"); opCodeMap.put(OP_SHA256, "SHA256"); opCodeMap.put(OP_HASH160, "HASH160"); opCodeMap.put(OP_HASH256, "HASH256"); opCodeMap.put(OP_CODESEPARATOR, "CODESEPARATOR"); opCodeMap.put(OP_CHECKSIG, "CHECKSIG"); opCodeMap.put(OP_CHECKSIGVERIFY, "CHECKSIGVERIFY"); opCodeMap.put(OP_CHECKMULTISIG, "CHECKMULTISIG"); opCodeMap.put(OP_CHECKMULTISIGVERIFY, "CHECKMULTISIGVERIFY"); opCodeMap.put(OP_NOP1, "NOP1"); opCodeMap.put(OP_NOP2, "NOP2"); opCodeMap.put(OP_NOP3, "NOP3"); opCodeMap.put(OP_NOP4, "NOP4"); opCodeMap.put(OP_NOP5, "NOP5"); opCodeMap.put(OP_NOP6, "NOP6"); opCodeMap.put(OP_NOP7, "NOP7");
opCodeMap.put(OP_NOP8, "NOP8"); opCodeMap.put(OP_NOP9, "NOP9"); opCodeMap.put(OP_NOP10, "NOP10"); } /** * Converts the given OpCode into a string (eg "0", "PUSHDATA", or "NON_OP(10)") * * @param opcode Opcode * @return String result */ public static String getOpCodeName(int opcode) { if (opCodeMap.containsKey(opcode)) return opCodeMap.get(opcode); return "NON_OP(" + opcode + ")"; } }
package com.diegodevelopero.PicoyPlacaReminder;

import java.util.Calendar; import android.app.Activity; import android.app.AlertDialog; import android.content.ContentValues; import android.content.DialogInterface; import android.content.Intent; import android.database.Cursor; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.SimpleCursorAdapter; import android.widget.SimpleCursorAdapter.ViewBinder; import android.widget.TextView; import com.google.ads.AdRequest; import com.google.ads.AdSize; import com.google.ads.AdView; import com.google.android.apps.analytics.GoogleAnalyticsTracker;

/**
 * Activity that lists the user's registered cars and, for the currently selected city
 * and today's day-of-week, flags each car with a stop/go icon according to the
 * "pico y placa" driving restriction stored in the local database.
 * NOTE(review): relies on project classes (DbAdapter, Car, WidgetUpdate, AddCar) not
 * visible in this chunk; claims below are limited to what this code shows.
 */
public class CurrentCars extends Activity{ private Button addCar; private Button btnCiudad; private DbAdapter myDbAdapter; private DbAdapter myDbCity; private Cursor allCarsCursor; private ListView listContent; private int flag = 0; private Cursor picoyplaca; private String number=""; private WidgetUpdate myWidgetUpdate; private int numberOfCars; private TextView numberCurrentCars; private Cursor currentCityCursor; private int currentCityId; private String currentCityName; private WidgetUpdate widgetUpdate; GoogleAnalyticsTracker tracker;

// Sets up the layout, injects an AdMob banner, builds the car list, and starts
// Google Analytics session tracking for this screen.
@Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.current_cars);
// Lookup R.layout.main
LinearLayout layout = (LinearLayout) findViewById(R.id.current_cars);
// Create the adView
// Please replace MY_BANNER_UNIT_ID with your AdMob Publisher ID
AdView adView = new AdView(this, AdSize.BANNER, "a14f6480b45f38a");
// Add the adView to it
layout.addView(adView);
// Initiate a generic request to load it with an ad
AdRequest request = new AdRequest();
//request.addTestDevice("0D22E81D0C61259DD8B6D26C156AA4B8");
//request.setTesting(true);
adView.loadAd(request); updateCurrentList(); tracker = GoogleAnalyticsTracker.getInstance(); tracker.startNewSession("UA-3974370-12", 30, this); tracker.trackPageView("/CurrentCars"); }

// Refreshes the list (and the home-screen widget) after AddCar returns successfully.
@Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (resultCode == RESULT_OK){ myDbAdapter.close(); updateCurrentList();
//Update the widget
myWidgetUpdate = new WidgetUpdate(); myWidgetUpdate.updateWidget(CurrentCars.this, true); } }

// Rebuilds the car list: loads all cars, resolves the active city and today's
// restricted plate digits ("numero"), then binds the cursor to the list with a
// ViewBinder that paints the stop/go indicator.
private void updateCurrentList(){ listContent = (ListView)findViewById(R.id.contentlist); numberCurrentCars = (TextView) findViewById(R.id.number_curren_cars);
//Instance of the object DBHelper/
myDbAdapter = new DbAdapter(this); allCarsCursor = myDbAdapter.getAllCars(); startManagingCursor(allCarsCursor); numberOfCars = allCarsCursor.getCount(); numberCurrentCars.setText("Mostrando "+numberOfCars+" vehiculos"); final Calendar c = Calendar.getInstance(); int day = c.get(Calendar.DAY_OF_WEEK);
//Get the currentCity
currentCityCursor = myDbAdapter.getCurrentCity(); currentCityCursor.moveToFirst(); currentCityId = currentCityCursor.getInt(currentCityCursor.getColumnIndex("_id")); currentCityName = currentCityCursor.getString(currentCityCursor.getColumnIndex("name")); currentCityCursor.close(); picoyplaca = myDbAdapter.findPicoyPlacaToday(day, currentCityId); startManagingCursor(picoyplaca); if(picoyplaca.moveToFirst()){ number = picoyplaca.getString(picoyplaca.getColumnIndex("numero")); }
//TODO here use constants
// "icon" is mapped twice on purpose: the ViewBinder is invoked once per mapped view
// (car_block then car_entry) and uses the 'flag' toggle below to tell the two apart.
String[] from = new String[] {"_id", "nombre", "placa", "icon", "icon"}; int[] to = new int[] {R.id.id_entry, R.id.name_entry, R.id.plate_entry, R.id.car_block, R.id.car_entry}; SimpleCursorAdapter mAdapter = new SimpleCursorAdapter(CurrentCars.this, R.layout.current_cars_entry, allCarsCursor, from, to); mAdapter.setViewBinder(new ViewBinder() { @Override public
// Binds the two icon views of each row. First call (flag==0) draws stop/go depending
// on whether the car's plate is restricted today; second call (flag!=0) draws the
// car's own icon and resets the toggle. City id 2 (Bogota) uses a plate-based check,
// every other city a digit-string check — presumably different restriction rules;
// TODO confirm against Car.hasPicoBogota()/hasPico(). "null" here is the literal
// string stored in the DB when no restriction applies today.
boolean setViewValue(View view, Cursor cursor, int columnIndex) { if(columnIndex == cursor.getColumnIndex("icon")){ Car carObj; int placa = Integer.parseInt(cursor.getString(cursor.getColumnIndex("placa"))); int icon = Integer.parseInt(cursor.getString(cursor.getColumnIndex("icon"))); carObj = new Car(icon, placa); ImageView imgView = (ImageView) view; imgView.setImageResource(carObj.getCarDraw()); if(currentCityId==2){
//Bogota
if(!number.equals("null")){ boolean found = carObj.hasPicoBogota(placa); if(flag == 0){ if(found){ imgView.setImageResource(R.drawable.stop); }else{ imgView.setImageResource(R.drawable.go); } flag++; }else{ imgView.setImageResource(carObj.getCarDraw()); flag=0; } return true; }else{ if(flag == 0){ imgView.setImageResource(R.drawable.go); flag++; }else{ imgView.setImageResource(carObj.getCarDraw()); flag=0; } return true; } }else{ if(!number.equals("null")){ boolean found = carObj.hasPico(number); if(flag == 0){ if(found){ imgView.setImageResource(R.drawable.stop); }else{ imgView.setImageResource(R.drawable.go); } flag++; }else{ imgView.setImageResource(carObj.getCarDraw()); flag=0; } return true; }else{ if(flag == 0){ imgView.setImageResource(R.drawable.go); flag++; }else{ imgView.setImageResource(carObj.getCarDraw()); flag=0; } return true; } }
// (removed a stale commented-out copy of the non-Bogota branch above)
} return false; } }); listContent.setAdapter(mAdapter); listContent.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> arg0, View arg1, int arg2,long arg3) { TextView selection; selection = (TextView) arg1.findViewById(R.id.id_entry);
Intent intent = new Intent(CurrentCars.this, AddCar.class); intent.putExtra("cursorId", Integer.parseInt(selection.getText().toString())); startActivityForResult(intent, 0); } }); addCar = (Button) findViewById(R.id.btnAddCar); addCar.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { tracker.trackEvent("Clicks", "Button", "addCar", 0); startActivityForResult(new Intent(CurrentCars.this, AddCar.class), 0); } }); btnCiudad = (Button) findViewById(R.id.btnciudad); btnCiudad.setText(currentCityName); btnCiudad.setOnClickListener(new OnClickListener() { @Override public void onClick(View arg0) { tracker.trackEvent("Clicks", "Button", "seleccionarCiudad", 0); final CharSequence[] items = {"Barranquilla", "Bogota", "Bucaramanga", "Cali", "Cartagena", "Medellin"}; AlertDialog.Builder builder = new AlertDialog.Builder(CurrentCars.this); builder.setTitle("Seleccione una ciudad"); builder.setItems(items, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int item) { //Save here the new city ContentValues values = new ContentValues(); values.put("activo", 1); tracker.trackEvent("Clicks", "Option", ""+items[item], 0); btnCiudad.setText(items[item]); item++; myDbCity = new DbAdapter(CurrentCars.this); myDbCity.updateCurrentCity(values, item); myDbCity.close(); dialog.dismiss(); //update the widget widgetUpdate = new WidgetUpdate(); widgetUpdate.updateWidget(CurrentCars.this, true); //update the currentList myDbAdapter.close(); updateCurrentList(); } }); AlertDialog alert = builder.create(); alert.show(); } }); } @Override protected void onDestroy() { super.onDestroy(); myDbAdapter.close(); // Stop the tracker when it is no longer needed. tracker.stopSession(); } @Override protected void onStop() { super.onStop(); myDbAdapter.close(); } @Override protected void onRestart() { super.onStart(); myDbAdapter.close(); updateCurrentList(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol;

import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.jcr.Value; import javax.jcr.security.AccessControlEntry; import javax.jcr.security.AccessControlException; import javax.jcr.security.Privilege; import org.apache.jackrabbit.api.security.JackrabbitAccessControlEntry; import org.apache.jackrabbit.api.security.JackrabbitAccessControlList; import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.when;

/**
 * Test for {@code ImmutableACL}
 */
public class ImmutableACLTest extends AbstractAccessControlListTest { private Privilege[] testPrivileges;

// Fixture: two mocked privileges (jcr:read, jcr:addChildNodes) used by the tests below.
@Before public void before() throws Exception { testPrivileges = privilegesFromNames(PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_ADD_CHILD_NODES); }

// Builds Mockito-backed Privilege stubs whose getName() returns the given names.
private static Privilege[] privilegesFromNames(String... privNames) { Privilege[] p = new Privilege[privNames.length]; for (int i = 0; i < privNames.length; i++) { Privilege privilege = Mockito.mock(Privilege.class); when(privilege.getName()).thenReturn(privNames[i]); p[i] = privilege; } return p; }

// Creates the ImmutableACL under test, translating the JCR path to an Oak path first.
protected ImmutableACL createACL(@Nullable String jcrPath, @NotNull List<JackrabbitAccessControlEntry> entries, @NotNull NamePathMapper namePathMapper, @NotNull RestrictionProvider restrictionProvider) { String oakPath = (jcrPath == null) ? null : namePathMapper.getOakPath(jcrPath); return new ImmutableACL(oakPath, entries, restrictionProvider, namePathMapper); }

// Asserts that every mutating JackrabbitAccessControlList operation (add, reorder,
// remove) throws AccessControlException on the given ACL. The method body continues
// on the next line (the remove loop's fail/catch and closing braces).
private void assertImmutable(JackrabbitAccessControlList acl) throws Exception { String msg = "ACL should be immutable."; try { acl.addAccessControlEntry(testPrincipal, testPrivileges); fail(msg); } catch (AccessControlException e) { /* success */ } try { acl.addEntry(testPrincipal, testPrivileges, true); fail(msg); } catch (AccessControlException e) { /* success */ } try { acl.addEntry(testPrincipal, testPrivileges, false, Collections.<String, Value>emptyMap()); fail(msg); } catch (AccessControlException e) { /* success */ } try { acl.addEntry(testPrincipal, testPrivileges, false, Collections.<String, Value>emptyMap(), Collections.<String, Value[]>emptyMap()); fail(msg); } catch (AccessControlException e) { /* success */ } AccessControlEntry[] entries = acl.getAccessControlEntries(); if (entries.length > 1) { try { acl.orderBefore(entries[0], null); fail(msg); } catch (AccessControlException e) { /* success */ } try { acl.orderBefore(entries[1], entries[0]); fail(msg); } catch (AccessControlException e) { /* success */ } } for (AccessControlEntry ace : entries) { try { acl.removeAccessControlEntry(ace);
fail(msg); } catch (AccessControlException e) { // success } } } @Test public void testImmutable() throws Exception { List<JackrabbitAccessControlEntry> entries = new ArrayList(); entries.add(createEntry(true, PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_ADD_CHILD_NODES)); entries.add(createEntry(false, PrivilegeConstants.JCR_LIFECYCLE_MANAGEMENT)); JackrabbitAccessControlList acl = createACL(entries); assertFalse(acl.isEmpty()); assertEquals(2, acl.size()); assertEquals(getTestPath(), acl.getPath()); assertImmutable(acl); } @Test public void testEmptyIsImmutable() throws Exception { JackrabbitAccessControlList acl = createEmptyACL(); assertTrue(acl.isEmpty()); assertEquals(0, acl.size()); assertEquals(getTestPath(), acl.getPath()); assertImmutable(acl); } @Test public void testEqualsForEmpty() throws Exception { JackrabbitAccessControlList acl = createEmptyACL(); assertEquals(acl, createEmptyACL()); ACE entry = createEntry(true, PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_ADD_CHILD_NODES); assertFalse(acl.equals(createACL(entry))); assertFalse(acl.equals(new TestACL(getTestPath(), getRestrictionProvider(), getNamePathMapper(), Collections.<JackrabbitAccessControlEntry>emptyList()))); } @Test public void testEquals() throws Exception { RestrictionProvider rp = getRestrictionProvider(); ACE ace1 = createEntry(testPrincipal, PrivilegeBits.BUILT_IN.get(PrivilegeConstants.JCR_VERSION_MANAGEMENT), false); ACE ace2 = createEntry(true, PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_ADD_CHILD_NODES); ACE ace2b = createEntry(true, PrivilegeConstants.REP_READ_NODES, PrivilegeConstants.REP_READ_PROPERTIES, PrivilegeConstants.JCR_ADD_CHILD_NODES); JackrabbitAccessControlList acl = createACL(ace1, ace2); assertTrue(acl instanceof ImmutableACL); assertEquals(acl, acl); JackrabbitAccessControlList repoAcl = createACL((String) null, ace1, ace2); assertTrue(repoAcl instanceof ImmutableACL); assertEquals(repoAcl, repoAcl); assertEquals(acl, createACL(ace1, 
ace2)); assertEquals(acl, createACL(ace1, ace2b)); assertEquals(repoAcl, createACL((String) null, ace1, ace2b)); assertFalse(acl.equals(createACL(ace2, ace1))); assertFalse(acl.equals(repoAcl)); assertFalse(acl.equals(createEmptyACL())); assertFalse(acl.equals(createACL("/anotherPath", ace1, ace2))); assertFalse(acl.equals(new TestACL("/anotherPath", rp, getNamePathMapper(), ace1, ace2))); assertFalse(acl.equals(new TestACL("/anotherPath", rp, getNamePathMapper(), ace1, ace2))); assertFalse(acl.equals(new TestACL("/anotherPath", rp, getNamePathMapper()))); assertFalse(acl.equals(new TestACL(getTestPath(), rp, getNamePathMapper(), ace1, ace2))); } @Test public void testHashCode() throws Exception { RestrictionProvider rp = getRestrictionProvider(); ACE ace1 = createEntry(false, PrivilegeConstants.JCR_VERSION_MANAGEMENT); ACE ace2 = createEntry(true, PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_ADD_CHILD_NODES); ACE ace2b = createEntry(true, PrivilegeConstants.REP_READ_NODES, PrivilegeConstants.REP_READ_PROPERTIES, PrivilegeConstants.JCR_ADD_CHILD_NODES); JackrabbitAccessControlList acl = createACL(ace1, ace2); JackrabbitAccessControlList repoAcl = createACL((String) null, ace1, ace2); int hc = acl.hashCode(); assertTrue(hc == createACL(ace1, ace2).hashCode()); assertTrue(hc == createACL(ace1, ace2b).hashCode()); assertTrue(repoAcl.hashCode() == createACL((String) null, ace1, ace2b).hashCode()); assertFalse(hc == createACL(ace2, ace1).hashCode()); assertFalse(hc == repoAcl.hashCode()); assertFalse(hc == createEmptyACL().hashCode()); assertFalse(hc == createACL("/anotherPath", ace1, ace2).hashCode()); assertFalse(hc == new TestACL("/anotherPath", rp, getNamePathMapper(), ace1, ace2).hashCode()); assertFalse(hc == new TestACL("/anotherPath", rp, getNamePathMapper(), ace1, ace2).hashCode()); assertFalse(hc == new TestACL("/anotherPath", rp, getNamePathMapper()).hashCode()); assertFalse(hc == new TestACL(getTestPath(), rp, getNamePathMapper(), ace1, 
ace2).hashCode()); } }
/* * $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-server-src-2.1-iPlus Edit/src/webdav/server/org/apache/slide/webdav/method/CopyMethod.java,v 1.2 2006-01-22 22:55:20 peter-cvs Exp $ * $Revision: 1.2 $ * $Date: 2006-01-22 22:55:20 $ * * ==================================================================== * * Copyright 1999-2002 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.slide.webdav.method; import java.util.*; import org.apache.slide.content.*; import org.apache.slide.macro.*; import org.apache.slide.webdav.util.*; import java.io.IOException; import org.apache.slide.common.NamespaceAccessToken; import org.apache.slide.common.ServiceAccessException; import org.apache.slide.common.SlideException; import org.apache.slide.event.EventDispatcher; import org.apache.slide.event.VetoException; import org.apache.slide.lock.ObjectLockedException; import org.apache.slide.security.AccessDeniedException; import org.apache.slide.structure.LinkedObjectNotFoundException; import org.apache.slide.structure.ObjectNode; import org.apache.slide.structure.ObjectNotFoundException; import org.apache.slide.structure.SubjectNode; import org.apache.slide.util.Configuration; import org.apache.slide.util.XMLValue; import org.apache.slide.webdav.WebdavException; import org.apache.slide.webdav.WebdavServletConfig; import org.apache.slide.webdav.event.WebdavEvent; import 
org.apache.slide.webdav.util.resourcekind.AbstractResourceKind; import org.apache.slide.webdav.util.resourcekind.CheckedInVersionControlled; import org.apache.slide.webdav.util.resourcekind.ResourceKind; import org.apache.slide.webdav.util.resourcekind.VersionableImpl; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.Namespace;

/**
 * COPY Method.
 *
 * NOTE(review): implements the WebDAV COPY request for the Slide server. The class
 * continues past the end of this chunk; only the members visible here are documented.
 */
public class CopyMethod extends AbstractMultistatusResponseMethod implements DeltavConstants, AclConstants, BindConstants, CopyListener, DeleteListener, CopyRouteRedirector, WriteMethod { /** * The VersioningHelper used by this instance. */ protected VersioningHelper versioningHelper = null; /** * Maps the URI of a destination to its descriptor. * Used by {@link #beforeCopy beforeCopy()} and {@link #afterCopy afterCopy()}. */ protected Map destinationDescriptorMap = new HashMap(); /** * Maps the URI of a destination to its backup descriptor. * Used by {@link #beforeCopy beforeCopy()} and {@link #afterCopy afterCopy()}. */ protected Map destinationBackupDescriptorMap = new HashMap(); /** * The value of the <code>Label</code> header. */ protected String labelHeader = null; private MacroParameters macroParameters = null;
// ----------------------------------------------------------- Constructors
/** * Constructor. * * @param token the token for accessing the namespace * @param config configuration of the WebDAV servlet */ public CopyMethod(NamespaceAccessToken token, WebdavServletConfig config) { super(token, config); }
// ------------------------------------------------------ Protected Methods
/** * Parse request. * * @exception WebdavException Does not happen */ protected void parseRequest() throws WebdavException { super.parseRequest(); versioningHelper = VersioningHelper.getVersioningHelper(slideToken, token, req, resp, config); labelHeader = WebdavUtils.fixTomcatHeader(requestHeaders.getLabel(), "UTF-8"); } /** * Execute request. * * @exception WebdavException Unrecoverable error occured while copying */ protected void executeRequest() throws WebdavException, IOException {
// Prevent dirty reads
slideToken.setForceStoreEnlistment(true); boolean isCollection = isCollection(sourceUri);
// check lock-null resources: copying a lock-null source is reported as 404.
try { if (isLockNull(sourceUri)) { int statusCode = WebdavStatus.SC_NOT_FOUND; sendError( statusCode, "lock-null resource", new Object[]{sourceUri} ); throw new WebdavException( statusCode ); } } catch (ServiceAccessException e) { int statusCode = getErrorCode((Exception)e); sendError( statusCode, e ); throw new WebdavException( statusCode ); }
// check destination URI: restricted destinations are forbidden, except existing
// workspaces/working resources (probed via content.retrieve below).
UriHandler destUh = UriHandler.getUriHandler(destinationUri); if (destUh.isRestrictedUri()) { boolean sendError = true; if( destUh.isWorkspaceUri() || destUh.isWorkingresourceUri() ) {
// COPY on existing WSs or WRs is *not* restricted !!!
try { if ( WebdavEvent.COPY.isEnabled() ) EventDispatcher.getInstance().fireVetoableEvent(WebdavEvent.COPY, new WebdavEvent(this)); content.retrieve(slideToken, destinationUri); sendError = false; } catch( SlideException x ) { int statusCode = getErrorCode((SlideException)x); sendError( statusCode, x ); throw new WebdavException( statusCode ); } } if( sendError ) { int statusCode = WebdavStatus.SC_FORBIDDEN; sendError( statusCode, getClass().getName()+".restrictedDestinationUri", new Object[]{destinationUri} ); throw new WebdavException( statusCode ); } } try {
// compare resource types of source and destination; only Depth 0 or infinity is
// valid for COPY, and Overwrite controls the macro's delete/overwrite flags.
int depth = requestHeaders.getDepth(INFINITY); if (depth != 0 && depth != INFINITY) { int sc = WebdavStatus.SC_PRECONDITION_FAILED; sendError( sc, "Invalid header Depth: "+depth ); throw new WebdavException( sc ); } boolean recursive = (depth == INFINITY); if (overwrite) { macroParameters = new MacroParameters(recursive, true, true); } else { macroParameters = new MacroParameters(recursive, false, false); } boolean destinationExistsBefore = exists( destinationUri ); if (!overwrite && destinationExistsBefore) { int statusCode = WebdavStatus.SC_PRECONDITION_FAILED; sendError( statusCode, getClass().getName()+".noOverwrite", new Object[]{destinationUri} ); throw new WebdavException( statusCode ); } macro.copy(slideToken, sourceUri, destinationUri, macroParameters, this, this, null, this); if (overwrite && destinationExistsBefore) { resp.setStatus(WebdavStatus.SC_NO_CONTENT); } else { resp.setStatus(WebdavStatus.SC_CREATED); } } catch (MacroException e) { if(generateMultiStatusResponse(isCollection, e, requestUri)) { String errorMessage = generateErrorMessage(e);
// Write it on the servlet writer
resp.setStatus(WebdavStatus.SC_MULTI_STATUS); try { resp.setContentType(TEXT_XML_UTF_8); resp.getWriter().write(errorMessage); } catch(IOException ex) {
// Critical error ... Servlet container is dead or something
int statusCode = WebdavStatus.SC_INTERNAL_SERVER_ERROR; sendError( statusCode, e ); throw new WebdavException( statusCode ); } } else {
// Returning 207 on non-collection requests is generally
// considered bad. So let's not do it, since this way
// makes clients generally behave better.
SlideException exception = (SlideException)e.enumerateExceptions().nextElement(); if (exception instanceof PreconditionViolationException) { try { sendPreconditionViolation((PreconditionViolationException)exception); } catch(IOException ex) {
// Critical error ... Servlet container is dead or something
int statusCode = WebdavStatus.SC_INTERNAL_SERVER_ERROR; sendError( statusCode, e ); throw new WebdavException( statusCode ); } } else { int statusCode = getErrorCode( exception ); sendError( statusCode, exception ); throw new WebdavException( statusCode ); } }
//
// make sure the transaction is aborted
// throw any WebDAV exception to indicate the transaction wants to be aborted
// throw new WebdavException(WebdavStatus.SC_ACCEPTED, false);
} catch (WebdavException e) { throw e; } catch (SlideException e) { int statusCode = getErrorCode( e ); sendError( statusCode, e ); throw new WebdavException( statusCode ); } } /** * Get return status based on exception type. */ protected int getErrorCode(SlideException ex) {
// throw/catch dispatch: maps RevisionNotFoundException to 404, defers the rest
// to the superclass mapping.
try { throw ex; } catch(RevisionNotFoundException e) { return WebdavStatus.SC_NOT_FOUND; } catch (SlideException e) { return super.getErrorCode(e); } } /** * Restores all live properties that should not be copied. * * @param destinationNrd the descriptor to restore. * @param existingNrd the descriptor that has been overwritten. */ private void restoreLiveProperties(String destinationUri, NodeRevisionDescriptor destinationNrd, NodeRevisionDescriptor existingNrd) {
// remove all live properties
Enumeration propertyEnum = destinationNrd.enumerateProperties(); NodeProperty property = null; while (propertyEnum.hasMoreElements()) { property = (NodeProperty)propertyEnum.nextElement(); if (isLivePropertyToRestore(destinationUri, property)) { destinationNrd.removeProperty(property); } }
// copy all live properties of the existing destination
propertyEnum = existingNrd.enumerateProperties(); property = null; while (propertyEnum.hasMoreElements()) { property = (NodeProperty)propertyEnum.nextElement(); if (isLivePropertyToRestore(destinationUri, property)) { destinationNrd.setProperty(property); } } } /** * Indicates if the given property is a live property to restore. * * @param property the NodeProperty to decide. * * @return <code>true</code> if this is a live property to restore. */ private boolean isLivePropertyToRestore(String uri, NodeProperty property) { boolean isLivePropertyToRestore = property.isLiveProperty() && ( DeltavConstants.DELTAV_PROPERTY_LIST.contains(property.getName()) || AclConstants.ACL_PROPERTY_LIST.contains(property.getName()) || BindConstants.BIND_PROPERTY_LIST.contains(property.getName()) || P_CREATIONDATE.equals(property.getName()) || ( P_DISPLAYNAME.equals(property.getName()) && Configuration.useBinding(token.getUri(slideToken, uri).getStore()) ) ); return isLivePropertyToRestore; } /** * Sets all DeltaV specific properties of the given NodeRevisionDescriptor * to their initial value. * * @param revisionDescriptor the NodeRevisionDescriptor whose DeltaV * properties should be reset. */ private void resetDeltavProperties(NodeRevisionDescriptor revisionDescriptor, String resourcePath) {
// use initial values for DeltaV properties
PropertyHelper propertyHelper = PropertyHelper.getPropertyHelper(slideToken, token, getConfig()); ResourceKind resourceKind = VersionableImpl.getInstance(); Iterator initialPropertyIterator = propertyHelper.createInitialProperties(resourceKind, resourcePath).iterator(); NodeProperty property = null; List initialDeltavProperties = new ArrayList(); while (initialPropertyIterator.hasNext()) { property = (NodeProperty)initialPropertyIterator.next(); if (DeltavConstants.DELTAV_PROPERTY_LIST.contains(property.getName())) { initialDeltavProperties.add(property); } } Enumeration propertyEnum = revisionDescriptor.enumerateProperties(); property = null; int index = 0; while (propertyEnum.hasMoreElements()) { property = (NodeProperty)propertyEnum.nextElement(); if (DeltavConstants.DELTAV_PROPERTY_LIST.contains(property.getName())) { index = initialDeltavProperties.indexOf(property); if (index >= 0) { revisionDescriptor.setProperty((NodeProperty)initialDeltavProperties.get(index)); } else { revisionDescriptor.removeProperty(property); } } } } /** * Restores the "backup" NodeRevisionDescriptor which has been saved in * method {@link #beforeDelete beforeDelete()}. * * @param destinationUri the Uri of the resource. * @param destinationNrds the NodeRevisionDescriptors of * the resource. */ private void restoreBackupRevisionDescriptor(String destinationUri, NodeRevisionDescriptors destinationNrds) throws RevisionNotFoundException, ServiceAccessException, RevisionAlreadyExistException, ObjectNotFoundException, LinkedObjectNotFoundException, ObjectLockedException, AccessDeniedException, RevisionDescriptorNotFoundException, BranchNotFoundException, NodeNotVersionedException, VetoException { NodeRevisionDescriptor backupNrd = (NodeRevisionDescriptor)destinationBackupDescriptorMap.get(destinationUri); if (backupNrd != null) { try { content.retrieve( slideToken, destinationNrds, NodeRevisionNumber.HIDDEN_0_0 ); content.store( slideToken, destinationNrds.getUri(), backupNrd, null ); } catch (RevisionDescriptorNotFoundException e) { content.create( slideToken, destinationNrds.getUri(), null, backupNrd, null );
// branch=null, revisionContent=null
} } }
// ------------------------------------------------------ Interface CopyRouteRedirector
/** * Returns the (redirected) CopyRoute to use. Must not be <code>null</code>. * * @param copyRoute the original CopyRoute. * * @return the (redirected) CopyRoute to use. * * @throws SlideException this Exception will be passed to the caller * of the Macro helper (contained in the * MacroCopyException). */ public CopyRoute getRedirectedCopyRoute(CopyRoute copyRoute) throws SlideException {
// Under version control, redirect the source to the revision selected by the
// Label request header; an unknown label is surfaced as a 409 precondition
// violation (DAV:must-select-version-in-history).
if (Configuration.useVersionControl()) { String sourceUri = copyRoute.getSourceUri(); String destinationUri = copyRoute.getDestinationUri(); try { sourceUri = versioningHelper.getLabeledResourceUri(sourceUri, labelHeader); copyRoute = new CopyRoute(sourceUri, destinationUri); } catch (LabeledRevisionNotFoundException e) { ViolatedPrecondition violatedPrecondition = new ViolatedPrecondition(DeltavConstants.C_MUST_SELECT_VERSION_IN_HISTORY, WebdavStatus.SC_CONFLICT); throw new PreconditionViolationException(violatedPrecondition, sourceUri); } } return copyRoute; }
// ------------------------------------------------------ Interface CopyListener
/** * This method is called prior to copying the resource associated by * the given <code>sourceUri</code>. The copy can be prohibited by * throwing a SlideException. * * @param sourceUri the Uri of the resource that will be copied. * @param destinationUri the Uri of the copy. * * @throws SlideException this Exception will be passed to the caller * of the Macro helper (contained in the * MacroDeleteException.
*/ public void beforeCopy(String sourceUri, String destinationUri, boolean isRootOfCopy) throws SlideException { if(Configuration.useVersionControl()) { UriHandler sourceUh = UriHandler.getUriHandler(sourceUri); if (sourceUh.isHistoryUri()) { throw new PreconditionViolationException( new ViolatedPrecondition(DeltavConstants.C_CANNOT_COPY_HISTORY, WebdavStatus.SC_FORBIDDEN), sourceUri); } if (!macroParameters.isDeleteCreate()) { beforeUpdateOrDelete( destinationUri ); } } if (isRootOfCopy && Configuration.useBinding(token.getUri(slideToken, destinationUri).getStore())) { // collect the parent bindings of the destination node Map parentBindings = new HashMap(); try { NodeRevisionDescriptor destinationNrd = content.retrieve( slideToken, content.retrieve(slideToken, destinationUri) ); XMLValue v = new XMLValue( (String)destinationNrd.getProperty( P_PARENT_SET ).getValue() ); Iterator i = v.iterator(); while (i.hasNext()) { Namespace dnsp = Namespace.getNamespace(S_DAV); Element parentElm = (Element)i.next(); String segment = parentElm.getChild(E_SEGMENT, dnsp).getTextTrim(); String href = parentElm.getChild(E_HREF, dnsp).getTextTrim(); parentBindings.put( href, segment ); } } catch( ServiceAccessException x ) { throw x; } catch (SlideException e) {} catch (JDOMException e) {} macroParameters.setParameter( Macro.PARENT_BINDINGS, parentBindings ); } } /** * This method is called after copying the resource to * the given <code>destinationUri</code>. * * @param sourceUri the Uri of the resource that has been copied. * @param destinationUri the Uri of the copy. * * @throws SlideException this Exception will be passed to the caller * of the Macro helper (contained in the * MacroDeleteException. 
*/ public void afterCopy(String sourceUri, String destinationUri, boolean isRootOfCopy, boolean destinationExists) throws SlideException { if(Configuration.useVersionControl()) { NodeRevisionDescriptors destinationNrds = content.retrieve( slideToken, destinationUri); NodeRevisionDescriptor destinationNrd = content.retrieve( slideToken, destinationNrds); // restore backup descriptor restoreBackupRevisionDescriptor(destinationUri, destinationNrds); NodeRevisionDescriptor existingNrd = (NodeRevisionDescriptor)destinationDescriptorMap.get(destinationUri); if (existingNrd != null) { // there has been an existing destination, so restore live properties restoreLiveProperties(destinationUri, destinationNrd, existingNrd); } else { // DAV:must-not-copy-versioning-property resetDeltavProperties(destinationNrd, destinationUri); } // set <workspace> property versioningHelper.setWorkspaceProperty(destinationUri, destinationNrd); // set some other properties destinationNrd.setLastModified( new Date() ); // P_GETLASTMODIFIED destinationNrd.setETag( PropertyHelper.computeEtag(destinationUri, destinationNrd) ); // P_GETETAG if (!destinationExists) { // copy is creating a new resource String creator = ((SubjectNode)security.getPrincipal(slideToken)).getPath().lastSegment(); destinationNrd.setOwner(creator); } content.store(slideToken, destinationNrds.getUri(), destinationNrd, null); // checkin if necessary boolean mustCheckin = versioningHelper.mustCheckinAutoVersionedVCR(slideToken, destinationNrds, destinationNrd); if (mustCheckin) { try { versioningHelper.checkin(destinationUri, false, false, true ); //forkOk=false, keepCheckedOut=false } catch (IOException e) { throw new SlideException("Checkin failed: " + e.getMessage()); } catch (JDOMException e) { throw new SlideException("Checkin failed: " + e.getMessage()); } } // check if the resource should be put under version-control if( isAutoVersionControl(destinationUri) && !isCollection(destinationUri) && 
!isExcludedForVersionControl(destinationUri) ) { versioningHelper.versionControl(destinationUri); } } if (isRootOfCopy && Configuration.useBinding(token.getUri(slideToken, destinationUri).getStore())) { // try to restore the parent bindings if (macroParameters.getParameter(Macro.PARENT_BINDINGS) != null) { Map parentBindings = (Map)macroParameters.getParameter(Macro.PARENT_BINDINGS); Iterator i = parentBindings.entrySet().iterator(); while (i.hasNext()) { Map.Entry me = (Map.Entry)i.next(); ObjectNode parentNode = structure.retrieve( slideToken, (String)me.getKey() ); ObjectNode destinationNode = structure.retrieve( slideToken, destinationUri ); String segment = (String)me.getValue(); structure.addBinding( slideToken, parentNode, segment, destinationNode ); } } } } // ------------------------------------------------------ Interface DeleteListener /** * This method is called prior to deleting the resource associated by * the given <code>targetUri</code>. The deletion can be prohibited by * throwing a SlideException. * * @param destinationUri the Uri of the resource that will be deleted. * * @throws SlideException this Exception will be passed to the caller * of the Macro helper (contained in the * MacroDeleteException. */ public void beforeDelete(String destinationUri) throws SlideException { beforeUpdateOrDelete( destinationUri ); } /** * This method is called prior to deleting the resource associated by * the given <code>targetUri</code>. The deletion can be prohibited by * throwing a SlideException. * * @param destinationUri the Uri of the resource that will be deleted. * * @throws SlideException this Exception will be passed to the caller * of the Macro helper (contained in the * MacroDeleteException. 
*/ private void beforeUpdateOrDelete(String destinationUri) throws SlideException { if( Configuration.useVersionControl() ) { NodeRevisionDescriptors destinationNrds = null; NodeRevisionDescriptor destinationNrd = null; try { destinationNrds = content.retrieve( slideToken, destinationUri); destinationNrd = content.retrieve( slideToken, destinationNrds); } catch (ObjectNotFoundException e) {} if (destinationNrds != null && destinationNrd != null) { ResourceKind resourceKind = AbstractResourceKind.determineResourceKind(token, destinationUri, destinationNrd); if (resourceKind instanceof CheckedInVersionControlled) { // check precondition DAV:cannot-modify-version-controlled-content String autoVersion = versioningHelper.getAutoVersionElementName(destinationNrd); if (autoVersion == null) { autoVersion = ""; } if ( !E_CHECKOUT_CHECKIN.equals(autoVersion) && !E_CHECKOUT_UNLOCKED_CHECKIN.equals(autoVersion) && !E_CHECKOUT.equals(autoVersion) && !E_CHECKOUT_IGNORE_UNLOCK.equals(autoVersion) && !E_LOCKED_CHECKOUT.equals(autoVersion) ) { throw new PreconditionViolationException(new ViolatedPrecondition(C_CANNOT_MODIFY_VERSION_CONTROLLED_CONTENT, WebdavStatus.SC_FORBIDDEN), destinationUri); } if ( E_LOCKED_CHECKOUT.equals(autoVersion) && ( !versioningHelper.isWriteLocked(slideToken, destinationNrds) ) ) { throw new PreconditionViolationException(new ViolatedPrecondition(C_CANNOT_MODIFY_VERSION_CONTROLLED_CONTENT, WebdavStatus.SC_FORBIDDEN), destinationUri); } } // check precondition DAV:cannot-modify-version UriHandler uriHandler = UriHandler.getUriHandler(destinationUri); if (uriHandler.isVersionUri()) { throw new PreconditionViolationException(new ViolatedPrecondition(C_CANNOT_MODIFY_VERSION, WebdavStatus.SC_FORBIDDEN), destinationUri); } // checkout if necessary if( Configuration.useVersionControl() && (resourceKind instanceof CheckedInVersionControlled) && versioningHelper.mustCheckoutAutoVersionedVCR(destinationNrds, destinationNrd) ) { try { 
versioningHelper.checkout(destinationNrds, destinationNrd, false, false, true ); } catch (IOException e) { throw new SlideException("Checkout failed: " + e.getMessage()); } catch (JDOMException e) { throw new SlideException("Checkout failed: " + e.getMessage()); } } // store the descriptor(s) in order to restore it in afterDelete() // (the COPY specification for DeltaV says that an existing destination // must not be deleted) try { NodeRevisionDescriptor backupNrd = content.retrieve( slideToken, destinationNrds, NodeRevisionNumber.HIDDEN_0_0 ); destinationBackupDescriptorMap.put(destinationUri, backupNrd); } catch (RevisionDescriptorNotFoundException e) { } destinationDescriptorMap.put(destinationUri, destinationNrd); } } } /** * This method is called after deleting the resource associated by * the given <code>targetUri</code>. * * @param targetUri the Uri of the resource that has been deleted. * * @throws SlideException this Exception will be passed to the caller * of the Macro helper (contained in the * targetUricroDeleteException. */ public void afterDelete(String targetUri) throws SlideException { } }
package eu.chargetime.ocpp; /* ChargeTime.eu - Java-OCA-OCPP Copyright (C) 2015-2016 Thomas Volden <[email protected]> MIT License Copyright (C) 2016-2018 Thomas Volden Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ import eu.chargetime.ocpp.feature.Feature; import eu.chargetime.ocpp.model.Confirmation; import eu.chargetime.ocpp.model.Request; import eu.chargetime.ocpp.utilities.MoreObjects; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Unites outgoing {@link Request} with incoming {@link Confirmation}s or errors. Catches errors and * responds with error messages. 
*/ public class Session implements ISession { private static final Logger logger = LoggerFactory.getLogger(Session.class); private final UUID sessionId = UUID.randomUUID(); private final Communicator communicator; private final Queue queue; private final RequestDispatcher dispatcher; private final IFeatureRepository featureRepository; private SessionEvents events; /** * Handles required injections. * * @param communicator send and receive messages. * @param queue store and restore requests based on unique ids. */ public Session( Communicator communicator, Queue queue, PromiseFulfiller fulfiller, IFeatureRepository featureRepository) { this.communicator = communicator; this.queue = queue; this.dispatcher = new RequestDispatcher(fulfiller); this.featureRepository = featureRepository; } /** * Get a unique session {@link UUID} identifier. * * @return the unique session {@link UUID} identifier */ public UUID getSessionId() { return sessionId; } /** * Send a {@link Request}. * * @param action action name to identify the feature. * @param payload the {@link Request} payload to send * @param uuid unique identification to identify the request */ public void sendRequest(String action, Request payload, String uuid) { communicator.sendCall(uuid, action, payload); } /** * Store a {@link Request} and get the unique id. * * @param payload the {@link Request} payload to send * @return unique identification to identify the request. */ public String storeRequest(Request payload) { return queue.store(payload); } /** * Send a {@link Confirmation} to a {@link Request} * * @param uniqueId the unique identification the receiver expects. * @param confirmation the {@link Confirmation} payload to send. */ public void sendConfirmation(String uniqueId, String action, Confirmation confirmation) { communicator.sendCallResult(uniqueId, action, confirmation); } private Optional<Class<? 
extends Confirmation>> getConfirmationType(String uniqueId) throws UnsupportedFeatureException { Optional<Request> requestOptional = queue.restoreRequest(uniqueId); if (requestOptional.isPresent()) { Optional<Feature> featureOptional = featureRepository.findFeature(requestOptional.get()); if (featureOptional.isPresent()) { return Optional.of(featureOptional.get().getConfirmationType()); } else { logger.debug("Feature for request with id: {} not found in session: {}", uniqueId, this); throw new UnsupportedFeatureException( "Error with getting confirmation type by request id = " + uniqueId); } } else { logger.debug("Request with id: {} not found in session: {}", uniqueId, this); } return Optional.empty(); } /** * Connect to a specific uri, provided a call back handler for connection related events. * * @param uri url and port of the remote system. * @param eventHandler call back handler for connection related events. */ public void open(String uri, SessionEvents eventHandler) { this.events = eventHandler; dispatcher.setEventHandler(eventHandler); communicator.connect(uri, new CommunicatorEventHandler()); } /** Close down the connection. */ public void close() { communicator.disconnect(); } public void accept(SessionEvents eventHandler) { this.events = eventHandler; dispatcher.setEventHandler(eventHandler); communicator.accept(new CommunicatorEventHandler()); } private class CommunicatorEventHandler implements CommunicatorEvents { private static final String OCCURENCE_CONSTRAINT_VIOLATION = "Payload for Action is syntactically correct but at least one of the fields violates occurence constraints"; private static final String INTERNAL_ERROR = "An internal error occurred and the receiver was not able to process the requested Action successfully"; private static final String UNABLE_TO_PROCESS = "Unable to process action"; @Override public void onCallResult(String id, String action, Object payload) { try { Optional<Class<? 
extends Confirmation>> confirmationTypeOptional = getConfirmationType(id); if (confirmationTypeOptional.isPresent()) { Confirmation confirmation = communicator.unpackPayload(payload, confirmationTypeOptional.get()); if (confirmation.validate()) { events.handleConfirmation(id, confirmation); } else { communicator.sendCallError( id, action, "OccurenceConstraintViolation", OCCURENCE_CONSTRAINT_VIOLATION); } } else { logger.warn(INTERNAL_ERROR); communicator.sendCallError(id, action, "InternalError", INTERNAL_ERROR); } } catch (PropertyConstraintException ex) { logger.warn(ex.getMessage(), ex); communicator.sendCallError(id, action, "TypeConstraintViolation", ex.getMessage()); } catch (UnsupportedFeatureException ex) { logger.warn(INTERNAL_ERROR, ex); communicator.sendCallError(id, action, "InternalError", INTERNAL_ERROR); } catch (Exception ex) { logger.warn(UNABLE_TO_PROCESS, ex); communicator.sendCallError(id, action, "FormationViolation", UNABLE_TO_PROCESS); } } @Override public synchronized void onCall(String id, String action, Object payload) { Optional<Feature> featureOptional = featureRepository.findFeature(action); if (!featureOptional.isPresent()) { communicator.sendCallError( id, action, "NotImplemented", "Requested Action is not known by receiver"); } else { try { Request request = communicator.unpackPayload(payload, featureOptional.get().getRequestType()); if (request.validate()) { CompletableFuture<Confirmation> promise = dispatcher.handleRequest(request); promise.whenComplete(new ConfirmationHandler(id, action, communicator)); } else { communicator.sendCallError( id, action, "OccurenceConstraintViolation", OCCURENCE_CONSTRAINT_VIOLATION); } } catch (PropertyConstraintException ex) { logger.warn(ex.getMessage(), ex); communicator.sendCallError(id, action, "TypeConstraintViolation", ex.getMessage()); } catch (Exception ex) { logger.warn(UNABLE_TO_PROCESS, ex); communicator.sendCallError(id, action, "FormationViolation", UNABLE_TO_PROCESS); } } } @Override 
public void onError(String id, String errorCode, String errorDescription, Object payload) { events.handleError(id, errorCode, errorDescription, payload); } @Override public void onDisconnected() { events.handleConnectionClosed(); } @Override public void onConnected() { events.handleConnectionOpened(); } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Session session = (Session) o; return MoreObjects.equals(sessionId, session.sessionId); } @Override public int hashCode() { return MoreObjects.hash(sessionId); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("sessionId", sessionId) .add("communicator", communicator) .add("queue", queue) .add("dispatcher", dispatcher) .add("featureRepository", featureRepository) .add("events", events) .toString(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Random; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.apache.geode.cache.ExpirationAttributes; import org.apache.geode.cache.Region; import org.apache.geode.cache.Scope; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.cache.BucketAdvisor.BucketProfile; import org.apache.geode.internal.cache.partitioned.RegionAdvisor; import org.apache.geode.internal.cache.partitioned.RegionAdvisor.PartitionProfile; import org.apache.geode.internal.util.VersionedArrayList; /** * This class is an integration test for {@link PartitionedRegionQueryEvaluator} class. */ public class PartitionedRegionQueryEvaluatorIntegrationTest { @Rule public TestName name = new TestName(); /** * Test for the helper method getNodeToBucketMap. 
*/ @Test public void testGetNodeToBucketMap() { int totalNodes = 100; String prPrefix = name.getMethodName(); String localMaxMemory = "0"; final int redundancy = 1; final int totalNoOfBuckets = 5; PartitionedRegion pr = (PartitionedRegion) PartitionedRegionTestHelper .createPartitionedRegion(prPrefix, localMaxMemory, redundancy); HashSet<Integer> bucketsToQuery = new HashSet<Integer>(); for (int i = 0; i < totalNoOfBuckets; i++) { bucketsToQuery.add(i); } final String expectedUnknownHostException = UnknownHostException.class.getName(); pr.getCache().getLogger().info( "<ExpectedException action=add>" + expectedUnknownHostException + "</ExpectedException>"); final ArrayList nodes = createNodeList(totalNodes); pr.getCache().getLogger().info("<ExpectedException action=remove>" + expectedUnknownHostException + "</ExpectedException>"); // populating bucket2Node of the partition region // ArrayList<InternalDistributedMember> final ArrayList dses = createDataStoreList(totalNodes); populateBucket2Node(pr, dses, totalNoOfBuckets); populateAllPartitionedRegion(pr, nodes); // running the algorithm and getting the list of bucktes to grab PartitionedRegionQueryEvaluator evalr = new PartitionedRegionQueryEvaluator(pr.getSystem(), pr, null, null, null, null, bucketsToQuery); Map n2bMap = null; try { n2bMap = evalr.buildNodeToBucketMap(); } catch (Exception ex) { } ArrayList buckList = new ArrayList(); for (Iterator itr = n2bMap.entrySet().iterator(); itr.hasNext();) { Map.Entry entry = (Map.Entry) itr.next(); if (entry.getValue() != null) buckList.addAll((List) entry.getValue()); } // checking size of the two lists assertEquals("Unexpected number of buckets", totalNoOfBuckets, buckList.size()); for (int i = 0; i < totalNoOfBuckets; i++) { assertTrue(" Bucket with Id = " + i + " not present in bucketList.", buckList.contains(new Integer(i))); } pr.destroyRegion(); } /** * This function populates bucket2Node region of the partition region */ private void 
populateBucket2Node(PartitionedRegion pr, List nodes, int numOfBuckets) { assertEquals(0, pr.getRegionAdvisor().getCreatedBucketsCount()); final RegionAdvisor ra = pr.getRegionAdvisor(); int nodeListCnt = 0; Random ran = new Random(); HashMap verMap = new HashMap(); // Map tracking version for profile insertion purposes for (int i = 0; i < numOfBuckets; i++) { nodeListCnt = setNodeListCnt(nodeListCnt); for (int j = 0; j < nodeListCnt; j++) { BucketProfile bp = new BucketProfile(); bp.peerMemberId = (InternalDistributedMember) nodes.get(ran.nextInt(nodes.size())); Integer v; if ((v = (Integer) verMap.get(bp.getDistributedMember())) != null) { bp.version = v.intValue() + 1; verMap.put(bp.getDistributedMember(), new Integer(bp.version)); } else { verMap.put(bp.getDistributedMember(), new Integer(0)); bp.version = 0; } bp.isHosting = true; if (j == 0) { bp.isPrimary = true; } bp.scope = Scope.DISTRIBUTED_ACK; boolean forceBadProfile = true; assertTrue(ra.getBucket(i).getBucketAdvisor().putProfile(bp, forceBadProfile)); } } } /** * This function decides number of the nodes in the list of bucket2Node region */ private int setNodeListCnt(int i) { int nListcnt = 0; switch (i) { case 0: nListcnt = 1; break; case 1: nListcnt = 4; break; case 2: nListcnt = 1; break; case 3: nListcnt = 2; break; case 4: nListcnt = 1; break; case 5: nListcnt = 3; break; case 6: nListcnt = 3; break; case 7: nListcnt = 1; break; case 8: nListcnt = 1; break; case 9: nListcnt = 2; break; } return nListcnt; } /** * This functions number of new nodes specified by nCount. 
*/ private ArrayList createNodeList(int nCount) { ArrayList nodeList = new ArrayList(nCount); for (int i = 0; i < nCount; i++) { nodeList.add(createNode(i)); } return nodeList; } private ArrayList createDataStoreList(int nCount) { ArrayList nodeList = new ArrayList(nCount); for (int i = 0; i < nCount; i++) { nodeList.add(createDataStoreMember(i)); } return nodeList; } private VersionedArrayList getVersionedNodeList(int nCount, List<Node> nodes) { VersionedArrayList nodeList = new VersionedArrayList(nCount); Random ran = new Random(); for (int i = 0; i < nCount; i++) { nodeList.add(nodes.get(ran.nextInt(nodes.size()))); } return nodeList; } private InternalDistributedMember createDataStoreMember(int i) { return new InternalDistributedMember("host" + i, 3033); } /** * this function creates new node. */ private Node createNode(int i) { Node node = new Node(new InternalDistributedMember("host" + i, 3033), i); node.setPRType(Node.DATASTORE); return node; } private void populateAllPartitionedRegion(PartitionedRegion pr, List nodes) { Region rootReg = PartitionedRegionHelper.getPRRoot(pr.getCache()); PartitionRegionConfig prConf = new PartitionRegionConfig(pr.getPRId(), pr.getFullPath(), pr.getPartitionAttributes(), pr.getScope(), new EvictionAttributesImpl(), new ExpirationAttributes(), new ExpirationAttributes(), new ExpirationAttributes(), new ExpirationAttributes(), Collections.emptySet()); RegionAdvisor ra = pr.getRegionAdvisor(); for (Iterator itr = nodes.iterator(); itr.hasNext();) { Node n = (Node) itr.next(); prConf.addNode(n); PartitionProfile pp = (PartitionProfile) ra.createProfile(); pp.peerMemberId = n.getMemberId(); pp.isDataStore = true; final boolean forceFakeProfile = true; pr.getRegionAdvisor().putProfile(pp, forceFakeProfile); } rootReg.put(pr.getRegionIdentifier(), prConf); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.ahc.ws; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.CharArrayReader; import java.io.IOException; import java.util.HashSet; import java.util.Set; import java.util.concurrent.ExecutionException; import com.ning.http.client.AsyncHttpClient; import com.ning.http.client.AsyncHttpClientConfig; import com.ning.http.client.AsyncHttpProvider; import com.ning.http.client.providers.grizzly.GrizzlyAsyncHttpProvider; import com.ning.http.client.websocket.WebSocket; import com.ning.http.client.websocket.WebSocketByteListener; import com.ning.http.client.websocket.WebSocketTextListener; import com.ning.http.client.websocket.WebSocketUpgradeHandler; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.component.ahc.AhcEndpoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * */ public class WsEndpoint extends AhcEndpoint { private static final transient Logger LOG = LoggerFactory.getLogger(WsEndpoint.class); // for using websocket streaming/fragments private static final boolean GRIZZLY_AVAILABLE = 
probeClass("com.ning.http.client.providers.grizzly.GrizzlyAsyncHttpProvider"); private WebSocket websocket; private Set<WsConsumer> consumers; private boolean useStreaming; public WsEndpoint(String endpointUri, WsComponent component) { super(endpointUri, component, null); this.consumers = new HashSet<WsConsumer>(); } private static boolean probeClass(String name) { try { Class.forName(name, true, WsEndpoint.class.getClassLoader()); return true; } catch (Throwable t) { return false; } } @Override public WsComponent getComponent() { return (WsComponent) super.getComponent(); } @Override public Producer createProducer() throws Exception { return new WsProducer(this); } @Override public Consumer createConsumer(Processor processor) throws Exception { return new WsConsumer(this, processor); } WebSocket getWebSocket() { synchronized (this) { if (websocket == null) { try { connect(); } catch (Exception e) { LOG.error("Failed to connect", e); } } } return websocket; } void setWebSocket(WebSocket websocket) { this.websocket = websocket; } /** * @return the useStreaming */ public boolean isUseStreaming() { return useStreaming; } /** * @param useStreaming the useStreaming to set */ public void setUseStreaming(boolean useStreaming) { this.useStreaming = useStreaming; } /* (non-Javadoc) * @see org.apache.camel.component.ahc.AhcEndpoint#createClient(com.ning.http.client.AsyncHttpClientConfig) */ @Override protected AsyncHttpClient createClient(AsyncHttpClientConfig config) { AsyncHttpClient client; if (config == null) { config = new AsyncHttpClientConfig.Builder().build(); } AsyncHttpProvider ahp = getAsyncHttpProvider(config); if (ahp == null) { client = new AsyncHttpClient(config); } else { client = new AsyncHttpClient(ahp, config); } return client; } public void connect() throws InterruptedException, ExecutionException, IOException { websocket = getClient().prepareGet(getHttpUri().toASCIIString()).execute( new WebSocketUpgradeHandler.Builder() .addWebSocketListener(new 
WsListener()).build()).get(); } @Override protected void doStop() throws Exception { if (websocket != null && websocket.isOpen()) { websocket.close(); } super.doStop(); } void connect(WsConsumer wsConsumer) { consumers.add(wsConsumer); } void disconnect(WsConsumer wsConsumer) { consumers.remove(wsConsumer); } class WsListener implements WebSocketTextListener, WebSocketByteListener { private ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream(); private StringBuffer textBuffer = new StringBuffer(); @Override public void onOpen(WebSocket websocket) { LOG.debug("websocket opened"); } @Override public void onClose(WebSocket websocket) { LOG.debug("websocket closed"); } @Override public void onError(Throwable t) { LOG.error("websocket on error", t); } @Override public void onMessage(byte[] message) { LOG.debug("received message --> {}", message); for (WsConsumer consumer : consumers) { consumer.sendMessage(message); } } @Override public void onFragment(byte[] fragment, boolean last) { if (LOG.isDebugEnabled()) { LOG.debug("received fragment({}) --> {}", last, fragment); } // for now, construct a memory based stream. In future, we provide a fragmented stream that can // be consumed before the final fragment is added. synchronized (byteBuffer) { try { byteBuffer.write(fragment); } catch (IOException e) { //ignore } if (last) { //REVIST avoid using baos/bais that waste memory byte[] msg = byteBuffer.toByteArray(); for (WsConsumer consumer : consumers) { consumer.sendMessage(new ByteArrayInputStream(msg)); } byteBuffer.reset(); } } } @Override public void onMessage(String message) { LOG.debug("received message --> {}", message); for (WsConsumer consumer : consumers) { consumer.sendMessage(message); } } @Override public void onFragment(String fragment, boolean last) { if (LOG.isDebugEnabled()) { LOG.debug("received fragment({}) --> {}", last, fragment); } // for now, construct a memory based stream. 
In future, we provide a fragmented stream that can // be consumed before the final fragment is added. synchronized (textBuffer) { textBuffer.append(fragment); if (last) { //REVIST avoid using sb/car that waste memory char[] msg = new char[textBuffer.length()]; textBuffer.getChars(0, msg.length, msg, 0); for (WsConsumer consumer : consumers) { consumer.sendMessage(new CharArrayReader(msg)); } textBuffer.setLength(0); } } } } protected AsyncHttpProvider getAsyncHttpProvider(AsyncHttpClientConfig config) { if (GRIZZLY_AVAILABLE) { return new GrizzlyAsyncHttpProvider(config); } return null; } }
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the MIT License https://raw.github.com/mit-cml/app-inventor/master/mitlicense.txt

package com.google.appinventor.client.wizards;

import static com.google.appinventor.client.Ode.MESSAGES;

import java.io.File;

import com.google.appinventor.client.ErrorReporter;
import com.google.appinventor.client.Ode;
import com.google.appinventor.client.OdeAsyncCallback;
import com.google.appinventor.client.explorer.project.Project;
import com.google.appinventor.client.utils.Uploader;
import com.google.appinventor.client.youngandroid.TextValidators;
import com.google.appinventor.shared.rpc.ServerLayout;
import com.google.appinventor.shared.rpc.UploadResponse;
import com.google.appinventor.shared.rpc.project.FileNode;
import com.google.appinventor.shared.rpc.project.FolderNode;
import com.google.appinventor.shared.rpc.project.ProjectNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetsFolder;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.URL;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.FileUpload;
import com.google.gwt.user.client.ui.VerticalPanel;

/**
 * Wizard for uploading individual files.
 *
 */
public class FileUploadWizard extends Wizard {
  /**
   * Interface for callback to execute after a file is uploaded.
   */
  public static interface FileUploadedCallback {
    /**
     * Will be invoked after a file is uploaded.
     *
     * @param folderNode the upload destination folder
     * @param fileNode the file just uploaded
     */
    void onFileUploaded(FolderNode folderNode, FileNode fileNode);
  }

  /**
   * Creates a new file upload wizard.
   *
   * @param folderNode the upload destination folder
   */
  public FileUploadWizard(FolderNode folderNode) {
    this(folderNode, null);
  }

  /**
   * Creates a new file upload wizard.
   *
   * @param folderNode the upload destination folder
   * @param fileUploadedCallback callback to be executed after upload
   */
  public FileUploadWizard(final FolderNode folderNode,
      final FileUploadedCallback fileUploadedCallback) {
    super(MESSAGES.fileUploadWizardCaption(), true, false);

    // Initialize UI
    final FileUpload upload = new FileUpload();
    upload.setName(ServerLayout.UPLOAD_FILE_FORM_ELEMENT);
    setStylePrimaryName("ode-DialogBox");
    VerticalPanel panel = new VerticalPanel();
    panel.setVerticalAlignment(VerticalPanel.ALIGN_MIDDLE);
    panel.add(upload);
    addPage(panel);

    // Create finish command (upload a file)
    initFinishCommand(new Command() {
      @Override
      public void execute() {
        String uploadFilename = upload.getFilename();
        if (!uploadFilename.isEmpty()) {
          // Strip path and whitespace, then validate the resulting filename.
          final String filename = makeValidFilename(uploadFilename);
          if(!TextValidators.isValidCharFilename(filename)){
            Window.alert(MESSAGES.malformedFilename());
            return;
          } else if (!TextValidators.isValidLengthFilename(filename)){
            Window.alert(MESSAGES.filenameBadSize());
            return;
          }
          // A case-insensitive name clash requires explicit user confirmation.
          String fn = conflictingExistingFile(folderNode, filename);
          if (fn != null && !confirmOverwrite(folderNode, fn, filename)) {
            return;
          } else {
            String fileId = folderNode.getFileId() + "/" + filename;
            // We delete all the conflicting files.
            // (Files whose ids match case-insensitively but not exactly; the
            // exact-match case is overwritten by the upload itself.)
            for (ProjectNode child : folderNode.getChildren()) {
              if (fileId.equalsIgnoreCase(child.getFileId())
                  && !fileId.equals(child.getFileId())) {
                final ProjectNode node = child;
                String filesToClose [] = { node.getFileId()};
                Ode ode = Ode.getInstance();
                // Close any open editor on the doomed file before deleting it.
                ode.getEditorManager().closeFileEditors(node.getProjectId(), filesToClose);
                // NOTE(review): this delete is asynchronous while the upload below is
                // started immediately - looks like a potential race; confirm the
                // back-end tolerates upload-before-delete ordering.
                ode.getProjectService().deleteFile(
                    node.getProjectId(), node.getFileId(),
                    new OdeAsyncCallback<Long>(
                        // message on failure
                        MESSAGES.deleteFileError()) {
                      @Override
                      public void onSuccess(Long date) {
                        Ode.getInstance().getProjectManager().getProject(node).deleteNode(node);
                        Ode.getInstance().updateModificationDate(node.getProjectId(), date);
                      }
                    });
              }
            }
          }
          ErrorReporter.reportInfo(MESSAGES.fileUploadingMessage(filename));
          // Use the folderNode's project id and file id in the upload URL so that the file is
          // uploaded into that project and that folder in our back-end storage.
          String uploadUrl = GWT.getModuleBaseURL() + ServerLayout.UPLOAD_SERVLET + "/"
              + ServerLayout.UPLOAD_FILE + "/" + folderNode.getProjectId() + "/"
              + folderNode.getFileId() + "/" + filename;
          Uploader.getInstance().upload(upload, uploadUrl,
              new OdeAsyncCallback<UploadResponse>(MESSAGES.fileUploadError()) {
                @Override
                public void onSuccess(UploadResponse uploadResponse) {
                  switch (uploadResponse.getStatus()) {
                    case SUCCESS:
                      ErrorReporter.hide();
                      onUploadSuccess(folderNode, filename,
                          uploadResponse.getModificationDate(), fileUploadedCallback);
                      break;
                    case FILE_TOO_LARGE:
                      // The user can resolve the problem by
                      // uploading a smaller file.
                      ErrorReporter.reportInfo(MESSAGES.fileTooLargeError());
                      break;
                    default:
                      ErrorReporter.reportError(MESSAGES.fileUploadError());
                      break;
                  }
                }
              });
        } else {
          // Nothing selected: tell the user and re-open the wizard.
          Window.alert(MESSAGES.noFileSelected());
          new FileUploadWizard(folderNode, fileUploadedCallback).show();
        }
      }
    });
  }

  @Override
  public void show() {
    super.show();
    // Wizard size (having it resize between page changes is quite annoying)
    int width = 320;
    int height = 40;
    this.center();
    setPixelSize(width, height);
    super.setPagePanelHeight(40);
  }

  // Normalizes a browser-supplied filename: strips any leading path and all
  // whitespace. Character/length validity is checked separately by the caller.
  private String makeValidFilename(String uploadFilename) {
    // Strip leading path off filename.
    // We need to support both Unix ('/') and Windows ('\\') separators.
    String filename = uploadFilename.substring(
        Math.max(uploadFilename.lastIndexOf('/'), uploadFilename.lastIndexOf('\\')) + 1);
    // We need to strip out whitespace from the filename.
    filename = filename.replaceAll("\\s", "");
    return filename;
  }

  // Returns the short name of an existing child whose file id matches the
  // candidate case-insensitively, or null when there is no conflict.
  private String conflictingExistingFile(FolderNode folderNode, String filename) {
    String fileId = folderNode.getFileId() + "/" + filename;
    for (ProjectNode child : folderNode.getChildren()) {
      if (fileId.equalsIgnoreCase(child.getFileId())) {
        // we want to return kitty.png rather than assets/kitty.png
        return lastPathComponent(child.getFileId());
      }
    }
    return null;
  }

  // Last segment of a '/'-separated path, e.g. "assets/kitty.png" -> "kitty.png".
  private String lastPathComponent (String path) {
    String [] pieces = path.split("/");
    return pieces[pieces.length - 1];
  }

  // Asks the user whether the new file may replace the existing one.
  private boolean confirmOverwrite(FolderNode folderNode, String newFile, String existingFile) {
    return Window.confirm(MESSAGES.confirmOverwrite(newFile, existingFile));
  }

  // Records the new modification date, then materializes the uploaded file node.
  private void onUploadSuccess(final FolderNode folderNode, final String filename,
      long modificationDate, final FileUploadedCallback fileUploadedCallback) {
    Ode.getInstance().updateModificationDate(folderNode.getProjectId(), modificationDate);
    finishUpload(folderNode, filename, fileUploadedCallback);
  }

  // Adds the uploaded file to the client-side project model (as an asset node when
  // the destination is the assets folder) and fires the optional callback.
  private void finishUpload(FolderNode folderNode, String filename,
      FileUploadedCallback fileUploadedCallback) {
    String uploadedFileId = folderNode.getFileId() + "/" + filename;
    FileNode uploadedFileNode;
    if (folderNode instanceof YoungAndroidAssetsFolder) {
      uploadedFileNode = new YoungAndroidAssetNode(filename, uploadedFileId);
    } else {
      uploadedFileNode = new FileNode(filename, uploadedFileId);
    }

    Project project = Ode.getInstance().getProjectManager().getProject(folderNode);
    uploadedFileNode = (FileNode) project.addNode(folderNode, uploadedFileNode);

    if (fileUploadedCallback != null) {
      fileUploadedCallback.onFileUploaded(folderNode, uploadedFileNode);
    }
  }
}
/*
 * @(#)ServiceLoader.java	1.10 06/04/10
 *
 * Copyright 2006 Sun Microsystems, Inc. All rights reserved.
 * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
package de.intarsys.tools.provider;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ServiceLoader;
import java.util.logging.Level;
import java.util.logging.Logger;

import de.intarsys.tools.stream.StreamTools;

/**
 * This is an alternative service- or provider loading tool.
 * <p>
 * The standard {@link ServiceLoader} architecture is quite unfriendly to
 * reflective access and is even more unfriendly to a lightweight generic
 * build&deployment procedure.
 * <p>
 * This implementation uses a single file "META-INF/provider/provider.list" to
 * enumerate service/implementation pairs. Each service is listed on a line of
 * its own. The service is followed by a ";", then followed by the
 * implementation class. Empty lines are ignored.
 */
public class Providers {

	// One "service;implementation" registration. The provider instance is
	// created lazily on first lookup and cached here afterwards.
	static class ProviderEntry {
		public String serviceName;
		public String providerName;
		public Object provider;
	}

	// Everything after this character on a provider.list line is a comment.
	public static final char COMMENT = '#';

	// Separates the service name from the implementation class name.
	public static final String SEPARATOR = ";";

	// One Providers instance per class loader; also serves as the lock for get().
	private static final Map<ClassLoader, Providers> providers = new HashMap<ClassLoader, Providers>();

	private static final String PROVIDERLIST = "META-INF/provider/provider.list";

	// Returns the Providers registry for the current thread's context class loader.
	static public Providers get() {
		return get(Thread.currentThread().getContextClassLoader());
	}

	// Returns (creating and caching on first use) the registry for the given
	// class loader. Resource-loading failures surface as
	// ProviderConfigurationException.
	static public Providers get(ClassLoader classloader) {
		synchronized (providers) {
			Providers result = providers.get(classloader);
			if (result == null) {
				try {
					result = new Providers(classloader);
				} catch (IOException e) {
					throw new ProviderConfigurationException(
							"error creating providers", e);
				}
				providers.put(classloader, result);
			}
			return result;
		}
	}

	private ClassLoader loader;

	private List<ProviderEntry> entries = new ArrayList<ProviderEntry>();

	private static final Logger Log = Logger
			.getLogger("de.intarsys.tools.provider");

	// Builds the registry by reading every provider.list visible to cl.
	protected Providers(ClassLoader cl) throws IOException {
		loader = cl;
		init();
	}

	// Adds an entry unless the same service/implementation pair is already known.
	protected void addProvider(ProviderEntry entry) {
		for (ProviderEntry temp : entries) {
			if (temp.serviceName.equals(entry.serviceName)
					&& temp.providerName.equals(entry.providerName)) {
				return;
			}
		}
		entries.add(entry);
	}

	// Enumerates all provider.list resources (system resources when loader is
	// null) and registers the entries of each.
	private void init() throws IOException {
		Enumeration<URL> providerlistUrls;
		if (loader == null) {
			providerlistUrls = ClassLoader.getSystemResources(PROVIDERLIST);
		} else {
			providerlistUrls = loader.getResources(PROVIDERLIST);
		}
		while (providerlistUrls.hasMoreElements()) {
			URL providerlistUrl = providerlistUrls.nextElement();
			InputStream is = null;
			try {
				is = providerlistUrl.openStream();
				register(is);
			} finally {
				StreamTools.close(is);
			}
		}
	}

	/**
	 * Lazy iterator over all registered implementations of {@code service}.
	 * Each provider is instantiated (via no-arg constructor) on first access
	 * and cached in its entry; an instantiation failure raises
	 * ProviderConfigurationException from next().
	 */
	public <S> Iterator<S> lookupProviders(final Class<S> service) {
		return new Iterator<S>() {
			// Entry found by hasNext() but not yet returned by next().
			private ProviderEntry current;

			private String serviceName = service.getName();

			private Iterator<ProviderEntry> it = entries.iterator();

			@Override
			public boolean hasNext() {
				if (current != null) {
					return true;
				}
				// Advance to the next entry registered for this service.
				while (it.hasNext()) {
					ProviderEntry temp = it.next();
					if (serviceName.equals(temp.serviceName)) {
						current = temp;
						return true;
					}
				}
				return false;
			}

			@Override
			public S next() {
				if (!hasNext()) {
					throw new NoSuchElementException();
				}
				ProviderEntry temp = current;
				current = null;
				if (temp.provider == null) {
					try {
						// Resolve and instantiate through the registry's loader.
						temp.provider = Class.forName(temp.providerName, true,
								loader).newInstance();
					} catch (Throwable e) {
						Log.log(Level.FINEST, "loading provider failed", e);
						throw new ProviderConfigurationException("error loading "
								+ temp.providerName, e);
					}
				}
				// Unchecked but safe by the provider.list contract: the entry's
				// implementation is expected to implement the service type.
				return (S) temp.provider;
			}

			@Override
			public void remove() {
				throw new UnsupportedOperationException();
			}
		};
	}

	// Registers every non-empty, non-comment line of the stream (UTF-8).
	public void register(InputStream is) throws IOException {
		BufferedReader r = null;
		try {
			r = new BufferedReader(new InputStreamReader(is, "utf-8"));
			while (registerLine(r))
				;
		} finally {
			StreamTools.close(r);
		}
	}

	// Consumes one line; returns false at end of stream, true otherwise.
	// Comment-only, empty, and malformed (no separator) lines are skipped.
	protected boolean registerLine(BufferedReader r) throws IOException {
		String ln = r.readLine();
		if (ln == null) {
			return false;
		}
		int ci = ln.indexOf(COMMENT);
		if (ci >= 0)
			ln = ln.substring(0, ci);
		ln = ln.trim();
		if (ln.length() == 0) {
			return true;
		}
		String[] parts = ln.split(SEPARATOR);
		if (parts.length < 2) {
			return true;
		}
		ProviderEntry entry = new ProviderEntry();
		entry.serviceName = parts[0].trim();
		entry.providerName = parts[1].trim();
		addProvider(entry);
		return true;
	}
}
/*
 *
 *  * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
 *  *
 *  * Licensed under the Apache License, Version 2.0 (the "License");
 *  * you may not use this file except in compliance with the License.
 *  * You may obtain a copy of the License at
 *  *
 *  *      http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS,
 *  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  * See the License for the specific language governing permissions and
 *  * limitations under the License.
 *  *
 *  * For more information: http://www.orientechnologies.com
 *
 */
package com.orientechnologies.common.log;

import com.orientechnologies.common.exception.OException;
import com.orientechnologies.common.parser.OSystemVariableResolver;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Locale;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Singleton facade over java.util.logging used by the whole engine. Keeps its
 * own per-severity enabled flags (debug/info/warn/error) in addition to the
 * JUL levels, prefixes messages with the current database name when one is
 * bound to the thread, and formats messages with {@link String#format}.
 */
public class OLogManager {
  private static final String      DEFAULT_LOG                  = "com.orientechnologies";
  private static final String      ENV_INSTALL_CUSTOM_FORMATTER = "orientdb.installCustomFormatter";
  private static final OLogManager instance                     = new OLogManager();
  private              boolean     debug                        = true;
  private              boolean     info                         = true;
  private              boolean     warn                         = true;
  private              boolean     error                        = true;
  // Lowest JUL level ever requested through setLevel(); the per-severity flags
  // are only relaxed when a strictly lower level arrives.
  private              Level       minimumLevel                 = Level.SEVERE;

  protected OLogManager() {
  }

  public static OLogManager instance() {
    return instance;
  }

  /**
   * Installs {@link OLogFormatter} on the console handler(s) of the root
   * logger, unless disabled via the {@code orientdb.installCustomFormatter}
   * system variable. Never throws: failures only print to stderr.
   */
  public static void installCustomFormatter() {
    final boolean installCustomFormatter = Boolean.parseBoolean(OSystemVariableResolver
        .resolveSystemVariables("${" + ENV_INSTALL_CUSTOM_FORMATTER + "}", "true"));

    if (!installCustomFormatter)
      return;

    try {
      // ASSURE TO HAVE THE ORIENT LOG FORMATTER TO THE CONSOLE EVEN IF NO CONFIGURATION FILE IS TAKEN
      final Logger log = Logger.getLogger("");
      if (log.getHandlers().length == 0) {
        // SET DEFAULT LOG FORMATTER
        final Handler h = new ConsoleHandler();
        h.setFormatter(new OLogFormatter());
        log.addHandler(h);
      } else {
        for (Handler h : log.getHandlers()) {
          // Only replace foreign formatters on console handlers.
          if (h instanceof ConsoleHandler && !h.getFormatter().getClass().equals(OLogFormatter.class))
            h.setFormatter(new OLogFormatter());
        }
      }
    } catch (Exception e) {
      System.err.println("Error while installing custom formatter. Logging could be disabled. Cause: " + e.toString());
    }
  }

  public void setConsoleLevel(final String iLevel) {
    setLevel(iLevel, ConsoleHandler.class);
  }

  public void setFileLevel(final String iLevel) {
    setLevel(iLevel, FileHandler.class);
  }

  /**
   * Core logging routine. Prefixes the message with "{db=name}" when a
   * paginated-storage database is bound to the current thread, picks the
   * logger from the requester's class (or the default engine logger), and
   * formats the message with the additional args. Formatting errors never
   * propagate to the caller.
   *
   * @param iRequester       object requesting the log (may be null)
   * @param iLevel           JUL level to log at
   * @param iMessage         String.format pattern (null is a no-op)
   * @param iException       optional cause to attach
   * @param iAdditionalArgs  format arguments
   */
  public void log(final Object iRequester, final Level iLevel, String iMessage, final Throwable iException,
      final Object... iAdditionalArgs) {
    if (iMessage != null) {
      try {
        final ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE != null ? ODatabaseRecordThreadLocal.INSTANCE
            .getIfDefined() : null;
        if (db != null && db.getStorage() != null && db.getStorage() instanceof OAbstractPaginatedStorage) {
          final String dbName = db.getStorage().getName();
          if (dbName != null)
            iMessage = "{db=" + dbName + "} " + iMessage;
        }
      } catch (Throwable e) {
        // Best effort only: logging must never fail because of db lookup.
      }

      final Logger log = iRequester != null ? Logger.getLogger(iRequester.getClass().getName()) : Logger.getLogger(DEFAULT_LOG);
      if (log == null) {
        // USE SYSERR
        try {
          System.err.println(String.format(iMessage, iAdditionalArgs));
        } catch (Exception e) {
          OLogManager.instance().warn(this, "Error on formatting message", e);
        }
      } else if (log.isLoggable(iLevel)) {
        // USE THE LOG
        try {
          final String msg = String.format(iMessage, iAdditionalArgs);
          if (iException != null)
            log.log(iLevel, msg, iException);
          else
            log.log(iLevel, msg);
        } catch (Exception e) {
          System.err.print(String.format("Error on formatting message '%s'. Exception: %s", iMessage, e.toString()));
        }
      }
    }
  }

  public void debug(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
    if (isDebugEnabled())
      log(iRequester, Level.FINE, iMessage, null, iAdditionalArgs);
  }

  public void debug(final Object iRequester, final String iMessage, final Throwable iException,
      final Object... iAdditionalArgs) {
    if (isDebugEnabled())
      log(iRequester, Level.FINE, iMessage, iException, iAdditionalArgs);
  }

  /**
   * Logs at debug level and additionally throws iExceptionClass (when its
   * (String, Throwable) constructor is available; reflection failures are
   * silently ignored and nothing is thrown).
   */
  public void debug(final Object iRequester, final String iMessage, final Throwable iException,
      final Class<? extends OException> iExceptionClass, final Object... iAdditionalArgs) {
    debug(iRequester, iMessage, iException, iAdditionalArgs);

    if (iExceptionClass != null)
      try {
        throw iExceptionClass.getConstructor(String.class, Throwable.class).newInstance(iMessage, iException);
      } catch (NoSuchMethodException e) {
      } catch (IllegalArgumentException e) {
      } catch (SecurityException e) {
      } catch (InstantiationException e) {
      } catch (IllegalAccessException e) {
      } catch (InvocationTargetException e) {
      }
  }

  public void info(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
    if (isInfoEnabled())
      log(iRequester, Level.INFO, iMessage, null, iAdditionalArgs);
  }

  public void info(final Object iRequester, final String iMessage, final Throwable iException,
      final Object... iAdditionalArgs) {
    if (isInfoEnabled())
      log(iRequester, Level.INFO, iMessage, iException, iAdditionalArgs);
  }

  public void warn(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
    if (isWarnEnabled())
      log(iRequester, Level.WARNING, iMessage, null, iAdditionalArgs);
  }

  public void warn(final Object iRequester, final String iMessage, final Throwable iException,
      final Object... iAdditionalArgs) {
    if (isWarnEnabled())
      log(iRequester, Level.WARNING, iMessage, iException, iAdditionalArgs);
  }

  // CONFIG has no dedicated enabled flag; JUL level filtering still applies.
  public void config(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
    log(iRequester, Level.CONFIG, iMessage, null, iAdditionalArgs);
  }

  public void error(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
    // FIX: guard on isErrorEnabled() like every other level method (debug/info/
    // warn and the Throwable overload of error); previously this overload
    // logged even when SEVERE was disabled via setLevel().
    if (isErrorEnabled())
      log(iRequester, Level.SEVERE, iMessage, null, iAdditionalArgs);
  }

  public void error(final Object iRequester, final String iMessage, final Throwable iException,
      final Object... iAdditionalArgs) {
    if (isErrorEnabled())
      log(iRequester, Level.SEVERE, iMessage, iException, iAdditionalArgs);
  }

  /**
   * Logs at error level and additionally throws iExceptionClass built from the
   * formatted message (reflection failures are silently ignored).
   */
  public void error(final Object iRequester, final String iMessage, final Throwable iException,
      final Class<? extends OException> iExceptionClass, final Object... iAdditionalArgs) {
    error(iRequester, iMessage, iException, iAdditionalArgs);

    final String msg = String.format(iMessage, iAdditionalArgs);

    if (iExceptionClass != null)
      try {
        throw iExceptionClass.getConstructor(String.class, Throwable.class).newInstance(msg, iException);
      } catch (NoSuchMethodException e) {
      } catch (IllegalArgumentException e) {
      } catch (SecurityException e) {
      } catch (InstantiationException e) {
      } catch (IllegalAccessException e) {
      } catch (InvocationTargetException e) {
      }
  }

  public void error(final Object iRequester, final String iMessage, final Class<? extends OException> iExceptionClass) {
    error(iRequester, iMessage, (Throwable) null);

    try {
      throw iExceptionClass.getConstructor(String.class).newInstance(iMessage);
    } catch (IllegalArgumentException e) {
    } catch (SecurityException e) {
    } catch (InstantiationException e) {
    } catch (IllegalAccessException e) {
    } catch (InvocationTargetException e) {
    } catch (NoSuchMethodException e) {
    }
  }

  /**
   * Formats iMessage and always throws an exception of iExceptionClass:
   * preferring the (String, Throwable) constructor when a nested exception is
   * given, falling back to (String), and finally to IllegalArgumentException
   * when the class exposes neither.
   *
   * @throws OException always (or IllegalArgumentException when the exception
   *                    type cannot be instantiated)
   */
  @SuppressWarnings("unchecked")
  public void exception(final String iMessage, final Exception iNestedException,
      final Class<? extends OException> iExceptionClass, final Object... iAdditionalArgs) throws OException {
    if (iMessage == null)
      return;

    // FORMAT THE MESSAGE
    String msg = String.format(iMessage, iAdditionalArgs);

    Constructor<OException> c;
    OException exceptionToThrow = null;
    try {
      if (iNestedException != null) {
        c = (Constructor<OException>) iExceptionClass.getConstructor(String.class, Throwable.class);
        exceptionToThrow = c.newInstance(msg, iNestedException);
      }
    } catch (Exception e) {
      // Fall through to the (String) constructor below.
    }

    if (exceptionToThrow == null)
      try {
        c = (Constructor<OException>) iExceptionClass.getConstructor(String.class);
        exceptionToThrow = c.newInstance(msg);
      } catch (SecurityException e1) {
      } catch (NoSuchMethodException e1) {
      } catch (IllegalArgumentException e1) {
      } catch (InstantiationException e1) {
      } catch (IllegalAccessException e1) {
      } catch (InvocationTargetException e1) {
      }

    if (exceptionToThrow != null)
      throw exceptionToThrow;
    else
      throw new IllegalArgumentException("Cannot create the exception of type: " + iExceptionClass);
  }

  public boolean isWarn() {
    return warn;
  }

  // Maps a JUL level onto the internal per-severity flags.
  public boolean isLevelEnabled(final Level level) {
    if (level.equals(Level.FINER) || level.equals(Level.FINE) || level.equals(Level.FINEST))
      return debug;
    else if (level.equals(Level.INFO))
      return info;
    else if (level.equals(Level.WARNING))
      return warn;
    else if (level.equals(Level.SEVERE))
      return error;
    return false;
  }

  public boolean isDebugEnabled() {
    return debug;
  }

  public void setDebugEnabled(boolean debug) {
    this.debug = debug;
  }

  public boolean isInfoEnabled() {
    return info;
  }

  public void setInfoEnabled(boolean info) {
    this.info = info;
  }

  public boolean isWarnEnabled() {
    return warn;
  }

  public void setWarnEnabled(boolean warn) {
    this.warn = warn;
  }

  public boolean isErrorEnabled() {
    return error;
  }

  public void setErrorEnabled(boolean error) {
    this.error = error;
  }

  /**
   * Parses iLevel (default INFO), relaxes the per-severity flags when it is
   * lower than any level seen before, then applies the level to the engine
   * logger chain and to the first handler of type iHandler on each logger.
   *
   * @return the parsed level
   */
  public Level setLevel(final String iLevel, final Class<? extends Handler> iHandler) {
    final Level level = iLevel != null ? Level.parse(iLevel.toUpperCase(Locale.ENGLISH)) : Level.INFO;

    if (level.intValue() < minimumLevel.intValue()) {
      // UPDATE MINIMUM LEVEL
      minimumLevel = level;

      if (level.equals(Level.FINER) || level.equals(Level.FINE) || level.equals(Level.FINEST))
        debug = info = warn = error = true;
      else if (level.equals(Level.INFO)) {
        info = warn = error = true;
        debug = false;
      } else if (level.equals(Level.WARNING)) {
        warn = error = true;
        debug = info = false;
      } else if (level.equals(Level.SEVERE)) {
        error = true;
        debug = info = warn = false;
      }
    }

    Logger log = Logger.getLogger(DEFAULT_LOG);
    while (log != null) {
      log.setLevel(level);

      for (Handler h : log.getHandlers()) {
        if (h.getClass().isAssignableFrom(iHandler)) {
          h.setLevel(level);
          break;
        }
      }

      log = log.getParent();
    }

    return level;
  }

  // Flushes every handler attached to the global logger.
  public void flush() {
    for (Handler h : Logger.getLogger(Logger.GLOBAL_LOGGER_NAME).getHandlers())
      h.flush();
  }
}
/*
 * Copyright (c) 2010-2012. Axon Framework
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.axonframework.serializer.bson;

import org.axonframework.serializer.Revision;
import org.axonframework.serializer.SerializedObject;
import org.joda.time.DateMidnight;
import org.joda.time.DateTime;
import org.joda.time.Period;
import org.junit.*;

import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Unit tests for {@code DBObjectXStreamSerializer}: round-trip serialization,
 * alias handling (package/class/field), revision resolution and special
 * character safety.
 *
 * @author Allard Buijze
 */
public class DBObjectXStreamSerializerTest {

    private DBObjectXStreamSerializer testSubject;

    // Exercises quoting/escaping of characters that are special in BSON/XML.
    private static final String SPECIAL__CHAR__STRING = "Special chars: '\"&;\n\\<>/\n\t";

    @Before
    public void setUp() {
        this.testSubject = new DBObjectXStreamSerializer();
    }

    // Verifies a heterogeneous List<Object> survives a String round trip.
    @Test
    public void testSerializeAndDeserializeDomainEventWithListOfObjects() {
        List<Object> objectList = new ArrayList<Object>();
        objectList.add("a");
        objectList.add(1L);
        objectList.add("b");
        SerializedObject<String> serializedEvent = testSubject.serialize(new SecondTestEvent("eventName", objectList),
                                                                         String.class);

        Object actualResult = testSubject.deserialize(serializedEvent);
        assertTrue(actualResult instanceof SecondTestEvent);
        SecondTestEvent actualEvent = (SecondTestEvent) actualResult;
        assertEquals(objectList, actualEvent.getStrings());
    }

    // Test for issue AXON-141 - BSONNode - marshalling EnumSet problem
    @Test
    public void testSerializeEnumSet() throws Exception {
        SerializedObject<String> serialized = testSubject.serialize(new TestEventWithEnumSet("testing123"),
                                                                    String.class);

        TestEventWithEnumSet actual = testSubject.deserialize(serialized);
        assertEquals("testing123", actual.getName());
        assertEquals(EnumSet.of(TestEventWithEnumSet.SomeEnum.FIRST, TestEventWithEnumSet.SomeEnum.SECOND),
                     actual.enumSet);
    }

    // Basic byte[] round trip of a simple event.
    @Test
    public void testSerializeAndDeserializeDomainEvent() {
        SerializedObject<byte[]> serializedEvent = testSubject.serialize(new TestEvent("Henk"), byte[].class);
        Object actualResult = testSubject.deserialize(serializedEvent);
        assertTrue(actualResult instanceof TestEvent);
        TestEvent actualEvent = (TestEvent) actualResult;
        assertEquals("Henk", actualEvent.getName());
    }

    // Package aliases should replace package names in the serialized form.
    @Test
    public void testPackageAlias() throws UnsupportedEncodingException {
        testSubject.addPackageAlias("test", "org.axonframework.serializer.bson");
        testSubject.addPackageAlias("axon", "org.axonframework");

        SerializedObject<String> serialized = testSubject.serialize(new StubDomainEvent(), String.class);
        String asString = serialized.getData();
        assertFalse("Package name found in:" + asString, asString.contains("org"));
        StubDomainEvent deserialized = (StubDomainEvent) testSubject.deserialize(serialized);
        assertEquals(StubDomainEvent.class, deserialized.getClass());
        assertTrue(asString.contains("test"));
    }

    // Class aliases should replace the fully-qualified class name.
    @Test
    public void testAlias() throws UnsupportedEncodingException {
        testSubject.addAlias("stub", StubDomainEvent.class);

        SerializedObject<byte[]> serialized = testSubject.serialize(new StubDomainEvent(), byte[].class);
        String asString = new String(serialized.getData(), "UTF-8");
        assertFalse(asString.contains("org.axonframework.domain"));
        assertTrue(asString.contains("\"stub"));
        StubDomainEvent deserialized = (StubDomainEvent) testSubject.deserialize(serialized);
        assertEquals(StubDomainEvent.class, deserialized.getClass());
    }

    // Field aliases should rename individual fields in the serialized form.
    @Test
    public void testFieldAlias() throws UnsupportedEncodingException {
        testSubject.addFieldAlias("relevantPeriod", TestEvent.class, "period");

        SerializedObject<byte[]> serialized = testSubject.serialize(new TestEvent("hello"), byte[].class);
        String asString = new String(serialized.getData(), "UTF-8");
        assertFalse(asString.contains("period"));
        assertTrue(asString.contains("\"relevantPeriod"));
        TestEvent deserialized = (TestEvent) testSubject.deserialize(serialized);
        assertNotNull(deserialized);
    }

    // Revision should be read from the @Revision annotation on the event class.
    @Test
    public void testRevisionNumber_FromAnnotation() throws UnsupportedEncodingException {
        SerializedObject<byte[]> serialized = testSubject.serialize(new RevisionSpecifiedEvent(), byte[].class);
        assertNotNull(serialized);
        assertEquals("2", serialized.getType().getRevision());
        assertEquals(RevisionSpecifiedEvent.class.getName(), serialized.getType().getName());
    }

    // The serialized type name should honor a registered class alias.
    @Test
    public void testSerializedTypeUsesClassAlias() throws UnsupportedEncodingException {
        testSubject.addAlias("rse", RevisionSpecifiedEvent.class);
        SerializedObject<byte[]> serialized = testSubject.serialize(new RevisionSpecifiedEvent(), byte[].class);
        assertNotNull(serialized);
        assertEquals("2", serialized.getType().getRevision());
        assertEquals("rse", serialized.getType().getName());
    }

    /**
     * Tests the scenario as described in <a href="http://code.google.com/p/axonframework/issues/detail?id=150">issue
     * #150</a>.
     */
    @Test
    public void testSerializeWithSpecialCharacters_WithoutUpcasters() {
        SerializedObject<byte[]> serialized = testSubject.serialize(new TestEvent(SPECIAL__CHAR__STRING), byte[].class);
        TestEvent deserialized = (TestEvent) testSubject.deserialize(serialized);
        assertEquals(SPECIAL__CHAR__STRING, deserialized.getName());
    }

    // Fixture: event carrying its revision via annotation.
    @Revision("2")
    public static class RevisionSpecifiedEvent {
    }

    // Fixture: event with a heterogeneous object list payload.
    public static class SecondTestEvent extends TestEvent {

        private List<Object> objects;

        public SecondTestEvent(String name, List<Object> objects) {
            super(name);
            this.objects = new ArrayList<Object>(objects);
        }

        public List<Object> getStrings() {
            return objects;
        }
    }

    // Fixture: event mixing strings, a list, and several Joda-Time types.
    public static class TestEvent implements Serializable {

        private static final long serialVersionUID = 1L;
        private String name;
        private List<String> someListOfString;
        private DateMidnight date;
        private DateTime dateTime;
        private Period period;

        public TestEvent(String name) {
            this.name = name;
            this.date = new DateMidnight();
            this.dateTime = new DateTime();
            this.period = new Period(100);
            this.someListOfString = new ArrayList<String>();
            someListOfString.add("First");
            someListOfString.add("Second");
        }

        public String getName() {
            return name;
        }
    }

    // Fixture: event with an EnumSet field (see AXON-141).
    public static class TestEventWithEnumSet extends TestEvent {

        private Set<SomeEnum> enumSet;

        public TestEventWithEnumSet(String name) {
            super(name);
            enumSet = EnumSet.of(SomeEnum.FIRST, SomeEnum.SECOND);
        }

        private enum SomeEnum {
            FIRST, SECOND, THIRD;
        }
    }
}
/*
 * Copyright (c) 2012-2014 Spotify AB
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.spotify.elasticsearch.plugins.indexwindow;

import static org.elasticsearch.rest.RestRequest.Method.DELETE;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.XContentRestResponse;
import org.elasticsearch.rest.XContentThrowableRestResponse;
import org.elasticsearch.rest.action.support.RestXContentBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * REST handler for managing "index windows": rolling retention rules that keep
 * the most recent N date-suffixed indices for a given prefix. Registered on
 * PUT/POST /index-window (create or update) and DELETE
 * /index-window/{delete_index} (remove).
 */
public class IndexWindowAction extends BaseRestHandler {

    private static final String PARAM_DELETE_INDEX = "delete_index";
    private static final String PARAM_CHECK_INTERVAL = "check_interval";
    private static final String PARAM_KEEP = "keep";
    private static final String PARAM_DATE_FORMAT = "date_format";
    private static final String PARAM_INDEX_PREFIX = "index_prefix";

    // Default period between retention checks.
    private static final TimeValue DEFAULT_PARAM_CHECK_INTERVAL = new TimeValue(
            30, TimeUnit.MINUTES);
    // Default number of most recent indices to keep.
    private static final int DEFAULT_PARAM_KEEP = 7;
    // Default pattern for the date suffix of managed indices.
    private static final String DEFAULT_PARAM_DATE_FORMAT = "yyyy.MM.dd";

    private static final long LOAD_RETRY_INTERVAL = 20000;
    private static final long LOAD_TIMEOUT = 200000;

    // Index/type where window definitions are persisted.
    private static final String META_TYPE = "window";
    private static final String META_INDEX = "index-window";

    // Active window runners keyed by index prefix (populated by loadActiveWindows).
    private Map<String, IndexWindowRunner> activeWindows;

    private final ObjectMapper mapper;

    @Inject
    public IndexWindowAction(Settings settings, Client client,
            RestController controller) {
        super(settings, client);
        controller.registerHandler(PUT, "/" + META_INDEX, this);
        controller.registerHandler(POST, "/" + META_INDEX, this);
        controller.registerHandler(DELETE, "/" + META_INDEX + "/{"
                + PARAM_DELETE_INDEX + "}", this);
        mapper = new ObjectMapper();
        // Restore previously persisted window definitions on startup.
        loadActiveWindows();
    }

    // Routes DELETE to removal; every other registered method creates/updates.
    @Override
    public void handleRequest(RestRequest request, RestChannel channel) {
        switch (request.method()) {
        case DELETE:
            handleDeleteRequest(request, channel);
            break;
        default:
            handleCreateRequest(request, channel);
            break;
        }
    }

    /**
     * This method is called when an index window is going to be created, or
     * updated.
     */
    private void handleCreateRequest(RestRequest request, RestChannel channel) {
        try {
            final String indexPrefix = request.param(PARAM_INDEX_PREFIX, "");
            if (indexPrefix.isEmpty()) {
                respondBadRequest(request, channel, PARAM_INDEX_PREFIX
                        + " missing");
                return;
            }

            final String format = request.param(PARAM_DATE_FORMAT,
                    DEFAULT_PARAM_DATE_FORMAT);
            try {
                // Try to parse the provided date format
                new SimpleDateFormat(format);
            } catch (final Exception e) {
                respondBadRequest(request, channel, "invalid "
                        + PARAM_DATE_FORMAT);
                return;
            }

            final Integer keep = request.paramAsInt(PARAM_KEEP,
                    DEFAULT_PARAM_KEEP);
            if (keep < 0) {
                respondBadRequest(request, channel, PARAM_KEEP
                        + " cannot be negative");
                return;
            }

            TimeValue checkInterval = null;
            try {
                checkInterval = request.paramAsTime(PARAM_CHECK_INTERVAL,
                        DEFAULT_PARAM_CHECK_INTERVAL);
            } catch (final Exception e) {
                respondBadRequest(request, channel, "invalid "
                        + PARAM_CHECK_INTERVAL);
                return;
            }

            // All parameters validated: build the window and (re)activate it.
            final IndexWindow window = new IndexWindow(indexPrefix, format,
                    keep, checkInterval.millis());
            addOrReplaceWindow(window);

            // Acknowledge with the effective window definition echoed back.
            final XContentBuilder builder = RestXContentBuilder
                    .restContentBuilder(request);
            builder.startObject();
            builder.field("acknowledge", true);
            builder.field("source", window);
            builder.endObject();
            channel.sendResponse(new XContentRestResponse(request,
                    RestStatus.OK, builder));
        } catch (final IOException e) {
            try {
                channel.sendResponse(new XContentThrowableRestResponse(request,
                        e));
            } catch (final Exception ex) {
                logger.error("Unknown problem occurred", ex);
            }
        }
    }

    // Removes the window named by the delete_index path parameter and reports
    // whether it was found.
    private void handleDeleteRequest(RestRequest request, RestChannel channel) {
        if (request.hasParam(PARAM_DELETE_INDEX)) {
            final String deleteIndex = request.param(PARAM_DELETE_INDEX);
            final boolean found = removeWindow(deleteIndex);
            if (found) {
                logger.info("index window removed: " + deleteIndex);
            }
            try {
                final XContentBuilder builder = RestXContentBuilder
                        .restContentBuilder(request);
                builder.startObject();
                builder.field("acknowledge", true);
                builder.field("found",
found); builder.field("deleted_index", deleteIndex); builder.endObject(); channel.sendResponse(new XContentRestResponse(request, RestStatus.OK, builder)); } catch (final IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else { try { respondBadRequest(request, channel, "invalid request"); } catch (final IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return; } private void respondBadRequest(RestRequest request, RestChannel channel, String message) throws IOException { final XContentBuilder builder = RestXContentBuilder .restContentBuilder(request); builder.startObject(); builder.field("error", message); builder.endObject(); channel.sendResponse(new XContentRestResponse(request, RestStatus.BAD_REQUEST, builder)); } /** * Loads index windows configurations from elasticsearch and starts their * runners. */ private void loadActiveWindows() { new Thread(new Runnable() { @Override public void run() { boolean loadSuccess = false; final long loadStart = System.currentTimeMillis(); while (!loadSuccess && (System.currentTimeMillis() - loadStart) <= LOAD_TIMEOUT) { try { Thread.sleep(LOAD_RETRY_INTERVAL); } catch (final InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.info("Trying to load active index windows..."); loadSuccess = doLoadActiveWindows(); if (!loadSuccess) { logger.info("Failed to load active index windows... maybe the shards are not loaded yet..."); } else { logger.info("Loading index windows successful!"); } } if (!loadSuccess) { logger.warn("Giving up on loading active index windows... Either there is not window to load, " + "or the cluster is taking too long time to start up. 
Timeout: " + LOAD_TIMEOUT); } } }).start(); } private boolean doLoadActiveWindows() { clearActiveWindows(); activeWindows = new HashMap<String, IndexWindowRunner>(); SearchHits hits = null; try { hits = client.prepareSearch(META_INDEX).execute().actionGet() .getHits(); } catch (final Exception e) { return false; } final Iterator<SearchHit> iterator = hits.iterator(); while (iterator.hasNext()) { final SearchHit hit = iterator.next(); final String json = hit.getSourceAsString(); try { final IndexWindow window = mapper.readValue(json, IndexWindow.class); activeWindows.put(window.getIndexPrefix(), new IndexWindowRunner(client, window)); } catch (final JsonParseException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (final JsonMappingException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (final IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return true; } private void clearActiveWindows() { if (activeWindows == null) { return; } final Collection<IndexWindowRunner> windows = activeWindows.values(); for (final IndexWindowRunner w : windows) { w.close(); } activeWindows.clear(); } private void addOrReplaceWindow(IndexWindow indexWindow) { removeWindow(indexWindow.getIndexPrefix()); writeToElasticsearch(indexWindow); activeWindows.put(indexWindow.getIndexPrefix(), new IndexWindowRunner( client, indexWindow)); } private void writeToElasticsearch(IndexWindow indexWindow) { String json = null; try { json = mapper.writeValueAsString(indexWindow); client.prepareIndex(META_INDEX, META_TYPE, indexWindow.getIndexPrefix()).setSource(json).execute() .actionGet(); } catch (final JsonProcessingException e) { e.printStackTrace(); } } private boolean removeWindow(String indexPrefix) { boolean found = false; client.prepareDelete(META_INDEX, META_TYPE, indexPrefix).execute() .actionGet(); final IndexWindowRunner removedWindow = activeWindows .remove(indexPrefix); if (removedWindow != null) { 
removedWindow.close(); found = true; } return found; } }
package org.hl7.fhir.instance.model.valuesets; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Sat, Aug 22, 2015 23:00-0400 for FHIR v0.5.0 import org.hl7.fhir.instance.model.EnumFactory; public class V3VaccineManufacturerEnumFactory implements EnumFactory<V3VaccineManufacturer> { public V3VaccineManufacturer fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) return null; if ("AB".equals(codeString)) return V3VaccineManufacturer.AB; if ("AD".equals(codeString)) return V3VaccineManufacturer.AD; if ("ALP".equals(codeString)) return V3VaccineManufacturer.ALP; if ("AR".equals(codeString)) return V3VaccineManufacturer.AR; if ("AVI".equals(codeString)) return V3VaccineManufacturer.AVI; if ("BA".equals(codeString)) return V3VaccineManufacturer.BA; if ("BAY".equals(codeString)) return V3VaccineManufacturer.BAY; if ("BP".equals(codeString)) return V3VaccineManufacturer.BP; if ("BPC".equals(codeString)) return V3VaccineManufacturer.BPC; if ("CEN".equals(codeString)) return V3VaccineManufacturer.CEN; if ("CHI".equals(codeString)) return V3VaccineManufacturer.CHI; if ("CON".equals(codeString)) return V3VaccineManufacturer.CON; if ("EVN".equals(codeString)) return V3VaccineManufacturer.EVN; if ("GRE".equals(codeString)) return V3VaccineManufacturer.GRE; if ("IAG".equals(codeString)) return V3VaccineManufacturer.IAG; if ("IM".equals(codeString)) return V3VaccineManufacturer.IM; if ("IUS".equals(codeString)) return V3VaccineManufacturer.IUS; if ("JPN".equals(codeString)) return V3VaccineManufacturer.JPN; if ("KGC".equals(codeString)) return V3VaccineManufacturer.KGC; if ("LED".equals(codeString)) return V3VaccineManufacturer.LED; if ("MA".equals(codeString)) return V3VaccineManufacturer.MA; if ("MED".equals(codeString)) return V3VaccineManufacturer.MED; if ("MIL".equals(codeString)) return V3VaccineManufacturer.MIL; if ("MIP".equals(codeString)) return V3VaccineManufacturer.MIP; if ("MSD".equals(codeString)) return V3VaccineManufacturer.MSD; if ("NAB".equals(codeString)) return 
V3VaccineManufacturer.NAB; if ("NAV".equals(codeString)) return V3VaccineManufacturer.NAV; if ("NOV".equals(codeString)) return V3VaccineManufacturer.NOV; if ("NYB".equals(codeString)) return V3VaccineManufacturer.NYB; if ("ORT".equals(codeString)) return V3VaccineManufacturer.ORT; if ("OTC".equals(codeString)) return V3VaccineManufacturer.OTC; if ("PD".equals(codeString)) return V3VaccineManufacturer.PD; if ("PMC".equals(codeString)) return V3VaccineManufacturer.PMC; if ("PRX".equals(codeString)) return V3VaccineManufacturer.PRX; if ("SCL".equals(codeString)) return V3VaccineManufacturer.SCL; if ("SI".equals(codeString)) return V3VaccineManufacturer.SI; if ("SKB".equals(codeString)) return V3VaccineManufacturer.SKB; if ("USA".equals(codeString)) return V3VaccineManufacturer.USA; if ("WA".equals(codeString)) return V3VaccineManufacturer.WA; if ("WAL".equals(codeString)) return V3VaccineManufacturer.WAL; throw new IllegalArgumentException("Unknown V3VaccineManufacturer code '"+codeString+"'"); } public String toCode(V3VaccineManufacturer code) { if (code == V3VaccineManufacturer.AB) return "AB"; if (code == V3VaccineManufacturer.AD) return "AD"; if (code == V3VaccineManufacturer.ALP) return "ALP"; if (code == V3VaccineManufacturer.AR) return "AR"; if (code == V3VaccineManufacturer.AVI) return "AVI"; if (code == V3VaccineManufacturer.BA) return "BA"; if (code == V3VaccineManufacturer.BAY) return "BAY"; if (code == V3VaccineManufacturer.BP) return "BP"; if (code == V3VaccineManufacturer.BPC) return "BPC"; if (code == V3VaccineManufacturer.CEN) return "CEN"; if (code == V3VaccineManufacturer.CHI) return "CHI"; if (code == V3VaccineManufacturer.CON) return "CON"; if (code == V3VaccineManufacturer.EVN) return "EVN"; if (code == V3VaccineManufacturer.GRE) return "GRE"; if (code == V3VaccineManufacturer.IAG) return "IAG"; if (code == V3VaccineManufacturer.IM) return "IM"; if (code == V3VaccineManufacturer.IUS) return "IUS"; if (code == V3VaccineManufacturer.JPN) return 
"JPN"; if (code == V3VaccineManufacturer.KGC) return "KGC"; if (code == V3VaccineManufacturer.LED) return "LED"; if (code == V3VaccineManufacturer.MA) return "MA"; if (code == V3VaccineManufacturer.MED) return "MED"; if (code == V3VaccineManufacturer.MIL) return "MIL"; if (code == V3VaccineManufacturer.MIP) return "MIP"; if (code == V3VaccineManufacturer.MSD) return "MSD"; if (code == V3VaccineManufacturer.NAB) return "NAB"; if (code == V3VaccineManufacturer.NAV) return "NAV"; if (code == V3VaccineManufacturer.NOV) return "NOV"; if (code == V3VaccineManufacturer.NYB) return "NYB"; if (code == V3VaccineManufacturer.ORT) return "ORT"; if (code == V3VaccineManufacturer.OTC) return "OTC"; if (code == V3VaccineManufacturer.PD) return "PD"; if (code == V3VaccineManufacturer.PMC) return "PMC"; if (code == V3VaccineManufacturer.PRX) return "PRX"; if (code == V3VaccineManufacturer.SCL) return "SCL"; if (code == V3VaccineManufacturer.SI) return "SI"; if (code == V3VaccineManufacturer.SKB) return "SKB"; if (code == V3VaccineManufacturer.USA) return "USA"; if (code == V3VaccineManufacturer.WA) return "WA"; if (code == V3VaccineManufacturer.WAL) return "WAL"; return "?"; } }
/* * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ // // SunJSSE does not support dynamic system properties, no way to re-use // system properties in samevm/agentvm mode. 
//

/*
 * @test
 * @bug 7174244
 * @summary NPE in Krb5ProxyImpl.getServerKeys()
 * @run main/othervm CipherSuitesInOrder
 */

import java.util.*;
import javax.net.ssl.*;

/**
 * Verifies that the cipher suites exposed by SSLContext, SSLEngine,
 * SSLSocket and SSLServerSocket are always listed in JSSE preference order.
 */
public class CipherSuitesInOrder {

    // Reference list: every supported cipher suite, in preference order.
    private final static List<String> supportedCipherSuites = Arrays.<String>asList(
        "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384",
        "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
        "TLS_RSA_WITH_AES_256_CBC_SHA256",
        "TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384",
        "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384",
        "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256",
        "TLS_DHE_DSS_WITH_AES_256_CBC_SHA256",
        "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
        "TLS_RSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA",
        "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA",
        "TLS_DHE_RSA_WITH_AES_256_CBC_SHA",
        "TLS_DHE_DSS_WITH_AES_256_CBC_SHA",
        "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
        "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
        "TLS_RSA_WITH_AES_128_CBC_SHA256",
        "TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256",
        "TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256",
        "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256",
        "TLS_DHE_DSS_WITH_AES_128_CBC_SHA256",
        "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
        "TLS_RSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA",
        "TLS_ECDH_RSA_WITH_AES_128_CBC_SHA",
        "TLS_DHE_RSA_WITH_AES_128_CBC_SHA",
        "TLS_DHE_DSS_WITH_AES_128_CBC_SHA",
        "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
        "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
        "SSL_RSA_WITH_RC4_128_SHA",
        "TLS_ECDH_ECDSA_WITH_RC4_128_SHA",
        "TLS_ECDH_RSA_WITH_RC4_128_SHA",
        "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
        "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
        "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
        "TLS_RSA_WITH_AES_256_GCM_SHA384",
        "TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384",
        "TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384",
        "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384",
        "TLS_DHE_DSS_WITH_AES_256_GCM_SHA384",
        "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
        "TLS_RSA_WITH_AES_128_GCM_SHA256",
        "TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256",
        "TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256",
        "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256",
        "TLS_DHE_DSS_WITH_AES_128_GCM_SHA256",
        "TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
        "SSL_RSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA",
        "TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA",
        "SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA",
        "SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA",
        "SSL_RSA_WITH_RC4_128_MD5",
        "TLS_EMPTY_RENEGOTIATION_INFO_SCSV",
        "TLS_DH_anon_WITH_AES_256_GCM_SHA384",
        "TLS_DH_anon_WITH_AES_128_GCM_SHA256",
        "TLS_DH_anon_WITH_AES_256_CBC_SHA256",
        "TLS_ECDH_anon_WITH_AES_256_CBC_SHA",
        "TLS_DH_anon_WITH_AES_256_CBC_SHA",
        "TLS_DH_anon_WITH_AES_128_CBC_SHA256",
        "TLS_ECDH_anon_WITH_AES_128_CBC_SHA",
        "TLS_DH_anon_WITH_AES_128_CBC_SHA",
        "TLS_ECDH_anon_WITH_RC4_128_SHA",
        "SSL_DH_anon_WITH_RC4_128_MD5",
        "TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA",
        "SSL_DH_anon_WITH_3DES_EDE_CBC_SHA",
        "TLS_RSA_WITH_NULL_SHA256",
        "TLS_ECDHE_ECDSA_WITH_NULL_SHA",
        "TLS_ECDHE_RSA_WITH_NULL_SHA",
        "SSL_RSA_WITH_NULL_SHA",
        "TLS_ECDH_ECDSA_WITH_NULL_SHA",
        "TLS_ECDH_RSA_WITH_NULL_SHA",
        "TLS_ECDH_anon_WITH_NULL_SHA",
        "SSL_RSA_WITH_NULL_MD5",
        "SSL_RSA_WITH_DES_CBC_SHA",
        "SSL_DHE_RSA_WITH_DES_CBC_SHA",
        "SSL_DHE_DSS_WITH_DES_CBC_SHA",
        "SSL_DH_anon_WITH_DES_CBC_SHA",
        "SSL_RSA_EXPORT_WITH_RC4_40_MD5",
        "SSL_DH_anon_EXPORT_WITH_RC4_40_MD5",
        "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA",
        "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA",
        "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA",
        "SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA",
        "TLS_KRB5_WITH_RC4_128_SHA",
        "TLS_KRB5_WITH_RC4_128_MD5",
        "TLS_KRB5_WITH_3DES_EDE_CBC_SHA",
        "TLS_KRB5_WITH_3DES_EDE_CBC_MD5",
        "TLS_KRB5_WITH_DES_CBC_SHA",
        "TLS_KRB5_WITH_DES_CBC_MD5",
        "TLS_KRB5_EXPORT_WITH_RC4_40_SHA",
        "TLS_KRB5_EXPORT_WITH_RC4_40_MD5",
        "TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA",
        "TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5"
    );

    // SSLContext algorithms to exercise; "" selects the default context.
    private final static String[] protocols = {
        "", "SSL", "TLS", "SSLv3", "TLSv1", "TLSv1.1", "TLSv1.2"
    };

    public static void main(String[] args) throws Exception {
        // Dump the full reference list first for diagnostics.
        showSuites(supportedCipherSuites.toArray(new String[0]),
                "All supported cipher suites");

        for (String protocol : protocols) {
            System.out.println("//");
            System.out.println("// Testing for SSLContext of " + protocol);
            System.out.println("//");
            checkForProtocols(protocol);
        }
    }

    /**
     * Checks suite ordering for one SSLContext algorithm across every JSSE
     * endpoint type (context defaults, engine, socket, server socket).
     */
    public static void checkForProtocols(String protocol) throws Exception {
        final SSLContext ctx;
        if (protocol.isEmpty()) {
            ctx = SSLContext.getDefault();
        } else {
            ctx = SSLContext.getInstance(protocol);
            ctx.init(null, null, null);
        }

        // check the order of default cipher suites of SSLContext
        SSLParameters params = ctx.getDefaultSSLParameters();
        checkSuites(params.getCipherSuites(),
                "Default cipher suites in SSLContext");

        // check the order of supported cipher suites of SSLContext
        params = ctx.getSupportedSSLParameters();
        checkSuites(params.getCipherSuites(),
                "Supported cipher suites in SSLContext");

        //
        // Check the cipher suites order of SSLEngine
        //
        SSLEngine engine = ctx.createSSLEngine();

        // check the order of enabled cipher suites
        String[] suiteNames = engine.getEnabledCipherSuites();
        checkSuites(suiteNames, "Enabled cipher suites in SSLEngine");

        // check the order of supported cipher suites
        suiteNames = engine.getSupportedCipherSuites();
        checkSuites(suiteNames, "Supported cipher suites in SSLEngine");

        //
        // Check the cipher suites order of SSLSocket
        //
        SSLSocketFactory socketFactory = ctx.getSocketFactory();
        try (SSLSocket socket = (SSLSocket) socketFactory.createSocket()) {
            // check the order of enabled cipher suites
            suiteNames = socket.getEnabledCipherSuites();
            checkSuites(suiteNames, "Enabled cipher suites in SSLSocket");

            // check the order of supported cipher suites
            suiteNames = socket.getSupportedCipherSuites();
            checkSuites(suiteNames, "Supported cipher suites in SSLSocket");
        }

        //
        // Check the cipher suites order of SSLServerSocket
        //
        SSLServerSocketFactory serverFactory = ctx.getServerSocketFactory();
        try (SSLServerSocket serverSocket =
                (SSLServerSocket) serverFactory.createServerSocket()) {
            // check the order of enabled cipher suites
            suiteNames = serverSocket.getEnabledCipherSuites();
            checkSuites(suiteNames, "Enabled cipher suites in SSLServerSocket");

            // check the order of supported cipher suites
            suiteNames = serverSocket.getSupportedCipherSuites();
            checkSuites(suiteNames,
                    "Supported cipher suites in SSLServerSocket");
        }
    }

    /**
     * Asserts that every suite appears in the reference list and in strictly
     * increasing reference-list position (i.e. preference order). A suite
     * missing from the list yields index -1 and also fails the check.
     */
    private static void checkSuites(String[] suites, String title) {
        showSuites(suites, title);

        int previous = -1;
        for (String suite : suites) {
            int position = supportedCipherSuites.indexOf(suite);
            if (position <= previous) {
                throw new RuntimeException(suite + " is not in order");
            }
            previous = position;
        }
    }

    /** Prints a titled, counted listing of the given suites. */
    private static void showSuites(String[] suites, String title) {
        System.out.println(title + "[" + suites.length + "]:");
        for (int i = 0; i < suites.length; i++) {
            System.out.println(" " + suites[i]);
        }
    }
}
/* * The MIT License * * Copyright 2016 CloudBees, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.jenkinsci.plugins.pipeline.maven; import com.cloudbees.hudson.plugins.folder.Folder; import hudson.model.Fingerprint; import hudson.model.Result; import hudson.plugins.jacoco.JacocoBuildAction; import hudson.plugins.tasks.TasksResultAction; import hudson.tasks.Fingerprinter; import hudson.tasks.junit.TestResultAction; import jenkins.mvn.FilePathGlobalSettingsProvider; import jenkins.mvn.FilePathSettingsProvider; import jenkins.mvn.GlobalMavenConfig; import org.apache.commons.io.FileUtils; import org.jenkinsci.Symbol; import org.jenkinsci.plugins.configfiles.GlobalConfigFiles; import org.jenkinsci.plugins.configfiles.maven.GlobalMavenSettingsConfig; import org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig; import org.jenkinsci.plugins.configfiles.maven.job.MvnGlobalSettingsProvider; import org.jenkinsci.plugins.configfiles.maven.job.MvnSettingsProvider; import org.jenkinsci.plugins.pipeline.maven.publishers.FindbugsAnalysisPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.GeneratedArtifactsPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.JunitTestsPublisher; import org.jenkinsci.plugins.pipeline.maven.publishers.TasksScannerPublisher; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.jvnet.hudson.test.Issue; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; /** * TODO migrate to {@link WithMavenStepTest} once we have 
implemented a GitRepoRule that can be used on remote agents */ public class WithMavenStepOnMasterTest extends AbstractIntegrationTest { Logger logger; Level savedLevel; @Before public void before() { // Many log messages checked here are not logged if we are not in FINE level. logger = Logger.getLogger(WithMavenStepExecution2.class.getName()); savedLevel = logger.getLevel(); logger.setLevel(Level.FINE); } @After public void after() { logger.setLevel(savedLevel); } @Test public void maven_build_on_master_with_specified_maven_installation_succeeds() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); String pipelineScript = "node('master') {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(maven: 'apache-maven-3.6.3') {\n" + " sh 'mvn package'\n" + " }\n" + "}"; WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-tool-provided-maven"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); // verify provided Maven is used jenkinsRule.assertLogContains("using Maven installation 'apache-maven-3.6.3'", build); // verify .pom is archived and fingerprinted // "[withMaven] Archive ... 
under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom" jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", build); // verify .jar is archived and fingerprinted jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build); } @Test public void maven_build_on_master_with_missing_specified_maven_installation_fails() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); String pipelineScript = "node('master') {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven(maven: 'install-does-not-exist') {\n" + " sh 'mvn package'\n" + " }\n" + "}"; WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-tool-provided-maven"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.FAILURE, pipeline.scheduleBuild2(0)); } @Test public void maven_build_jar_project_on_master_succeeds() throws Exception { loadMavenJarProjectInGitRepo(this.gitRepoRule); String pipelineScript = "node('master') {\n" + " git($/" + gitRepoRule.toString() + "/$)\n" + " withMaven() {\n" + " sh 'mvn package verify'\n" + " }\n" + "}"; WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master"); pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true)); WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0)); // verify Maven installation provided by the build agent is used // can be either "by the build agent with executable..." or "by the build agent with the environment variable MAVEN_HOME=..." jenkinsRule.assertLogContains("[withMaven] using Maven installation provided by the build agent with", build); // verify .pom is archived and fingerprinted // "[withMaven] Archive ... 
under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom" jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom", build); // verify .jar is archived and fingerprinted jenkinsRule.assertLogContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build); Collection<String> artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts()); assertThat(artifactsFileNames, hasItems("mono-module-maven-app-0.1-SNAPSHOT.pom", "mono-module-maven-app-0.1-SNAPSHOT.jar")); verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar"); verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.pom"); // verify Junit Archiver is called for maven-surefire-plugin jenkinsRule.assertLogContains("[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT " + "generated by maven-surefire-plugin:test", build); TestResultAction testResultAction = build.getAction(TestResultAction.class); assertThat(testResultAction.getTotalCount(), is(2)); assertThat(testResultAction.getFailCount(), is(0)); // verify Junit Archiver is called for maven-failsafe-plugin jenkinsRule.assertLogContains("[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT " + "generated by maven-failsafe-plugin:integration-test", build); // verify Task Scanner is called for jenkins.mvn.test:mono-module-maven-app jenkinsRule.assertLogContains("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:mono-module-maven-app:jar:0.1-SNAPSHOT", build); TasksResultAction tasksResultAction = build.getAction(TasksResultAction.class); 
assertThat(tasksResultAction.getProjectActions().size(), is(1));
    }

    /**
     * Runs 'mvn package verify' on a jar project built with JaCoCo and verifies that the
     * .pom/.jar are archived and fingerprinted and that exactly one JUnit result action
     * (2 tests, 0 failures) and one JaCoCo build action are attached to the build.
     */
    @Test
    public void maven_build_jar_with_jacoco_succeeds() throws Exception {
        loadMavenJarWithJacocoInGitRepo(this.gitRepoRule);

        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven() {\n" +
                "        sh 'mvn package verify'\n" +
                "    }\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "jar-with-jacoco");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

        // verify the generated .pom and .jar are archived and fingerprinted
        Collection<String> artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts());
        assertThat(artifactsFileNames, hasItems("jar-with-jacoco-0.1-SNAPSHOT.pom", "jar-with-jacoco-0.1-SNAPSHOT.jar"));

        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/jar-with-jacoco/0.1-SNAPSHOT/jar-with-jacoco-0.1-SNAPSHOT.jar");
        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/jar-with-jacoco/0.1-SNAPSHOT/jar-with-jacoco-0.1-SNAPSHOT.pom");

        // exactly one test result action with the expected counters
        List<TestResultAction> testResultActions = build.getActions(TestResultAction.class);
        assertThat(testResultActions.size(), is(1));
        TestResultAction testResultAction = testResultActions.get(0);
        assertThat(testResultAction.getTotalCount(), is(2));
        assertThat(testResultAction.getFailCount(), is(0));

        // exactly one JaCoCo coverage action
        List<JacocoBuildAction> jacocoBuildActions = build.getActions(JacocoBuildAction.class);
        assertThat(jacocoBuildActions.size(), is(1));
        JacocoBuildAction jacocoBuildAction = jacocoBuildActions.get(0);
        assertThat(jacocoBuildAction.getProjectActions().size(), is(1));
    }

    /**
     * JENKINS-48264: a job whose name contains whitespace characters must still resolve
     * the Maven settings provided by the Jenkins global configuration
     * (Config File Provider managed file).
     */
    @Issue("JENKINS-48264")
    @Test
    public void maven_build_jar_project_with_whitespace_char_in_name() throws Exception {
        loadMavenJarProjectInGitRepo(this.gitRepoRule);

        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven() {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}";

        String mavenSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-settings-test-through-config-file-provider</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";

        MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test", "maven-config-test", "", mavenSettings, false, null);

        GlobalConfigFiles.get().save(mavenSettingsConfig);
        GlobalMavenConfig.get().setSettingsProvider(new MvnSettingsProvider(mavenSettingsConfig.id));

        try {
            // note the whitespace characters in the job name
            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build on master with spaces");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using Maven settings provided by the Jenkins global configuration", build);
            jenkinsRule.assertLogContains("<id>id-settings-test-through-config-file-provider</id>", build);
        } finally {
            // don't leak the settings provider into other tests
            GlobalMavenConfig.get().setSettingsProvider(null);
        }
    }

    // The eight tests below exercise the 'disabled' flag of each publisher through the
    // shared helper maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds.

    @Test
    public void maven_build_jar_project_on_master_disable_findbugs_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new FindbugsAnalysisPublisher.DescriptorImpl(), "findbugsPublisher", true);
    }

    @Test
    public void maven_build_jar_project_on_master_disable_tasks_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new TasksScannerPublisher.DescriptorImpl(), "openTasksPublisher", true);
    }

    @Test
    public void maven_build_jar_project_on_master_disable_junit_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new JunitTestsPublisher.DescriptorImpl(), "junitPublisher", true);
    }

    @Test
    public void maven_build_jar_project_on_master_disable_generated_artifacts_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new GeneratedArtifactsPublisher.DescriptorImpl(), "artifactsPublisher", true);
    }

    @Test
    public void maven_build_jar_project_on_master_force_enable_findbugs_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new FindbugsAnalysisPublisher.DescriptorImpl(), "findbugsPublisher", false);
    }

    @Test
    public void maven_build_jar_project_on_master_force_enable_tasks_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new TasksScannerPublisher.DescriptorImpl(), "openTasksPublisher", false);
    }

    @Test
    public void maven_build_jar_project_on_master_force_enable_junit_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new JunitTestsPublisher.DescriptorImpl(), "junitPublisher", false);
    }

    @Test
    public void maven_build_jar_project_on_master_force_enable_generated_artifacts_publisher_succeeds() throws Exception {
        maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(new GeneratedArtifactsPublisher.DescriptorImpl(), "artifactsPublisher", false);
    }

    /**
     * Shared scenario: build the jar project with the given publisher configured through
     * its pipeline symbol with the given 'disabled' flag, then check that the
     * "[withMaven] Skip '...' disabled by configuration" message is logged exactly when
     * the publisher is disabled.
     */
    private void maven_build_jar_project_on_master_with_disabled_publisher_param_succeeds(MavenPublisher.DescriptorImpl descriptor, String symbol, boolean disabled) throws Exception {
        // the "Skip ..." message is logged at FINE level, raise the logger temporarily
        Logger logger = Logger.getLogger(MavenSpyLogProcessor.class.getName());
        Level level = logger.getLevel();
        logger.setLevel(Level.FINE);
        try {
            String displayName = descriptor.getDisplayName();

            // sanity check: the given symbol is the one declared by the publisher descriptor
            Symbol symbolAnnotation = descriptor.getClass().getAnnotation(Symbol.class);
            String[] symbols = symbolAnnotation.value();
            assertThat(new String[]{symbol}, is(symbols));

            loadMavenJarProjectInGitRepo(this.gitRepoRule);

            String
pipelineScript = "node('master') {\n" +
                    "    git($/" + gitRepoRule.toString() + "/$)\n" +
                    "    withMaven(options:[" + symbol + "(disabled:" + disabled + ")]) {\n" +
                    "        sh 'mvn package verify'\n" +
                    "    }\n" +
                    "}";

            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-" + symbol + "-publisher-disabled-" + disabled);
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

            // the skip message must be present iff the publisher was disabled
            String message = "[withMaven] Skip '" + displayName + "' disabled by configuration";
            if (disabled) {
                jenkinsRule.assertLogContains(message, build);
            } else {
                jenkinsRule.assertLogNotContains(message, build);
            }
        } finally {
            // restore the logger level raised at the beginning of the scenario
            logger.setLevel(level);
        }
    }

    /**
     * Configures every parameter of the openTasksPublisher and checks that the publisher
     * is NOT skipped (the "Skip ... disabled by configuration" message is absent).
     */
    @Test
    public void maven_build_jar_project_on_master_with_open_task_scanner_config_succeeds() throws Exception {
        MavenPublisher.DescriptorImpl descriptor = new TasksScannerPublisher.DescriptorImpl();
        String displayName = descriptor.getDisplayName();

        // sanity check: the descriptor declares the expected pipeline symbol
        Symbol symbolAnnotation = descriptor.getClass().getAnnotation(Symbol.class);
        String[] symbols = symbolAnnotation.value();
        assertThat(new String[]{"openTasksPublisher"}, is(symbols));

        loadMavenJarProjectInGitRepo(this.gitRepoRule);

        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven(options:[openTasksPublisher(" +
                "        disabled:false, " +
                "        pattern:'src/main/java', excludePattern:'a/path'," +
                "        ignoreCase:true, asRegexp:false, " +
                "        lowPriorityTaskIdentifiers:'minor', normalPriorityTaskIdentifiers:'todo', highPriorityTaskIdentifiers:'fixme')]) {\n" +
                "        sh 'mvn package verify'\n" +
                "    }\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-openTasksPublisher-publisher-config");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

        String message = "[withMaven] Skip '" + displayName + "' disabled by configuration";
        jenkinsRule.assertLogNotContains(message, build);
    }

    /**
     * Builds a project using flatten-maven-plugin (.flattened-pom.xml) and verifies that
     * the maven event spy does not warn about the unexpected POM file name and that
     * archiving, fingerprinting, JUnit archiving and task scanning all run against the
     * flattened artifact coordinates.
     */
    @Test
    public void maven_build_maven_jar_with_flatten_pom_project_on_master_succeeds() throws Exception {
        loadMavenJarWithFlattenPomProjectInGitRepo(this.gitRepoRule);

        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven() {\n" +
                "        sh 'mvn package'\n" +
                "    }\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-jar-with-flatten-pom-project-on-master");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

        jenkinsRule.assertLogNotContains("[jenkins-maven-event-spy] WARNING: unexpected Maven project file name '.flattened-pom.xml', problems may occur", build);

        // verify Maven installation provided by the build agent is used
        // can be either "by the build agent with executable..." or "by the build agent with the environment variable MAVEN_HOME=..."
        jenkinsRule.assertLogContains("[withMaven] using Maven installation provided by the build agent with", build);

        // verify .pom is archived and fingerprinted
        jenkinsRule.assertLogContains("under jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom", build);

        // verify .jar is archived and fingerprinted
        jenkinsRule.assertLogContains("under jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar", build);

        Collection<String> artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts());
        assertThat(artifactsFileNames, hasItems("maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom", "maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar"));

        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.jar");
        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/maven-jar-with-flattened-pom/0.1-SNAPSHOT/maven-jar-with-flattened-pom-0.1-SNAPSHOT.pom");

        // verify Junit Archiver is called for jenkins.mvn.test:maven-jar-with-flattened-pom
        jenkinsRule.assertLogContains("[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:maven-jar-with-flattened-pom:jar:0.1-SNAPSHOT generated by", build);

        // verify Task Scanner is called for jenkins.mvn.test:maven-jar-with-flattened-pom
        jenkinsRule.assertLogContains("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:maven-jar-with-flattened-pom:jar:0.1-SNAPSHOT in source directory", build);
    }

    /**
     * Builds a Jenkins plugin (hpi packaging) project and verifies archiving and
     * fingerprinting of the .pom, .jar and .hpi plus JUnit archiving and task scanning.
     */
    @Test
    public void maven_build_maven_hpi_project_on_master_succeeds() throws Exception {
        loadJenkinsPluginProjectInGitRepo(this.gitRepoRule);

        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven() {\n" +
                "        sh 'mvn package'\n" +
                "    }\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master");
pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

        // verify Maven installation provided by the build agent is used
        // can be either "by the build agent with executable..." or "by the build agent with the environment variable MAVEN_HOME=..."
        jenkinsRule.assertLogContains("[withMaven] using Maven installation provided by the build agent with", build);

        // verify .pom is archived and fingerprinted
        jenkinsRule.assertLogContains("under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.pom", build);

        // verify .jar and .hpi is archived and fingerprinted
        jenkinsRule.assertLogContains("under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.hpi", build);
        jenkinsRule.assertLogContains("under jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.jar", build);

        Collection<String> artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts());
        assertThat(artifactsFileNames, hasItems("test-jenkins-hpi-0.1-SNAPSHOT.pom", "test-jenkins-hpi-0.1-SNAPSHOT.jar", "test-jenkins-hpi-0.1-SNAPSHOT.hpi"));

        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.hpi");
        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.jar");
        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/test-jenkins-hpi/0.1-SNAPSHOT/test-jenkins-hpi-0.1-SNAPSHOT.pom");

        // verify Junit Archiver is called for jenkins.mvn.test:test-jenkins-hpi
        jenkinsRule.assertLogContains("[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:test-jenkins-hpi:hpi:0.1-SNAPSHOT generated by", build);

        // verify Task Scanner is called for jenkins.mvn.test:test-jenkins-hpi
        jenkinsRule.assertLogContains("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:test-jenkins-hpi:hpi:0.1-SNAPSHOT in source directory", build);
    }

    /**
     * Builds a maven-plugin packaging project and verifies archiving, fingerprinting,
     * JUnit archiving and task scanning for the produced .pom and .jar.
     */
    @Test
    public void maven_build_maven_plugin_project_on_master_succeeds() throws Exception {
        loadMavenPluginProjectInGitRepo(this.gitRepoRule);

        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven() {\n" +
                "        sh 'mvn package'\n" +
                "    }\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

        // verify Maven installation provided by the build agent is used
        // can be either "by the build agent with executable..." or "by the build agent with the environment variable MAVEN_HOME=..."
        jenkinsRule.assertLogContains("[withMaven] using Maven installation provided by the build agent with", build);

        // verify .pom is archived and fingerprinted
        jenkinsRule.assertLogContains("under jenkins/mvn/test/maven-test-plugin/1.0-SNAPSHOT/maven-test-plugin-1.0-SNAPSHOT.pom", build);

        // verify .jar and .hpi is archived and fingerprinted
        jenkinsRule.assertLogContains("under jenkins/mvn/test/maven-test-plugin/1.0-SNAPSHOT/maven-test-plugin-1.0-SNAPSHOT.jar", build);

        Collection<String> artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts());
        assertThat(artifactsFileNames, hasItems("maven-test-plugin-1.0-SNAPSHOT.pom", "maven-test-plugin-1.0-SNAPSHOT.jar"));

        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/maven-test-plugin/1.0-SNAPSHOT/maven-test-plugin-1.0-SNAPSHOT.jar");
        verifyFileIsFingerPrinted(pipeline, build, "jenkins/mvn/test/maven-test-plugin/1.0-SNAPSHOT/maven-test-plugin-1.0-SNAPSHOT.pom");

        // verify Junit Archiver is called for jenkins.mvn.test:test-jenkins-hpi
        jenkinsRule.assertLogContains("[withMaven] junitPublisher - Archive test results for Maven artifact jenkins.mvn.test:maven-test-plugin:jar:1.0-SNAPSHOT generated by", build);

        // verify Task Scanner is called for jenkins.mvn.test:test-jenkins-hpi
        jenkinsRule.assertLogContains("[withMaven] openTasksPublisher - Scan Tasks for Maven artifact jenkins.mvn.test:maven-test-plugin:jar:1.0-SNAPSHOT in source directory", build);
    }

    /**
     * JENKINS-43678
     */
    @Test
    public void maven_build_on_master_with_no_generated_jar_succeeds() throws Exception {
        loadMavenJarProjectInGitRepo(this.gitRepoRule);

        // 'mvn test' only: no jar is produced by this build
        String pipelineScript = "node('master') {\n" +
                "    git($/" + gitRepoRule.toString() + "/$)\n" +
                "    withMaven() {\n" +
                "        sh 'mvn test'\n" +
                "    }\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "test-on-master");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));

        // don't try to archive the artifact as it has not been generated
        jenkinsRule.assertLogNotContains("under jenkins/mvn/test/mono-module-maven-app/0.1-SNAPSHOT/mono-module-maven-app-0.1-SNAPSHOT.jar", build);

        Collection<String> artifactsFileNames = TestUtils.artifactsToArtifactsFileNames(build.getArtifacts());
        assertThat(artifactsFileNames, hasItems("mono-module-maven-app-0.1-SNAPSHOT.pom"));
        // only the .pom is archived
        assertThat(build.getArtifacts().toString(), build.getArtifacts().size(), is(1));
    }

    /**
     * Asserts that the given relative file path is fingerprinted on the build and that
     * the fingerprint's original job/run point back to the given pipeline and build.
     */
    private void verifyFileIsFingerPrinted(WorkflowJob pipeline, WorkflowRun build, String fileName) throws java.io.IOException {
        System.out.println(getClass() + " verifyFileIsFingerPrinted(" + build + ", " + fileName + ")");
        Fingerprinter.FingerprintAction fingerprintAction = build.getAction(Fingerprinter.FingerprintAction.class);
        Map<String, String> records = fingerprintAction.getRecords();
        System.out.println(getClass() + " records: " + records);

        // the fingerprint records map file name -> md5 checksum
        String jarFileMd5sum = records.get(fileName);
        assertThat(jarFileMd5sum, not(nullValue()));

        Fingerprint jarFileFingerPrint = jenkinsRule.getInstance().getFingerprintMap().get(jarFileMd5sum);
        assertThat(jarFileFingerPrint.getFileName(), is(fileName));
        assertThat(jarFileFingerPrint.getOriginal().getJob().getName(), is(pipeline.getName()));
        assertThat(jarFileFingerPrint.getOriginal().getNumber(), is(build.getNumber()));
    }

    /**
     * A Maven global settings file path defined through the Jenkins global configuration
     * (FilePathGlobalSettingsProvider) must be picked up by withMaven().
     */
    @Test
    public void maven_global_settings_path_defined_through_jenkins_global_config() throws Exception {
        File mavenGlobalSettingsFile = new File(jenkinsRule.jenkins.getRootDir(), "maven-global-settings.xml");
        String mavenGlobalSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-global-settings-test</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";
        FileUtils.writeStringToFile(mavenGlobalSettingsFile, mavenGlobalSettings, StandardCharsets.UTF_8);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalMavenConfig.get().setGlobalSettingsProvider(new FilePathGlobalSettingsProvider(mavenGlobalSettingsFile.getAbsolutePath()));
        try {
            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class,
"build-on-master-with-maven-global-settings-defined-in-jenkins-global-config");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using Maven global settings provided by the Jenkins global configuration", build);
            jenkinsRule.assertLogContains("<id>id-global-settings-test</id>", build);
        } finally {
            // don't leak the global settings provider into other tests
            GlobalMavenConfig.get().setGlobalSettingsProvider(null);
        }
    }

    /**
     * Maven global settings defined through the Jenkins global configuration with a
     * Config File Provider managed file must be picked up by withMaven().
     */
    @Test
    public void maven_global_settings_defined_through_jenkins_global_config_and_config_file_provider() throws Exception {
        String mavenGlobalSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-global-settings-test-from-config-file-provider</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";

        GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig("maven-global-config-test", "maven-global-config-test", "", mavenGlobalSettings);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalConfigFiles.get().save(mavenGlobalSettingsConfig);
        GlobalMavenConfig.get().setGlobalSettingsProvider(new MvnGlobalSettingsProvider(mavenGlobalSettingsConfig.id));
        try {
            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-global-settings-defined-in-jenkins-global-config-with-config-file-provider");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using Maven global settings provided by the Jenkins global configuration", build);
            jenkinsRule.assertLogContains("<id>id-global-settings-test-from-config-file-provider</id>", build);
        } finally {
            // clean up the global configuration and the managed file
            GlobalMavenConfig.get().setGlobalSettingsProvider(null);
            GlobalConfigFiles.get().remove(mavenGlobalSettingsConfig.id);
        }
    }

    /**
     * Maven global settings overridden at folder level with a Config File Provider
     * managed file must take precedence and be reported in the build log.
     */
    @Test
    public void maven_global_settings_defined_through_folder_config_and_config_file_provider() throws Exception {
        String mavenGlobalSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-global-settings-test-from-config-file-provider-on-a-folder</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";

        GlobalMavenSettingsConfig mavenGlobalSettingsConfig = new GlobalMavenSettingsConfig("maven-global-config-test-folder", "maven-global-config-test-folder", "", mavenGlobalSettings);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalConfigFiles.get().save(mavenGlobalSettingsConfig);

        // override the global settings at folder level
        Folder folder = jenkinsRule.createProject(Folder.class, "folder");
        MavenConfigFolderOverrideProperty configOverrideProperty = new MavenConfigFolderOverrideProperty();
        configOverrideProperty.setOverride(true);
        configOverrideProperty.setGlobalSettings(new MvnGlobalSettingsProvider(mavenGlobalSettingsConfig.id));
        folder.addProperty(configOverrideProperty);

        try {
            WorkflowJob pipeline = folder.createProject(WorkflowJob.class, "build-on-master-with-maven-global-settings-defined-in-jenkins-global-config-with-config-file-provider");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains(
                    "[withMaven] using overridden Maven global settings by folder 'folder'. Config File Provider maven global settings file 'maven-global-config-test-folder'", build);
            jenkinsRule.assertLogContains("<id>id-global-settings-test-from-config-file-provider-on-a-folder</id>", build);
        } finally {
            GlobalMavenConfig.get().setGlobalSettingsProvider(null);
            GlobalConfigFiles.get().remove(mavenGlobalSettingsConfig.id);
        }
    }

    /**
     * A Maven global settings file path passed through the withMaven()
     * 'globalMavenSettingsFilePath' pipeline attribute must be used.
     */
    @Test
    public void maven_global_settings_path_defined_through_pipeline_attribute() throws Exception {
        String pipelineScript = "node () {\n" +
                "    writeFile file: 'maven-global-settings.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-global-settings-test</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>'''\n" +
                "\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3', globalMavenSettingsFilePath: 'maven-global-settings.xml') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-global-settings-defined-in-pipeline");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
jenkinsRule.assertLogContains("[withMaven] using Maven global settings provided on the build agent", build);
        jenkinsRule.assertLogContains("<id>id-global-settings-test</id>", build);
    }

    /**
     * https://issues.jenkins-ci.org/browse/JENKINS-42565
     */
    @Test
    public void maven_settings_path_defined_through_pipeline_attribute() throws Exception {
        String pipelineScript = "node () {\n" +
                "    writeFile file: 'maven-settings.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-settings-test</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>'''\n" +
                "\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3', mavenSettingsFilePath: 'maven-settings.xml') {\n" +
                "        sh 'env && mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-settings-defined-in-pipeline");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
        jenkinsRule.assertLogContains("[withMaven] using Maven settings provided on the build agent", build);
        jenkinsRule.assertLogContains("<id>id-settings-test</id>", build);
    }

    /**
     * A Maven settings file path defined through the Jenkins global configuration
     * (FilePathSettingsProvider) must be picked up by withMaven().
     */
    @Test
    public void maven_settings_defined_through_jenkins_global_config() throws Exception {
        File mavenSettingsFile = new File(jenkinsRule.jenkins.getRootDir(), "maven-settings.xml");
        String mavenSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-settings-test</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";
        FileUtils.writeStringToFile(mavenSettingsFile, mavenSettings, StandardCharsets.UTF_8);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalMavenConfig.get().setSettingsProvider(new FilePathSettingsProvider(mavenSettingsFile.getAbsolutePath()));
        try {
            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-settings-defined-in-jenkins-global-config");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using Maven settings provided by the Jenkins global configuration", build);
            jenkinsRule.assertLogContains("<id>id-settings-test</id>", build);
        } finally {
            // don't leak the settings provider into other tests
            GlobalMavenConfig.get().setSettingsProvider(null);
        }
    }

    /**
     * Maven settings defined through the Jenkins global configuration with a Config
     * File Provider managed file must be picked up by withMaven().
     */
    @Test
    public void maven_settings_defined_through_jenkins_global_config_and_config_file_provider() throws Exception {
        String mavenSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-settings-test-through-config-file-provider</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";

        MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test", "maven-config-test", "", mavenSettings, false, null);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalConfigFiles.get().save(mavenSettingsConfig);
        GlobalMavenConfig.get().setSettingsProvider(new MvnSettingsProvider(mavenSettingsConfig.id));

        try {
            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-settings-defined-in-jenkins-global-config-with-config-file-provider");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using Maven settings provided by the Jenkins global configuration", build);
            jenkinsRule.assertLogContains("<id>id-settings-test-through-config-file-provider</id>", build);
        } finally {
            GlobalMavenConfig.get().setSettingsProvider(null);
        }
    }

    /**
     * Maven settings overridden at folder level with a Config File Provider managed
     * file must take precedence and be reported in the build log.
     */
    @Test
    public void maven_settings_defined_through_folder_config_and_config_file_provider() throws Exception {
        String mavenSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-settings-test-through-config-file-provider-on-a-folder</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";

        MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test-folder", "maven-config-test-folder", "", mavenSettings, false, null);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalConfigFiles.get().save(mavenSettingsConfig);

        // override the settings at folder level
        Folder folder = jenkinsRule.createProject(Folder.class, "folder");
        MavenConfigFolderOverrideProperty configOverrideProperty = new MavenConfigFolderOverrideProperty();
        configOverrideProperty.setOverride(true);
        GlobalMavenConfig globalMavenConfig
= GlobalMavenConfig.get();
        // keep the global settings provider, only override the (non-global) settings
        configOverrideProperty.setGlobalSettings(globalMavenConfig.getGlobalSettingsProvider());
        configOverrideProperty.setSettings(new MvnSettingsProvider(mavenSettingsConfig.id));
        folder.addProperty(configOverrideProperty);

        try {
            WorkflowJob pipeline = folder.createProject(WorkflowJob.class, "build-on-master-with-maven-settings-defined-in-jenkins-global-config-with-config-file-provider");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using overridden Maven settings by folder 'folder'. Config File Provider maven settings file 'maven-config-test-folder'", build);
            jenkinsRule.assertLogContains("<id>id-settings-test-through-config-file-provider-on-a-folder</id>", build);
        } finally {
            configOverrideProperty.setOverride(false);
        }
    }

    /**
     * Maven settings referenced through the withMaven() 'mavenSettingsConfig' pipeline
     * attribute (Config File Provider managed file) must be used.
     */
    @Test
    public void maven_settings_defined_through_pipeline_attribute_and_config_file_provider() throws Exception {
        String mavenSettings = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<settings \n" +
                "        xmlns='http://maven.apache.org/SETTINGS/1.0.0'\n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n" +
                "        xsi:schemaLocation='http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd'>\n" +
                "    <servers>\n" +
                "        <server>\n" +
                "            <id>id-settings-test-from-pipeline-attribute-and-config-file-provider</id>\n" +
                "        </server>\n" +
                "    </servers>\n" +
                "</settings>\n";

        MavenSettingsConfig mavenSettingsConfig = new MavenSettingsConfig("maven-config-test-from-pipeline-attribute", "maven-config-test-from-pipeline-attribute", "", mavenSettings, false, null);

        String pipelineScript = "node () {\n" +
                "    writeFile file: 'pom.xml', text: '''<?xml version='1.0' encoding='UTF-8'?>\n" +
                "<project\n" +
                "        xmlns='http://maven.apache.org/POM/4.0.0' \n" +
                "        xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' \n" +
                "        xsi:schemaLocation='http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd'>\n" +
                "    <modelVersion>4.0.0</modelVersion>\n" +
                "    <groupId>com.example</groupId>\n" +
                "    <artifactId>my-artifact</artifactId>\n" +
                "    <version>1.0.0-SNAPSHOT</version>\n" +
                "    <packaging>pom</packaging>\n" +
                "</project>'''\n" +
                "\n" +
                "    withMaven(maven: 'apache-maven-3.6.3', mavenSettingsConfig: 'maven-config-test-from-pipeline-attribute') {\n" +
                "        sh 'mvn help:effective-settings'\n" +
                "    }\n" +
                "}\n";

        GlobalConfigFiles.get().save(mavenSettingsConfig);
        try {
            WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master-with-maven-global-settings-defined-in-jenkins-global-config-with-config-file-provider");
            pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
            WorkflowRun build = jenkinsRule.assertBuildStatus(Result.SUCCESS, pipeline.scheduleBuild2(0));
            jenkinsRule.assertLogContains("[withMaven] using Maven settings provided by the Jenkins Managed Configuration File 'maven-config-test-from-pipeline-attribute'", build);
            jenkinsRule.assertLogContains("<id>id-settings-test-from-pipeline-attribute-and-config-file-provider</id>", build);
        } finally {
            GlobalConfigFiles.get().remove(mavenSettingsConfig.id);
        }
    }

    /**
     * JENKINS-27395: test results must be recorded across a stage with two parallel
     * branches that each build the same project.
     */
    @Issue("JENKINS-27395")
    @Test
    public void maven_build_test_results_by_stage_and_branch() throws Exception {
        loadMavenJarProjectInGitRepo(this.gitRepoRule);

        String pipelineScript = "stage('first') {\n" +
                "    parallel(a: {\n" +
                "        node('master') {\n" +
                "            git($/" + gitRepoRule.toString() + "/$)\n" +
                "            withMaven() {\n" +
                "                sh 'mvn package verify'\n" +
                "            }\n" +
                "        }\n" +
                "    },\n" +
                "    b: {\n" +
                "        node('master') {\n" +
                "            git($/" + gitRepoRule.toString() + "/$)\n" +
                "            withMaven() {\n" +
                "                sh 'mvn package verify'\n" +
                "            }\n" +
                "        }\n" +
                "    })\n" +
                "}";

        WorkflowJob pipeline = jenkinsRule.createProject(WorkflowJob.class, "build-on-master");
        pipeline.setDefinition(new CpsFlowDefinition(pipelineScript, true));
        WorkflowRun build = jenkinsRule.buildAndAssertSuccess(pipeline);

        // 2 tests from each of the two parallel branches
        TestResultAction testResultAction = build.getAction(TestResultAction.class);
        assertThat(testResultAction.getTotalCount(), is(4));
        assertThat(testResultAction.getFailCount(), is(0));

        /* TODO enable test below when we can bump the junit-plugin to version 1.23+
        JUnitResultsStepTest.assertStageResults(build, 4, 6, "first");
        JUnitResultsStepTest.assertBranchResults(build, 2, 3, "a", "first");
        JUnitResultsStepTest.assertBranchResults(build, 2, 3, "b", "first");
        */
    }
}
// =====================================================================================
// NOTE(review): ANTLR-4-generated parser for BaseQuery.g4 — do not edit by hand;
// change the grammar and regenerate instead. Comments are review annotations only.
// =====================================================================================
package com.system.distribute.sqlparser;

import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;

@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class BaseQueryParser extends Parser {
	// Per-decision DFA cache; initialized in the static initializer (outside this view).
	protected static final DFA[] _decisionToDFA;
	protected static final PredictionContextCache _sharedContextCache =
		new PredictionContextCache();
	// Token type constants generated from the grammar's lexer rules.
	public static final int
		T__2=1, T__1=2, T__0=3, INSERT=4, SELECT=5, UPDATE=6, DELETE=7, VALUES=8,
		SYNC=9, AS=10, WHERE=11, SET=12, FROM=13, INTO=14, COMP=15, ID=16, WS=17;
	// Display names indexed by token type (index 0 is unused/invalid).
	public static final String[] tokenNames = {
		"<INVALID>", "')'", "','", "'('", "INSERT", "SELECT", "UPDATE", "DELETE",
		"VALUES", "SYNC", "AS", "WHERE", "SET", "FROM", "INTO", "COMP", "ID", "WS"
	};
	// Rule indices, one per grammar rule.
	public static final int
		RULE_sql = 0, RULE_insert_statement = 1, RULE_select_statement = 2,
		RULE_update_statement = 3, RULE_delete_statement = 4, RULE_sync_statement = 5,
		RULE_expr_lst = 6, RULE_from_cause = 7, RULE_into_cause = 8, RULE_values_cause = 9,
		RULE_where_cause = 10, RULE_set_cause = 11, RULE_expr = 12, RULE_kinsert = 13,
		RULE_kselect = 14, RULE_kupdate = 15, RULE_kdelete = 16, RULE_ksync = 17,
		RULE_kas = 18, RULE_lp = 19, RULE_rp = 20, RULE_comma = 21, RULE_comp = 22,
		RULE_id = 23;
	public static final String[] ruleNames = {
		"sql", "insert_statement", "select_statement", "update_statement",
		"delete_statement", "sync_statement", "expr_lst", "from_cause", "into_cause",
		"values_cause", "where_cause", "set_cause", "expr", "kinsert", "kselect",
		"kupdate", "kdelete", "ksync", "kas", "lp", "rp", "comma", "comp", "id"
	};

	@Override
	public String getGrammarFileName() { return "BaseQuery.g4"; }

	@Override
	public String[] getTokenNames() { return tokenNames; }

	@Override
	public String[] getRuleNames() { return ruleNames; }

	@Override
	public String getSerializedATN()
// NOTE(review): continuation of getSerializedATN() begun on the previous line.
	{ return _serializedATN; }

	@Override
	public ATN getATN() { return _ATN; }

	// Wires this parser to the generated ATN with the shared DFA/context caches.
	public BaseQueryParser(TokenStream input) {
		super(input);
		_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
	}

	// Parse-tree context for the top-level 'sql' rule.
	public static class SqlContext extends ParserRuleContext {
		public Update_statementContext update_statement() { return getRuleContext(Update_statementContext.class,0); }
		public Insert_statementContext insert_statement() { return getRuleContext(Insert_statementContext.class,0); }
		public Select_statementContext select_statement() { return getRuleContext(Select_statementContext.class,0); }
		public Sync_statementContext sync_statement() { return getRuleContext(Sync_statementContext.class,0); }
		public Delete_statementContext delete_statement() { return getRuleContext(Delete_statementContext.class,0); }
		public SqlContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_sql; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterSql(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitSql(this);
		}
	}

	// sql : insert_statement | select_statement | update_statement | delete_statement | sync_statement ;
	// Dispatches on the first lookahead token; setState numbers are tied to the serialized ATN.
	public final SqlContext sql() throws RecognitionException {
		SqlContext _localctx = new SqlContext(_ctx, getState());
		enterRule(_localctx, 0, RULE_sql);
		try {
			setState(53);
			switch (_input.LA(1)) {
			case INSERT:
				enterOuterAlt(_localctx, 1);
				{ setState(48); insert_statement(); }
				break;
			case SELECT:
				enterOuterAlt(_localctx, 2);
				{ setState(49); select_statement(); }
				break;
			case UPDATE:
				enterOuterAlt(_localctx, 3);
				{ setState(50); update_statement(); }
				break;
			case DELETE:
				enterOuterAlt(_localctx, 4);
				{ setState(51); delete_statement(); }
				break;
			case SYNC:
				enterOuterAlt(_localctx, 5);
				{ setState(52); sync_statement(); }
				break;
			default:
				throw new NoViableAltException(this);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Insert_statementContext extends ParserRuleContext {
		public Values_causeContext values_cause() { return getRuleContext(Values_causeContext.class,0); }
		public Into_causeContext into_cause() { return getRuleContext(Into_causeContext.class,0); }
		public KinsertContext kinsert() { return getRuleContext(KinsertContext.class,0); }
		public Insert_statementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_insert_statement; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterInsert_statement(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitInsert_statement(this);
		}
	}

	// insert_statement : kinsert into_cause values_cause ;
	public final Insert_statementContext insert_statement() throws RecognitionException {
		Insert_statementContext _localctx = new Insert_statementContext(_ctx, getState());
		enterRule(_localctx, 2, RULE_insert_statement);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(55); kinsert(); setState(56); into_cause(); setState(57); values_cause(); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Select_statementContext extends ParserRuleContext {
		public From_causeContext from_cause(int i) { return getRuleContext(From_causeContext.class,i); }
		public List<From_causeContext> from_cause() { return getRuleContexts(From_causeContext.class); }
		public Expr_lstContext expr_lst() { return getRuleContext(Expr_lstContext.class,0); }
		public Where_causeContext where_cause(int i) { return getRuleContext(Where_causeContext.class,i); }
		public List<Where_causeContext>
// NOTE(review): continuation of Select_statementContext begun on the previous line.
	where_cause() { return getRuleContexts(Where_causeContext.class); }
		public KselectContext kselect() { return getRuleContext(KselectContext.class,0); }
		public Select_statementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_select_statement; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterSelect_statement(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitSelect_statement(this);
		}
	}

	// select_statement : kselect expr_lst from_cause* where_cause* ;
	public final Select_statementContext select_statement() throws RecognitionException {
		Select_statementContext _localctx = new Select_statementContext(_ctx, getState());
		enterRule(_localctx, 4, RULE_select_statement);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(59); kselect();
			setState(60); expr_lst();
			// zero or more FROM clauses
			setState(64);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==FROM) {
				{ { setState(61); from_cause(); } }
				setState(66);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			// zero or more WHERE clauses
			setState(70);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==WHERE) {
				{ { setState(67); where_cause(); } }
				setState(72);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Update_statementContext extends ParserRuleContext {
		public Set_causeContext set_cause() { return getRuleContext(Set_causeContext.class,0); }
		public Expr_lstContext expr_lst() { return getRuleContext(Expr_lstContext.class,0); }
		public Where_causeContext where_cause(int i) { return getRuleContext(Where_causeContext.class,i); }
		public List<Where_causeContext> where_cause() { return getRuleContexts(Where_causeContext.class); }
		public KupdateContext kupdate() { return getRuleContext(KupdateContext.class,0); }
		public Update_statementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_update_statement; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterUpdate_statement(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitUpdate_statement(this);
		}
	}

	// update_statement : kupdate expr_lst set_cause where_cause* ;
	public final Update_statementContext update_statement() throws RecognitionException {
		Update_statementContext _localctx = new Update_statementContext(_ctx, getState());
		enterRule(_localctx, 6, RULE_update_statement);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(73); kupdate();
			setState(74); expr_lst();
			setState(75); set_cause();
			setState(79);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==WHERE) {
				{ { setState(76); where_cause(); } }
				setState(81);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Delete_statementContext extends ParserRuleContext {
		public From_causeContext from_cause() { return getRuleContext(From_causeContext.class,0); }
		public KdeleteContext kdelete() { return getRuleContext(KdeleteContext.class,0); }
		public Where_causeContext where_cause(int i) { return getRuleContext(Where_causeContext.class,i); }
		public List<Where_causeContext> where_cause() { return getRuleContexts(Where_causeContext.class); }
		public Delete_statementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_delete_statement; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener )
// NOTE(review): continuation of Delete_statementContext.enterRule begun on the previous line.
	((BaseQueryListener)listener).enterDelete_statement(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitDelete_statement(this);
		}
	}

	// delete_statement : kdelete from_cause where_cause* ;
	public final Delete_statementContext delete_statement() throws RecognitionException {
		Delete_statementContext _localctx = new Delete_statementContext(_ctx, getState());
		enterRule(_localctx, 8, RULE_delete_statement);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(82); kdelete();
			setState(83); from_cause();
			setState(87);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==WHERE) {
				{ { setState(84); where_cause(); } }
				setState(89);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Sync_statementContext extends ParserRuleContext {
		public From_causeContext from_cause() { return getRuleContext(From_causeContext.class,0); }
		public Where_causeContext where_cause(int i) { return getRuleContext(Where_causeContext.class,i); }
		public List<Where_causeContext> where_cause() { return getRuleContexts(Where_causeContext.class); }
		public KsyncContext ksync() { return getRuleContext(KsyncContext.class,0); }
		public Sync_statementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_sync_statement; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterSync_statement(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitSync_statement(this);
		}
	}

	// sync_statement : ksync from_cause where_cause* ;
	public final Sync_statementContext sync_statement() throws RecognitionException {
		Sync_statementContext _localctx = new Sync_statementContext(_ctx, getState());
		enterRule(_localctx, 10, RULE_sync_statement);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(90); ksync();
			setState(91); from_cause();
			setState(95);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==WHERE) {
				{ { setState(92); where_cause(); } }
				setState(97);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Expr_lstContext extends ParserRuleContext {
		public List<CommaContext> comma() { return getRuleContexts(CommaContext.class); }
		public List<ExprContext> expr() { return getRuleContexts(ExprContext.class); }
		public CommaContext comma(int i) { return getRuleContext(CommaContext.class,i); }
		public ExprContext expr(int i) { return getRuleContext(ExprContext.class,i); }
		public Expr_lstContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_expr_lst; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterExpr_lst(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitExpr_lst(this);
		}
	}

	// expr_lst : expr (comma expr)* ;  (T__1 is ',')
	public final Expr_lstContext expr_lst() throws RecognitionException {
		Expr_lstContext _localctx = new Expr_lstContext(_ctx, getState());
		enterRule(_localctx, 12, RULE_expr_lst);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(98); expr(0);
			setState(104);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==T__1) {
				{ { setState(99); comma(); setState(100); expr(0); } }
				setState(106);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
// NOTE(review): contexts and rule methods for from_cause / into_cause / values_cause.
	public static class From_causeContext extends ParserRuleContext {
		public Expr_lstContext expr_lst() { return getRuleContext(Expr_lstContext.class,0); }
		public TerminalNode FROM() { return getToken(BaseQueryParser.FROM, 0); }
		public From_causeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_from_cause; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterFrom_cause(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitFrom_cause(this);
		}
	}

	// from_cause : FROM expr_lst ;
	public final From_causeContext from_cause() throws RecognitionException {
		From_causeContext _localctx = new From_causeContext(_ctx, getState());
		enterRule(_localctx, 14, RULE_from_cause);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(107); match(FROM); setState(108); expr_lst(); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Into_causeContext extends ParserRuleContext {
		public CommaContext comma() { return getRuleContext(CommaContext.class,0); }
		public LpContext lp() { return getRuleContext(LpContext.class,0); }
		public List<IdContext> id() { return getRuleContexts(IdContext.class); }
		public RpContext rp() { return getRuleContext(RpContext.class,0); }
		public TerminalNode INTO() { return getToken(BaseQueryParser.INTO, 0); }
		public IdContext id(int i) { return getRuleContext(IdContext.class,i); }
		public Into_causeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_into_cause; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterInto_cause(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitInto_cause(this);
		}
	}

	// into_cause : INTO id ( lp id comma id rp )? ;  (T__0 is '(')
	// NOTE(review): the optional parenthesized list accepts exactly two ids — looks like
	// a grammar limitation; confirm against BaseQuery.g4 before relying on it.
	public final Into_causeContext into_cause() throws RecognitionException {
		Into_causeContext _localctx = new Into_causeContext(_ctx, getState());
		enterRule(_localctx, 16, RULE_into_cause);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(110); match(INTO);
			setState(111); id();
			setState(119);
			_la = _input.LA(1);
			if (_la==T__0) {
				{
				setState(112); lp();
				setState(113); id();
				{ setState(114); comma(); setState(115); id(); }
				setState(117); rp();
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Values_causeContext extends ParserRuleContext {
		public Expr_lstContext expr_lst() { return getRuleContext(Expr_lstContext.class,0); }
		public TerminalNode VALUES() { return getToken(BaseQueryParser.VALUES, 0); }
		public Values_causeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_values_cause; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterValues_cause(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitValues_cause(this);
		}
	}

	// values_cause : VALUES expr_lst ;
	public final Values_causeContext values_cause() throws RecognitionException {
		Values_causeContext _localctx = new Values_causeContext(_ctx, getState());
		enterRule(_localctx, 18, RULE_values_cause);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(121); match(VALUES); setState(122); expr_lst(); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public
// NOTE(review): 'public' modifier for this class declaration sits at the end of the previous line.
	static class Where_causeContext extends ParserRuleContext {
		public TerminalNode WHERE() { return getToken(BaseQueryParser.WHERE, 0); }
		public Expr_lstContext expr_lst() { return getRuleContext(Expr_lstContext.class,0); }
		public Where_causeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_where_cause; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterWhere_cause(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitWhere_cause(this);
		}
	}

	// where_cause : WHERE expr_lst ;
	public final Where_causeContext where_cause() throws RecognitionException {
		Where_causeContext _localctx = new Where_causeContext(_ctx, getState());
		enterRule(_localctx, 20, RULE_where_cause);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(124); match(WHERE); setState(125); expr_lst(); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class Set_causeContext extends ParserRuleContext {
		public TerminalNode SET() { return getToken(BaseQueryParser.SET, 0); }
		public Expr_lstContext expr_lst() { return getRuleContext(Expr_lstContext.class,0); }
		public Set_causeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_set_cause; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterSet_cause(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitSet_cause(this);
		}
	}

	// set_cause : SET expr_lst ;
	public final Set_causeContext set_cause() throws RecognitionException {
		Set_causeContext _localctx = new Set_causeContext(_ctx, getState());
		enterRule(_localctx, 22, RULE_set_cause);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(127); match(SET); setState(128); expr_lst(); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class ExprContext extends ParserRuleContext {
		public IdContext id() { return getRuleContext(IdContext.class,0); }
		public LpContext lp() { return getRuleContext(LpContext.class,0); }
		public List<ExprContext> expr() { return getRuleContexts(ExprContext.class); }
		public CompContext comp() { return getRuleContext(CompContext.class,0); }
		public RpContext rp() { return getRuleContext(RpContext.class,0); }
		public ExprContext expr(int i) { return getRuleContext(ExprContext.class,i); }
		public KasContext kas() { return getRuleContext(KasContext.class,0); }
		public ExprContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_expr; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterExpr(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitExpr(this);
		}
	}

	// Public entry point for the left-recursive 'expr' rule.
	public final ExprContext expr() throws RecognitionException {
		return expr(0);
	}

	// Left recursion eliminated by ANTLR: _p is the minimum precedence for the operator
	// loop; primary alternatives are 'lp id rp' (T__0 is '(') or a bare 'id'.
	private ExprContext expr(int _p) throws RecognitionException {
		ParserRuleContext _parentctx = _ctx;
		int _parentState = getState();
		ExprContext _localctx = new ExprContext(_ctx, _parentState);
		ExprContext _prevctx = _localctx;
		int _startState = 24;
		enterRecursionRule(_localctx, 24, RULE_expr, _p);
		try {
			int _alt;
			enterOuterAlt(_localctx, 1);
			{
			setState(136);
			switch (_input.LA(1)) {
			case T__0:
				{ setState(131); lp(); setState(132); id(); setState(133); rp(); }
				break;
			case ID:
				{ setState(135); id(); }
				break;
			default:
				throw new
// NOTE(review): continuation of expr(int) begun on the previous line.
	NoViableAltException(this);
			}
			_ctx.stop = _input.LT(-1);
			// Operator loop for the left-recursive alternatives; precpred values show
			// 'comp' (precedence 4) binds tighter than 'kas' (precedence 2).
			setState(148);
			_errHandler.sync(this);
			_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
				if ( _alt==1 ) {
					if ( _parseListeners!=null ) triggerExitRuleEvent();
					_prevctx = _localctx;
					{
					setState(146);
					switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) {
					case 1:
						{
						_localctx = new ExprContext(_parentctx, _parentState);
						pushNewRecursionContext(_localctx, _startState, RULE_expr);
						setState(138);
						if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
						setState(139); comp();
						setState(140); expr(5);
						}
						break;
					case 2:
						{
						_localctx = new ExprContext(_parentctx, _parentState);
						pushNewRecursionContext(_localctx, _startState, RULE_expr);
						setState(142);
						if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
						setState(143); kas();
						setState(144); expr(3);
						}
						break;
					}
					}
				}
				setState(150);
				_errHandler.sync(this);
				_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			// Recursion rules unroll contexts instead of calling exitRule().
			unrollRecursionContexts(_parentctx);
		}
		return _localctx;
	}

	public static class KinsertContext extends ParserRuleContext {
		public TerminalNode INSERT() { return getToken(BaseQueryParser.INSERT, 0); }
		public KinsertContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_kinsert; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterKinsert(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitKinsert(this);
		}
	}

	// kinsert : INSERT ;  (single-keyword rule)
	public final KinsertContext kinsert() throws RecognitionException {
		KinsertContext _localctx = new KinsertContext(_ctx, getState());
		enterRule(_localctx, 26, RULE_kinsert);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(151); match(INSERT); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class KselectContext extends ParserRuleContext {
		public TerminalNode SELECT() { return getToken(BaseQueryParser.SELECT, 0); }
		public KselectContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_kselect; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterKselect(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitKselect(this);
		}
	}

	// kselect : SELECT ;
	public final KselectContext kselect() throws RecognitionException {
		KselectContext _localctx = new KselectContext(_ctx, getState());
		enterRule(_localctx, 28, RULE_kselect);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(153); match(SELECT); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class KupdateContext extends ParserRuleContext {
		public TerminalNode UPDATE() { return getToken(BaseQueryParser.UPDATE, 0); }
		public KupdateContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_kupdate; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterKupdate(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitKupdate(this);
		}
	}

	public final
// NOTE(review): 'public final' for this method sits at the end of the previous line.
	KupdateContext kupdate() throws RecognitionException {
		KupdateContext _localctx = new KupdateContext(_ctx, getState());
		enterRule(_localctx, 30, RULE_kupdate);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(155); match(UPDATE); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class KdeleteContext extends ParserRuleContext {
		public TerminalNode DELETE() { return getToken(BaseQueryParser.DELETE, 0); }
		public KdeleteContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_kdelete; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterKdelete(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitKdelete(this);
		}
	}

	// kdelete : DELETE ;
	public final KdeleteContext kdelete() throws RecognitionException {
		KdeleteContext _localctx = new KdeleteContext(_ctx, getState());
		enterRule(_localctx, 32, RULE_kdelete);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(157); match(DELETE); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class KsyncContext extends ParserRuleContext {
		public TerminalNode SYNC() { return getToken(BaseQueryParser.SYNC, 0); }
		public KsyncContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_ksync; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterKsync(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitKsync(this);
		}
	}

	// ksync : SYNC ;
	public final KsyncContext ksync() throws RecognitionException {
		KsyncContext _localctx = new KsyncContext(_ctx, getState());
		enterRule(_localctx, 34, RULE_ksync);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(159); match(SYNC); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class KasContext extends ParserRuleContext {
		public TerminalNode AS() { return getToken(BaseQueryParser.AS, 0); }
		public KasContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_kas; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterKas(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitKas(this);
		}
	}

	// kas : AS ;
	public final KasContext kas() throws RecognitionException {
		KasContext _localctx = new KasContext(_ctx, getState());
		enterRule(_localctx, 36, RULE_kas);
		try {
			enterOuterAlt(_localctx, 1);
			{ setState(161); match(AS); }
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	public static class LpContext extends ParserRuleContext {
		public LpContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); }
		@Override public int getRuleIndex() { return RULE_lp; }
		@Override public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterLp(this);
		}
		@Override public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitLp(this);
		}
	}

	// lp : '(' ;  (T__0)
	public final LpContext lp() throws
RecognitionException { LpContext _localctx = new LpContext(_ctx, getState()); enterRule(_localctx, 38, RULE_lp); try { enterOuterAlt(_localctx, 1); { setState(163); match(T__0); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class RpContext extends ParserRuleContext { public RpContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_rp; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterRp(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitRp(this); } } public final RpContext rp() throws RecognitionException { RpContext _localctx = new RpContext(_ctx, getState()); enterRule(_localctx, 40, RULE_rp); try { enterOuterAlt(_localctx, 1); { setState(165); match(T__2); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class CommaContext extends ParserRuleContext { public CommaContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_comma; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterComma(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitComma(this); } } public final CommaContext comma() throws RecognitionException { CommaContext _localctx = new CommaContext(_ctx, getState()); enterRule(_localctx, 42, RULE_comma); try { enterOuterAlt(_localctx, 1); { 
setState(167); match(T__1); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class CompContext extends ParserRuleContext { public TerminalNode COMP() { return getToken(BaseQueryParser.COMP, 0); } public CompContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_comp; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterComp(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitComp(this); } } public final CompContext comp() throws RecognitionException { CompContext _localctx = new CompContext(_ctx, getState()); enterRule(_localctx, 44, RULE_comp); try { enterOuterAlt(_localctx, 1); { setState(169); match(COMP); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class IdContext extends ParserRuleContext { public TerminalNode ID() { return getToken(BaseQueryParser.ID, 0); } public IdContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_id; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).enterId(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof BaseQueryListener ) ((BaseQueryListener)listener).exitId(this); } } public final IdContext id() throws RecognitionException { IdContext _localctx = new IdContext(_ctx, getState()); enterRule(_localctx, 46, RULE_id); try { enterOuterAlt(_localctx, 1); { setState(171); 
match(ID); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 12: return expr_sempred((ExprContext)_localctx, predIndex); } return true; } private boolean expr_sempred(ExprContext _localctx, int predIndex) { switch (predIndex) { case 0: return precpred(_ctx, 4); case 1: return precpred(_ctx, 2); } return true; } public static final String _serializedATN = "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\23\u00b0\4\2\t\2"+ "\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\3\2\3\2\3\2\3\2\3\2\5\28\n\2\3\3\3\3\3\3\3\3\3\4\3\4\3\4\7\4A\n\4\f\4"+ "\16\4D\13\4\3\4\7\4G\n\4\f\4\16\4J\13\4\3\5\3\5\3\5\3\5\7\5P\n\5\f\5\16"+ "\5S\13\5\3\6\3\6\3\6\7\6X\n\6\f\6\16\6[\13\6\3\7\3\7\3\7\7\7`\n\7\f\7"+ "\16\7c\13\7\3\b\3\b\3\b\3\b\7\bi\n\b\f\b\16\bl\13\b\3\t\3\t\3\t\3\n\3"+ "\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\nz\n\n\3\13\3\13\3\13\3\f\3\f\3\f\3\r"+ "\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u008b\n\16\3\16\3\16\3\16"+ "\3\16\3\16\3\16\3\16\3\16\7\16\u0095\n\16\f\16\16\16\u0098\13\16\3\17"+ "\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25\3\26"+ "\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\31\2\3\32\32\2\4\6\b\n\f\16\20\22"+ "\24\26\30\32\34\36 \"$&(*,.\60\2\2\u00a5\2\67\3\2\2\2\49\3\2\2\2\6=\3"+ "\2\2\2\bK\3\2\2\2\nT\3\2\2\2\f\\\3\2\2\2\16d\3\2\2\2\20m\3\2\2\2\22p\3"+ "\2\2\2\24{\3\2\2\2\26~\3\2\2\2\30\u0081\3\2\2\2\32\u008a\3\2\2\2\34\u0099"+ "\3\2\2\2\36\u009b\3\2\2\2 \u009d\3\2\2\2\"\u009f\3\2\2\2$\u00a1\3\2\2"+ "\2&\u00a3\3\2\2\2(\u00a5\3\2\2\2*\u00a7\3\2\2\2,\u00a9\3\2\2\2.\u00ab"+ "\3\2\2\2\60\u00ad\3\2\2\2\628\5\4\3\2\638\5\6\4\2\648\5\b\5\2\658\5\n"+ 
"\6\2\668\5\f\7\2\67\62\3\2\2\2\67\63\3\2\2\2\67\64\3\2\2\2\67\65\3\2\2"+ "\2\67\66\3\2\2\28\3\3\2\2\29:\5\34\17\2:;\5\22\n\2;<\5\24\13\2<\5\3\2"+ "\2\2=>\5\36\20\2>B\5\16\b\2?A\5\20\t\2@?\3\2\2\2AD\3\2\2\2B@\3\2\2\2B"+ "C\3\2\2\2CH\3\2\2\2DB\3\2\2\2EG\5\26\f\2FE\3\2\2\2GJ\3\2\2\2HF\3\2\2\2"+ "HI\3\2\2\2I\7\3\2\2\2JH\3\2\2\2KL\5 \21\2LM\5\16\b\2MQ\5\30\r\2NP\5\26"+ "\f\2ON\3\2\2\2PS\3\2\2\2QO\3\2\2\2QR\3\2\2\2R\t\3\2\2\2SQ\3\2\2\2TU\5"+ "\"\22\2UY\5\20\t\2VX\5\26\f\2WV\3\2\2\2X[\3\2\2\2YW\3\2\2\2YZ\3\2\2\2"+ "Z\13\3\2\2\2[Y\3\2\2\2\\]\5$\23\2]a\5\20\t\2^`\5\26\f\2_^\3\2\2\2`c\3"+ "\2\2\2a_\3\2\2\2ab\3\2\2\2b\r\3\2\2\2ca\3\2\2\2dj\5\32\16\2ef\5,\27\2"+ "fg\5\32\16\2gi\3\2\2\2he\3\2\2\2il\3\2\2\2jh\3\2\2\2jk\3\2\2\2k\17\3\2"+ "\2\2lj\3\2\2\2mn\7\17\2\2no\5\16\b\2o\21\3\2\2\2pq\7\20\2\2qy\5\60\31"+ "\2rs\5(\25\2st\5\60\31\2tu\5,\27\2uv\5\60\31\2vw\3\2\2\2wx\5*\26\2xz\3"+ "\2\2\2yr\3\2\2\2yz\3\2\2\2z\23\3\2\2\2{|\7\n\2\2|}\5\16\b\2}\25\3\2\2"+ "\2~\177\7\r\2\2\177\u0080\5\16\b\2\u0080\27\3\2\2\2\u0081\u0082\7\16\2"+ "\2\u0082\u0083\5\16\b\2\u0083\31\3\2\2\2\u0084\u0085\b\16\1\2\u0085\u0086"+ "\5(\25\2\u0086\u0087\5\60\31\2\u0087\u0088\5*\26\2\u0088\u008b\3\2\2\2"+ "\u0089\u008b\5\60\31\2\u008a\u0084\3\2\2\2\u008a\u0089\3\2\2\2\u008b\u0096"+ "\3\2\2\2\u008c\u008d\f\6\2\2\u008d\u008e\5.\30\2\u008e\u008f\5\32\16\7"+ "\u008f\u0095\3\2\2\2\u0090\u0091\f\4\2\2\u0091\u0092\5&\24\2\u0092\u0093"+ "\5\32\16\5\u0093\u0095\3\2\2\2\u0094\u008c\3\2\2\2\u0094\u0090\3\2\2\2"+ "\u0095\u0098\3\2\2\2\u0096\u0094\3\2\2\2\u0096\u0097\3\2\2\2\u0097\33"+ "\3\2\2\2\u0098\u0096\3\2\2\2\u0099\u009a\7\6\2\2\u009a\35\3\2\2\2\u009b"+ "\u009c\7\7\2\2\u009c\37\3\2\2\2\u009d\u009e\7\b\2\2\u009e!\3\2\2\2\u009f"+ "\u00a0\7\t\2\2\u00a0#\3\2\2\2\u00a1\u00a2\7\13\2\2\u00a2%\3\2\2\2\u00a3"+ "\u00a4\7\f\2\2\u00a4\'\3\2\2\2\u00a5\u00a6\7\5\2\2\u00a6)\3\2\2\2\u00a7"+ "\u00a8\7\3\2\2\u00a8+\3\2\2\2\u00a9\u00aa\7\4\2\2\u00aa-\3\2\2\2\u00ab"+ 
"\u00ac\7\21\2\2\u00ac/\3\2\2\2\u00ad\u00ae\7\22\2\2\u00ae\61\3\2\2\2\r"+ "\67BHQYajy\u008a\u0094\u0096"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); } } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection.java15api; import com.intellij.ExtensionPoints; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.daemon.GroupNames; import com.intellij.codeInspection.*; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.extensions.ExtensionPoint; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.psi.*; import com.intellij.psi.javadoc.PsiDocComment; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.*; /** * @author max */ public class Java15APIUsageInspection extends BaseJavaLocalInspectionTool { @NonNls public static final String SHORT_NAME = "Since15"; private static final THashSet<String> ourForbidden15API = new THashSet<String>(1500); private static final THashSet<String> ourForbidden16API = new THashSet<String>(200); private static final THashSet<String> ourIgnored16ClassesAPI = new THashSet<String>(10); public boolean FORBID_15_API = true; public boolean FORBID_16_API = true; private JPanel myWholePanel; private JCheckBox my15ApiCb; private JCheckBox my16ApiCb; static { 
initForbiddenApi("apiList.txt", ourForbidden15API); initForbiddenApi("api16List.txt", ourForbidden16API); initForbiddenApi("ignore16List.txt", ourIgnored16ClassesAPI); } private static void initForbiddenApi(@NonNls String list, THashSet<String> set) { BufferedReader reader = null; try { final InputStream stream = Java15APIUsageInspection.class.getResourceAsStream(list); reader = new BufferedReader(new InputStreamReader(stream, CharsetToolkit.UTF8_CHARSET)); do { String line = reader.readLine(); if (line == null) break; set.add(line); } while(true); } catch (UnsupportedEncodingException e) { // can't be. } catch (IOException e) { // can't be } finally { if (reader != null) { try { reader.close(); } catch (IOException e) { // Will not happen } } } } @Nullable public JComponent createOptionsPanel() { my15ApiCb.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { FORBID_15_API = my15ApiCb.isSelected(); } }); my15ApiCb.setSelected(FORBID_15_API); my16ApiCb.setSelected(FORBID_16_API); my16ApiCb.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent e) { FORBID_16_API = my16ApiCb.isSelected(); } }); return myWholePanel; } @NotNull public String getGroupDisplayName() { return GroupNames.JDK15_SPECIFIC_GROUP_NAME; } @NotNull public String getDisplayName() { return InspectionsBundle.message("inspection.1.5.display.name", "@since 1.5(1.6)"); } @NotNull public String getShortName() { return SHORT_NAME; } @NotNull @Override public HighlightDisplayLevel getDefaultLevel() { return HighlightDisplayLevel.ERROR; } @Override public boolean isEnabledByDefault() { return false; } @NotNull public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) { return new MyVisitor(holder); } private static boolean isInProject(final PsiElement elt) { return elt.getManager().isInProject(elt); } @Override @Nullable public ProblemDescriptor[] checkFile(@NotNull PsiFile file, @NotNull InspectionManager 
manager, boolean isOnTheFly) { ExtensionPoint<FileCheckingInspection> point = Extensions.getRootArea().getExtensionPoint(ExtensionPoints.JAVA15_INSPECTION_TOOL); final FileCheckingInspection[] fileCheckingInspections = point.getExtensions(); for(FileCheckingInspection obj: fileCheckingInspections) { ProblemDescriptor[] descriptors = obj.checkFile(file, manager, isOnTheFly); if (descriptors != null) { return descriptors; } } return null; } private class MyVisitor extends JavaElementVisitor { private final ProblemsHolder myHolder; public MyVisitor(final ProblemsHolder holder) { myHolder = holder; } @Override public void visitDocComment(PsiDocComment comment) { // No references inside doc comment are of interest. } @Override public void visitClass(PsiClass aClass) { // Don't go into classes (anonymous, locals). } @Override public void visitReferenceExpression(PsiReferenceExpression expression) { visitReferenceElement(expression); } @Override public void visitReferenceElement(PsiJavaCodeReferenceElement reference) { super.visitReferenceElement(reference); final PsiElement resolved = reference.resolve(); if (resolved instanceof PsiCompiledElement && resolved instanceof PsiMember) { if (isJava15ApiUsage((PsiMember)resolved)) { register15Error(reference); } else if (isJava16ApiUsage(((PsiMember)resolved))) { PsiClass psiClass = null; final PsiElement qualifier = reference.getQualifier(); if (qualifier != null) { if (qualifier instanceof PsiExpression) { psiClass = PsiUtil.resolveClassInType(((PsiExpression)qualifier).getType()); } } else { psiClass = PsiTreeUtil.getParentOfType(reference, PsiClass.class); } if (psiClass != null) { if (isIgnored(psiClass)) return; for (PsiClass superClass : psiClass.getSupers()) { if (isIgnored(superClass)) return; } } register16Error(reference); } } } private boolean isIgnored(PsiClass psiClass) { final String qualifiedName = psiClass.getQualifiedName(); return qualifiedName != null && ourIgnored16ClassesAPI.contains(qualifiedName); } 
@Override public void visitNewExpression(final PsiNewExpression expression) { super.visitNewExpression(expression); final PsiMethod constructor = expression.resolveConstructor(); if (constructor instanceof PsiCompiledElement) { if (isJava15ApiUsage(constructor)) { register15Error(expression.getClassReference()); } else if (isJava16ApiUsage(constructor)) { register16Error(expression.getClassReference()); } } } private void registerError(PsiJavaCodeReferenceElement reference, @NonNls String api) { if (isInProject(reference)) { myHolder.registerProblem(reference, InspectionsBundle.message("inspection.1.5.problem.descriptor", api)); } } private void register15Error(PsiJavaCodeReferenceElement referenceElement) { registerError(referenceElement, "@since 1.5"); } private void register16Error(PsiJavaCodeReferenceElement referenceElement) { registerError(referenceElement, "@since 1.6"); } } private static boolean isForbiddenApiUsage(final PsiMember member, boolean is15ApiCheck) { if (member == null) return false; // Annotations caught by special inspection if necessary if (member instanceof PsiClass && ((PsiClass)member).isAnnotationType()) return false; if (member instanceof PsiAnonymousClass) return false; if (member.getContainingClass() instanceof PsiAnonymousClass) return false; if (member instanceof PsiClass && !(member.getParent() instanceof PsiClass || member.getParent() instanceof PsiFile)) return false; return (is15ApiCheck && ourForbidden15API.contains(getSignature(member))) || (!is15ApiCheck && ourForbidden16API.contains(getSignature(member))) || isForbiddenApiUsage(member.getContainingClass(), is15ApiCheck); } public boolean isJava15ApiUsage(final PsiMember member) { return FORBID_15_API && isForbiddenApiUsage(member, true); } public boolean isJava16ApiUsage(final PsiMember member) { return (FORBID_15_API || FORBID_16_API) && isForbiddenApiUsage(member, false); } private static String getSignature(PsiMember member) { if (member instanceof PsiClass) { return 
((PsiClass)member).getQualifiedName(); } if (member instanceof PsiField) { return getSignature(member.getContainingClass()) + "#" + member.getName(); } if (member instanceof PsiMethod) { final PsiMethod method = (PsiMethod)member; StringBuffer buf = new StringBuffer(); buf.append(getSignature(method.getContainingClass())); buf.append('#'); buf.append(method.getName()); buf.append('('); final PsiType[] params = method.getSignature(PsiSubstitutor.EMPTY).getParameterTypes(); for (PsiType type : params) { buf.append(type.getCanonicalText()); buf.append(";"); } buf.append(')'); return buf.toString(); } assert false; return null; } }
package org.broadinstitute.hellbender.tools.walkers.contamination; import htsjdk.samtools.util.OverlapDetector; import org.apache.commons.lang.mutable.MutableDouble; import org.apache.commons.lang3.Range; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.math3.distribution.BinomialDistribution; import org.apache.commons.math3.stat.descriptive.moment.Mean; import org.apache.commons.math3.stat.descriptive.rank.Median; import org.apache.commons.math3.util.FastMath; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.broadinstitute.barclay.argparser.Argument; import org.broadinstitute.barclay.argparser.CommandLineProgramProperties; import org.broadinstitute.barclay.help.DocumentedFeature; import org.broadinstitute.hellbender.cmdline.CommandLineProgram; import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions; import picard.cmdline.programgroups.DiagnosticsAndQCProgramGroup; import org.broadinstitute.hellbender.tools.copynumber.utils.segmentation.KernelSegmenter; import org.broadinstitute.hellbender.tools.walkers.mutect.FilterMutectCalls; import org.broadinstitute.hellbender.utils.MathUtils; import org.broadinstitute.hellbender.utils.OptimizationUtils; import org.broadinstitute.hellbender.utils.SimpleInterval; import java.io.File; import java.util.*; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; /** * <p> * Calculates the fraction of reads coming from cross-sample contamination, given results from {@link GetPileupSummaries}. * The resulting contamination table is used with {@link FilterMutectCalls}. * </p> * <p> * This tool is featured in the Somatic Short Mutation calling Best Practice Workflow. 
* See <a href="https://software.broadinstitute.org/gatk/documentation/article?id=11136">Tutorial#11136</a> for a * step-by-step description of the workflow and <a href="https://software.broadinstitute.org/gatk/documentation/article?id=11127">Article#11127</a> * for an overview of what traditional somatic calling entails. For the latest pipeline scripts, see the * <a href="https://github.com/broadinstitute/gatk/tree/master/scripts/mutect2_wdl">Mutect2 WDL scripts directory</a>. * </p> * * <p>This tool borrows from <a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3167057/">ContEst</a> by Cibulskis et al the idea of estimating contamination * from ref reads at hom alt sites. However, ContEst uses a probabilistic model that assumes a diploid genotype with no copy number * variation and independent contaminating reads. That is, ContEst assumes that each contaminating read is drawn randomly and * independently from a different human. This tool uses a simpler estimate of contamination that relaxes these assumptions. In particular, * it works in the presence of copy number variations and with an arbitrary number of contaminating samples. In addition, this tool * is designed to work well with no matched normal data. However, one can run {@link GetPileupSummaries} on a matched normal bam file * and input the result to this tool.</p> * * <h3>Usage examples</h3> * * <h4>Tumor-only mode</h4> * * <pre> * gatk CalculateContamination \ * -I pileups.table \ * -O contamination.table * </pre> * * <h4>Matched normal mode</h4> * * <pre> * gatk CalculateContamination \ * -I tumor-pileups.table \ * -matched normal-pileups.table \ * -O contamination.table * </pre> * <p> * The resulting table provides the fraction contamination, one line per sample, e.g. SampleID--TAB--Contamination. * The file has no header. 
* </p> * */ @CommandLineProgramProperties( summary = "Calculate the fraction of reads coming from cross-sample contamination", oneLineSummary = "Calculate the fraction of reads coming from cross-sample contamination", programGroup = DiagnosticsAndQCProgramGroup.class ) @DocumentedFeature public class CalculateContamination extends CommandLineProgram { private static final Logger logger = LogManager.getLogger(CalculateContamination.class); private static final int MAX_CHANGEPOINTS_PER_CHROMOSOME = 10; private static final int MIN_SITES_PER_SEGMENT = 5; // our analysis only cares about hom alt and het sites, so we throw away hom refs with a very conservative heuristic private static final double ALT_FRACTION_OF_DEFINITE_HOM_REF = 0.05; private static final double STRICT_LOH_MAF_THRESHOLD = 0.4; private static final double INITIAL_CONTAMINATION_GUESS = 0.05; private static final int MAX_ITERATIONS = 10; private static final double CONTAMINATION_CONVERGENCE_THRESHOLD = 0.001; private static final Range<Double> ALT_FRACTIONS_FOR_SEGMENTATION = Range.between(0.1, 0.9); private static final double KERNEL_SEGMENTER_LINEAR_COST = 1.0; private static final double KERNEL_SEGMENTER_LOG_LINEAR_COST = 1.0; private static final int KERNEL_SEGMENTER_DIMENSION = 100; private static final int POINTS_PER_SEGMENTATION_WINDOW = 50; private static final int MIN_COVERAGE = 10; private static final double DEFAULT_LOW_COVERAGE_RATIO_THRESHOLD = 1.0/2; private static final double DEFAULT_HIGH_COVERAGE_RATIO_THRESHOLD = 3.0; public static final int DESIRED_MINIMUM_HOM_ALT_COUNT = 50; public static final double MINOR_ALLELE_FRACTION_STEP_SIZE = 0.05; @Argument(fullName = StandardArgumentDefinitions.INPUT_LONG_NAME, shortName = StandardArgumentDefinitions.INPUT_SHORT_NAME, doc="The input table") private File inputPileupSummariesTable; public static final String MATCHED_NORMAL_LONG_NAME = "matched-normal"; public static final String MATCHED_NORMAL_SHORT_NAME = "matched"; @Argument(fullName = 
MATCHED_NORMAL_LONG_NAME, shortName = MATCHED_NORMAL_SHORT_NAME, doc="The matched normal input table", optional = true) private File matchedPileupSummariesTable = null; @Argument(fullName= StandardArgumentDefinitions.OUTPUT_LONG_NAME, shortName=StandardArgumentDefinitions.OUTPUT_SHORT_NAME, doc="The output table") private final File outputTable = null; public static final String TUMOR_SEGMENTATION_LONG_NAME = "tumor-segmentation"; public static final String TUMOR_SEGMENTATION_SHORT_NAME = "segments"; @Argument(fullName= TUMOR_SEGMENTATION_LONG_NAME, shortName= TUMOR_SEGMENTATION_SHORT_NAME, doc="The output table containing segmentation of the tumor by minor allele fraction", optional = true) private final File outputTumorSegmentation = null; public static final String LOW_COVERAGE_RATIO_THRESHOLD_NAME = "low-coverage-ratio-threshold"; @Argument(fullName = LOW_COVERAGE_RATIO_THRESHOLD_NAME, doc="The minimum coverage relative to the median.", optional = true) private final double lowCoverageRatioThreshold = DEFAULT_LOW_COVERAGE_RATIO_THRESHOLD; public static final String HIGH_COVERAGE_RATIO_THRESHOLD_NAME = "high-coverage-ratio-threshold"; @Argument(fullName= HIGH_COVERAGE_RATIO_THRESHOLD_NAME, doc="The maximum coverage relative to the mean.", optional = true) private final double highCoverageRatioThreshold = DEFAULT_HIGH_COVERAGE_RATIO_THRESHOLD; private static final double SEGMENTATION_KERNEL_VARIANCE = 0.025; private static final BiFunction<PileupSummary, PileupSummary, Double> SEGMENTATION_KERNEL = (ps1, ps2) -> { final double maf1 = FastMath.min(ps1.getAltFraction(), 1 - ps1.getAltFraction()); final double maf2 = FastMath.min(ps2.getAltFraction(), 1 - ps2.getAltFraction()); return FastMath.exp(-MathUtils.square(maf1 - maf2)/(2 * SEGMENTATION_KERNEL_VARIANCE)); }; @Override public Object doWork() { final List<PileupSummary> sites = filterSites(PileupSummary.readFromFile(inputPileupSummariesTable)); // used the matched normal to genotype (i.e. 
find hom alt sites) if available final List<PileupSummary> genotypingSites = matchedPileupSummariesTable == null ? sites : filterSites(PileupSummary.readFromFile(matchedPileupSummariesTable)); // we partition the genome into contiguous allelic copy-number segments in order to infer the local minor // allele fraction at each site. This is important because a minor allele fraction close to 1/2 (neutral) // allows hets and hom alts to be distinguished easily, while a low minor allele fraction makes it harder // to discriminate. It is crucial to know which site are true hom alts and which sites are hets with // loss of heterozygosity. We do this for the genotyping sample because that is the sample from which // the hom alts are deduced. final List<List<PileupSummary>> genotypingSegments = findSegments(genotypingSites); List<PileupSummary> homAltGenotypingSites = new ArrayList<>(); final MutableDouble genotypingContamination = new MutableDouble(INITIAL_CONTAMINATION_GUESS); for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++) { List<List<PileupSummary>> homAltSitesBySegment = Arrays.asList(new ArrayList<>()); final MutableDouble minorAlleleFractionThreshold = new MutableDouble(STRICT_LOH_MAF_THRESHOLD); while (homAltSitesBySegment.stream().mapToInt(List::size).sum() < DESIRED_MINIMUM_HOM_ALT_COUNT && minorAlleleFractionThreshold.doubleValue() > 0) { homAltSitesBySegment = genotypingSegments.stream() .map(segment -> segmentHomAlts(segment, genotypingContamination.doubleValue(), minorAlleleFractionThreshold.doubleValue())) .collect(Collectors.toList()); minorAlleleFractionThreshold.subtract(MINOR_ALLELE_FRACTION_STEP_SIZE); } homAltGenotypingSites = homAltSitesBySegment.stream().flatMap(List::stream).collect(Collectors.toList()); final double newGenotypingContamination = calculateContamination(homAltGenotypingSites, errorRate(genotypingSites)).getLeft(); if (Math.abs(newGenotypingContamination - genotypingContamination.doubleValue()) < 
CONTAMINATION_CONVERGENCE_THRESHOLD) { break; } genotypingContamination.setValue(newGenotypingContamination); } if (outputTumorSegmentation != null) { final List<List<PileupSummary>> tumorSegments = matchedPileupSummariesTable == null ? genotypingSegments : findSegments(sites); List<MinorAlleleFractionRecord> tumorMinorAlleleFractions = tumorSegments.stream() .map(this::makeMinorAlleleFractionRecord).collect(Collectors.toList()); MinorAlleleFractionRecord.writeToFile(tumorMinorAlleleFractions, outputTumorSegmentation); } final List<PileupSummary> homAltSites = subsetSites(sites, homAltGenotypingSites); final Pair<Double, Double> contaminationAndError = calculateContamination(homAltSites, errorRate(sites)); final double contamination = contaminationAndError.getLeft(); final double error = contaminationAndError.getRight(); ContaminationRecord.writeToFile(Arrays.asList(new ContaminationRecord(ContaminationRecord.Level.WHOLE_BAM.toString(), contamination, error)), outputTable); return "SUCCESS"; } private List<List<PileupSummary>> findSegments(final List<PileupSummary> sites) { final Map<String, List<PileupSummary>> sitesByContig = sites.stream().collect(Collectors.groupingBy(PileupSummary::getContig)); return sitesByContig.values().stream() .flatMap(contig -> findContigSegments(contig).stream()) .filter(segment -> segment.size() >= MIN_SITES_PER_SEGMENT) .collect(Collectors.toList()); } // in a biallelic site, essentially every non-ref, non-primary alt base is an error, since there are 2 such possible // errors out of 3 total, we multiply by 3/2 to get the total base error rate private double errorRate(List<PileupSummary> sites) { final long totalBases = sites.stream().mapToInt(PileupSummary::getTotalCount).sum(); final long otherAltBases = sites.stream().mapToInt(PileupSummary::getOtherAltCount).sum(); return 1.5 * ((double) otherAltBases / totalBases); } // subset sites in the contaminated sample to hom alt site found in the genotyping sample private static 
/**
 * Returns the subset of {@code sites} that overlap at least one locus in {@code subsetLoci}.
 */
List<PileupSummary> subsetSites(final List<PileupSummary> sites, final List<PileupSummary> subsetLoci) {
    final OverlapDetector<PileupSummary> subsetLociOverlapDetector = OverlapDetector.create(subsetLoci);
    return sites.stream().filter(subsetLociOverlapDetector::overlapsAny).collect(Collectors.toList());
}

/**
 * Finds the likely hom-alt sites within a single allelic copy-number segment.
 *
 * @param segment sites belonging to one segment, sorted by coordinate
 * @param contamination current estimate of the contamination fraction
 * @param minimumMinorAlleleFraction segments whose maximum-likelihood minor allele fraction falls
 *                                   below this threshold yield no hom alts (empty list), since
 *                                   allelic imbalance there makes the het/hom-alt call unreliable
 */
private List<PileupSummary> segmentHomAlts(final List<PileupSummary> segment, final double contamination, final double minimumMinorAlleleFraction) {
    final double minorAlleleFraction = calculateMinorAlleleFraction(segment);
    return minorAlleleFraction < minimumMinorAlleleFraction ? Collections.emptyList() :
            segment.stream()
                    .filter(site -> homAltProbability(site, minorAlleleFraction, contamination) > 0.5)
                    .collect(Collectors.toList());
}

/**
 * Maximum-likelihood estimate of a segment's minor allele fraction, obtained by maximizing the
 * het-site log likelihood over maf in [min segmentation alt fraction, 0.5].
 */
private double calculateMinorAlleleFraction(final List<PileupSummary> segment) {
    final List<PileupSummary> hets = getLikelyHetsBasedOnAlleleFraction(segment);
    final Function<Double, Double> objective = maf -> logLikelihoodOfHetsInSegment(hets, maf);
    return OptimizationUtils.argmax(objective, ALT_FRACTIONS_FOR_SEGMENTATION.getMinimum(), 0.5, 0.4, 0.01, 0.01, 20);
}

/**
 * Summarizes a segment (contig, first site's start, last site's end) together with its
 * maximum-likelihood minor allele fraction.
 */
private MinorAlleleFractionRecord makeMinorAlleleFractionRecord(final List<PileupSummary> segment) {
    final String contig = segment.get(0).getContig();
    final int start = segment.get(0).getStart();
    final int end = segment.get(segment.size() - 1).getEnd();
    final double minorAlleleFraction = calculateMinorAlleleFraction(segment);
    return new MinorAlleleFractionRecord(new SimpleInterval(contig, start, end), minorAlleleFraction);
}

// we want log(1/2 (likelihood of alt minor + likelihood of alt major))
// = logSumLog(log likelihood of alt minor, log likelihood of alt major) - log(2)
private double logLikelihoodOfHetsInSegment(final List<PileupSummary> hets, final double minorAlleleFraction) {
    return hets.stream().mapToDouble(het -> {
        final int n = het.getTotalCount();
        final int a = het.getAltCount();
        // null RandomGenerator is fine: only (log)probability is used, never sampling
        final double altMinorLogLikelihood = new BinomialDistribution(null, n, minorAlleleFraction).logProbability(a);
        final double altMajorLogLikelihood = new BinomialDistribution(null, n, 1 - minorAlleleFraction).logProbability(a);
        return MathUtils.logSumLog(altMinorLogLikelihood, altMajorLogLikelihood) + MathUtils.LOG_ONE_HALF;
    }).sum();
}

/**
 * Segments one contig's sites via kernel segmentation of the obvious het sites, then assigns
 * every site to the segment it overlaps.  Returns one coordinate-sorted site list per segment.
 */
private List<List<PileupSummary>> findContigSegments(List<PileupSummary> sites) {
    // segment based on obvious hets
    final List<PileupSummary> hetSites = getLikelyHetsBasedOnAlleleFraction(sites);
    if (hetSites.isEmpty()) {
        return Collections.emptyList();
    }

    final List<Integer> changepoints = new ArrayList<>();
    // when the kernel segmenter finds a changepoint at index n, that means index n belongs to the left segment, which goes
    // against the usual end-exclusive intervals of IndexRange etc. This explains adding in the first changepoint of -1
    // instead of 0 and all the "changepoint + 1" constructions below
    changepoints.add(-1);
    final KernelSegmenter<PileupSummary> segmenter = new KernelSegmenter<>(hetSites);
    changepoints.addAll(segmenter.findChangepoints(MAX_CHANGEPOINTS_PER_CHROMOSOME, SEGMENTATION_KERNEL, KERNEL_SEGMENTER_DIMENSION,
            Arrays.asList(POINTS_PER_SEGMENTATION_WINDOW), KERNEL_SEGMENTER_LINEAR_COST, KERNEL_SEGMENTER_LOG_LINEAR_COST, KernelSegmenter.ChangepointSortOrder.INDEX));
    changepoints.add(hetSites.size() - 1);

    final List<SimpleInterval> segments = IntStream.range(0, changepoints.size() - 1)
            .mapToObj(n -> {
                final PileupSummary firstSiteInSegment = hetSites.get(changepoints.get(n) + 1);
                final PileupSummary lastSiteInSegment = hetSites.get(changepoints.get(n + 1));
                return new SimpleInterval(firstSiteInSegment.getContig(), firstSiteInSegment.getStart(), lastSiteInSegment.getEnd());
            }).collect(Collectors.toList());

    final OverlapDetector<PileupSummary> od = OverlapDetector.create(sites);
    // for each segment, find overlapping sites and sort by coordinate
    return segments.stream()
            .map(segment -> od.getOverlaps(segment).stream()
                    .sorted(Comparator.comparingInt(PileupSummary::getStart))
                    .collect(Collectors.toList()))
            .collect(Collectors.toList());
}

/**
 * Sites whose alt fraction is unambiguously het-like (within ALT_FRACTIONS_FOR_SEGMENTATION).
 */
private List<PileupSummary> getLikelyHetsBasedOnAlleleFraction(List<PileupSummary> sites) {
    return sites.stream()
            .filter(ps -> ALT_FRACTIONS_FOR_SEGMENTATION.contains(ps.getAltFraction()))
            .collect(Collectors.toList());
}

/**
 * Estimates the contamination fraction and its standard error from ref reads observed at hom-alt
 * sites, after subtracting the ref reads expected from sequencing error alone.
 *
 * @return pair of (contamination, standard error); (0, 1) if no hom-alt sites were found
 */
private static Pair<Double, Double> calculateContamination(List<PileupSummary> homAltSites, final double errorRate) {
    if (homAltSites.isEmpty()) {
        logger.warn("No hom alt sites found! Perhaps GetPileupSummaries was run on too small of an interval, or perhaps the sample was extremely inbred or haploid.");
        return Pair.of(0.0, 1.0);
    }

    final long totalReadCount = homAltSites.stream().mapToLong(PileupSummary::getTotalCount).sum();
    final long totalRefCount = homAltSites.stream().mapToLong(PileupSummary::getRefCount).sum();

    // if eg ref is A, alt is C, then # of ref reads due to error is roughly (# of G read + # of T reads)/2
    final long errorRefCount = Math.round(totalReadCount * errorRate / 3);
    final long contaminationRefCount = Math.max(totalRefCount - errorRefCount, 0);
    // NOTE(review): this denominator is zero only if every site has population allele frequency 1 —
    // presumably impossible for selected hom-alt sites, but worth confirming upstream filtering
    final double totalDepthWeightedByRefFrequency = homAltSites.stream()
            .mapToDouble(ps -> ps.getTotalCount() * (1 - ps.getAlleleFrequency()))
            .sum();
    final double contamination = contaminationRefCount / totalDepthWeightedByRefFrequency;
    final double standardError = Math.sqrt(contamination / totalDepthWeightedByRefFrequency);

    logger.info(String.format("In %d homozygous variant sites we find %d reference reads due to contamination and %d" +
            " due to sequencing error out of a total %d reads.", homAltSites.size(), contaminationRefCount, errorRefCount, totalReadCount));
    logger.info(String.format("Based on population data, we would expect %d reference reads in a contaminant with equal depths at these sites.", (long) totalDepthWeightedByRefFrequency));
    logger.info(String.format("Therefore, we estimate a contamination of %.3f.", contamination));
    logger.info(String.format("The error bars on this estimate are %.5f.", standardError));

    return Pair.of(Math.min(contamination, 1.0), standardError);
}

/**
 * Removes sites with outlier coverage (relative to the median/mean of covered sites) and sites
 * that look definitively hom ref, leaving sites informative for contamination estimation.
 */
private List<PileupSummary> filterSites(final List<PileupSummary> allSites) {
    // Just in case the intervals given to GetPileupSummaries contained un-covered sites, we remove them
    // so that a bunch of zeroes don't throw off the median coverage
    final List<PileupSummary> coveredSites = allSites.stream().filter(s -> s.getTotalCount() > MIN_COVERAGE).collect(Collectors.toList());
    final double[] coverage = coveredSites.stream().mapToDouble(PileupSummary::getTotalCount).toArray();
    final double medianCoverage = new Median().evaluate(coverage);
    final double meanCoverage = new Mean().evaluate(coverage);
    final double lowCoverageThreshold = medianCoverage * lowCoverageRatioThreshold;
    final double highCoverageThreshold = meanCoverage * highCoverageRatioThreshold;
    return coveredSites.stream()
            .filter(ps -> ps.getTotalCount() > lowCoverageThreshold && ps.getTotalCount() < highCoverageThreshold)
            .filter(ps -> ps.getAltFraction() > ALT_FRACTION_OF_DEFINITE_HOM_REF)
            .collect(Collectors.toList());
}

/**
 * Posterior probability that a site is hom alt (vs het), given the segment's minor allele
 * fraction and the contamination.  Sites with alt fraction below 1/2 are assumed not hom alt.
 */
private double homAltProbability(final PileupSummary site, final double minorAlleleFraction, final double contamination) {
    final double alleleFrequency = site.getAlleleFrequency();
    final double homAltPrior = MathUtils.square(alleleFrequency);
    final double hetPrior = 2 * alleleFrequency * (1 - alleleFrequency);

    final int altCount = site.getAltCount();
    final int totalCount = altCount + site.getRefCount();

    if (altCount < totalCount / 2) {
        return 0;
    }

    final double homAltLikelihood = new BinomialDistribution(null, totalCount, 1 - contamination).probability(altCount);
    final double hetLikelihood = new BinomialDistribution(null, totalCount, 1 - minorAlleleFraction).probability(altCount);

    final double unnormalizedHomAltProbability = homAltPrior * homAltLikelihood;
    final double unnormalizedHetProbability = hetPrior * hetLikelihood;

    return unnormalizedHomAltProbability / (unnormalizedHetProbability + unnormalizedHomAltProbability);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.run; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.Set; import java.util.concurrent.TimeUnit; import ch.qos.logback.classic.Level; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Splitter; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.commons.io.FileUtils; import org.apache.felix.cm.file.ConfigurationHandler; import org.apache.jackrabbit.core.data.DataStore; import org.apache.jackrabbit.core.data.DataStoreException; import org.apache.jackrabbit.oak.api.Blob; import org.apache.jackrabbit.oak.api.CommitFailedException; import 
org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage.AzureConstants; import org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage.AzureDataStoreUtils; import org.apache.jackrabbit.oak.blob.cloud.s3.S3Constants; import org.apache.jackrabbit.oak.blob.cloud.s3.S3DataStoreUtils; import org.apache.jackrabbit.oak.commons.FileIOUtils; import org.apache.jackrabbit.oak.commons.junit.LogCustomizer; import org.apache.jackrabbit.oak.plugins.blob.BlobGCTest.MemoryBlobStoreNodeStore; import org.apache.jackrabbit.oak.plugins.blob.MarkSweepGarbageCollector; import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; import org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore; import org.apache.jackrabbit.oak.plugins.document.DocumentMK; import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; import org.apache.jackrabbit.oak.plugins.document.MongoUtils; import org.apache.jackrabbit.oak.plugins.document.Revision; import org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector; import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; import org.apache.jackrabbit.oak.run.cli.BlobStoreOptions.Type; import org.apache.jackrabbit.oak.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; import org.apache.jackrabbit.oak.segment.azure.AzureUtilities; import org.apache.jackrabbit.oak.segment.azure.tool.ToolUtils; import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions; import org.apache.jackrabbit.oak.segment.file.FileStore; import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentNodeStorePersistence; import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore; import org.apache.jackrabbit.oak.spi.cluster.ClusterRepositoryInfo; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; 
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.stats.Clock;
import org.jetbrains.annotations.Nullable;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.base.StandardSystemProperty.FILE_SEPARATOR;
import static org.apache.jackrabbit.oak.commons.FileIOUtils.sort;
import static org.apache.jackrabbit.oak.commons.FileIOUtils.writeStrings;
import static org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreUtils.SharedStoreRecordType.REFERENCES;
import static org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreUtils.SharedStoreRecordType.REPOSITORY;
import static org.apache.jackrabbit.oak.run.DataStoreCommand.VerboseIdLogger.DASH;
import static org.apache.jackrabbit.oak.run.DataStoreCommand.VerboseIdLogger.HASH;
import static org.apache.jackrabbit.oak.run.DataStoreCommand.VerboseIdLogger.filterFiles;
import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions.defaultGCOptions;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link DataStoreCommand}
 *
 * <p>Parameterized over the cross product of node-store fixtures (Mongo, Segment/Tar,
 * Segment/Azure) and data-store fixtures (S3, Azure, FileDataStore); unavailable fixtures
 * (missing credentials / no Mongo) are filtered out by {@code FixtureHelper}.
 */
@RunWith(Parameterized.class)
public class DataStoreCommandTest {
    private static Logger log = LoggerFactory.getLogger(DataStoreCommandTest.class);

    @Rule
    public final TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));

    // the parameterized fixtures this test instance runs against
    private DataStoreFixture blobFixture;
    private StoreFixture storeFixture;
    // extra CLI flags appended to every command invocation (set in setup)
    private String additionalParams;
    private DataStoreBlobStore setupDataStore;
    private NodeStore store;

    public DataStoreCommandTest(StoreFixture storeFixture, DataStoreFixture blobFixture) {
        this.storeFixture = storeFixture;
        this.blobFixture = blobFixture;
    }

    @Parameterized.Parameters(name="{index}: ({0} : {1})")
    public static List<Object[]> fixtures() {
        return FixtureHelper.get();
    }

    // Initializes the data store and node store and registers this repository in the data store
    @Before
    public void setup() throws Exception {
        setupDataStore = blobFixture.init(temporaryFolder);
        store = storeFixture.init(setupDataStore, temporaryFolder.newFolder());
        additionalParams = "--ds-read-write";
        String repoId = ClusterRepositoryInfo.getOrCreateId(store);
        setupDataStore.addMetadataRecord(new ByteArrayInputStream(new byte[0]), REPOSITORY.getNameFromId(repoId));
    }

    // Creates numBlobs nodes each holding a binary, deletes up to numMaxDeletions of them
    // (random, so possibly fewer due to duplicates), and removes missingDataStore chunks from
    // the data store to simulate missing binaries.  Records everything in a Data object.
    private static Data prepareData(StoreFixture storeFixture, DataStoreFixture blobFixture, int numBlobs, int numMaxDeletions, int missingDataStore)
        throws Exception {
        DataStoreBlobStore blobStore = blobFixture.getDataStore();
        NodeStore store = storeFixture.getNodeStore();
        storeFixture.preDataPrepare();
        Data data = new Data();

        List<Integer> toBeDeleted = Lists.newArrayList();
        Random rand = new Random();
        for (int i = 0; i < numMaxDeletions; i++) {
            int n = rand.nextInt(numBlobs);
            if (!toBeDeleted.contains(n)) {
                toBeDeleted.add(n);
            }
        }

        NodeBuilder a = store.getRoot().builder();
        for (int i = 0; i < numBlobs; i++) {
            Blob b = store.createBlob(randomStream(i, 18342));
            Iterator<String> idIter = blobStore.resolveChunks(b.getContentIdentity());
            while (idIter.hasNext()) {
                String chunk = idIter.next();
                data.added.add(chunk);
                data.idToPath.put(chunk, "/c" + i);
                if (toBeDeleted.contains(i)) {
                    data.deleted.add(chunk);
                }
            }
            a.child("c" + i).setProperty("x", b);
        }
        store.merge(a, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        log.info("Created Data : {}", data);

        for (int id : toBeDeleted) {
            delete("c" + id, store);
        }
        log.info("Deleted nodes : {}", toBeDeleted.size());

        // pick still-referenced chunks to purge from the data store (simulated missing binaries)
        int missing = 0;
        Iterator<String> iterator = data.added.iterator();
        while (iterator.hasNext()) {
            if (missing < missingDataStore) {
                String id = iterator.next();
                if (!data.deleted.contains(id)) {
                    data.missingDataStore.add(id);
                    missing++;
                }
            } else {
                break;
            }
        }

        for (String id : data.missingDataStore) {
            long count = blobStore.countDeleteChunks(ImmutableList.of(id), 0);
            assertEquals(1, count);
        }

        // Sleep a little to make eligible for cleanup
        TimeUnit.MILLISECONDS.sleep(10);
        storeFixture.postDataPrepare();
        return data;
    }

    // Removes the given top-level node
    protected static void delete(String nodeId, NodeStore nodeStore) throws CommitFailedException {
        NodeBuilder builder = nodeStore.getRoot().builder();
        builder.child(nodeId).remove();
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }

    @After
    public void tearDown() {
        storeFixture.after();
        blobFixture.after();
    }

    // Command run without any action flag must log "No actions specified"
    @Test
    public void testMissingOpParams() throws Exception {
        storeFixture.close();
        File dump = temporaryFolder.newFolder();
        List<String> argsList = Lists
            .newArrayList("--" + getOption(blobFixture.getType()), blobFixture.getConfigPath(), "--out-dir",
                dump.getAbsolutePath(), storeFixture.getConnectionString(), "--reset-log-config", "false", "--work-dir",
                temporaryFolder.newFolder().getAbsolutePath());
        if (!Strings.isNullOrEmpty(additionalParams)) {
            argsList.add(additionalParams);
        }
        log.info("Running testMissingOpParams: {}", argsList);
        testIncorrectParams(argsList, Lists.newArrayList("No actions specified"), DataStoreCommand.class);
    }

    /**
     * Only for Segment/Tar
     * @throws Exception
     */
    @Test
    public void testTarNoDS() throws Exception {
        storeFixture.close();
        Assume.assumeTrue(storeFixture instanceof StoreFixture.SegmentStoreFixture);
        File dump = temporaryFolder.newFolder();
        List<String> argsList = Lists
            .newArrayList("--check-consistency", storeFixture.getConnectionString(), "--out-dir", dump.getAbsolutePath(),
                "--reset-log-config", "false", "--work-dir", temporaryFolder.newFolder().getAbsolutePath());
        if (!Strings.isNullOrEmpty(additionalParams)) {
            argsList.add(additionalParams);
        }
        testIncorrectParams(argsList, Lists.newArrayList("No BlobStore specified"), DataStoreCommand.class);
    }

    @Test
    public void testConsistencyMissing() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        storeFixture.close();
        testConsistency(dump, data, false);
    }

    @Test
    public void testConsistencyVerbose() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        storeFixture.close();
        testConsistency(dump, data, true);
    }

    @Test
    public void testConsistencyNoMissing() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 0);
        storeFixture.close();
        testConsistency(dump, data, false);
    }

    @Test
    public void gc() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        storeFixture.close();
        testGc(dump, data, 0, false);
    }

    @Test
    public void gcNoDeletion() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 0, 1);
        storeFixture.close();
        testGc(dump, data, 0, false);
    }

    // max-age keeps everything too young to collect
    @Test
    public void gcNoneOld() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        storeFixture.close();
        testGc(dump, data, 10000, false);
    }

    @Test
    public void gcOnlyMark() throws Exception {
        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        storeFixture.close();
        testGc(dump, data, 10000, true);
    }

    // A second repository shares the data store and has marked its references: GC proceeds
    @Test
    public void gcMarkOnRemote() throws Exception {
        MemoryBlobStoreNodeStore memNodeStore = new MemoryBlobStoreNodeStore(setupDataStore);
        String rep2Id = ClusterRepositoryInfo.getOrCreateId(memNodeStore);
        setupDataStore.addMetadataRecord(new ByteArrayInputStream(new byte[0]), REPOSITORY.getNameFromId(rep2Id));
        Map<String, String> idMapping = dummyData(memNodeStore, rep2Id, store, setupDataStore, temporaryFolder.newFile());

        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        data.added.addAll(idMapping.keySet());
        data.idToPath.putAll(idMapping);
        storeFixture.close();

        testGc(dump, data, 0, false);
    }

    // A second repository is registered but has not marked references: GC must refuse to sweep
    @Test
    public void gcNoMarkOnRemote() throws Exception {
        MemoryBlobStoreNodeStore memNodeStore = new MemoryBlobStoreNodeStore(setupDataStore);
        String rep2Id = ClusterRepositoryInfo.getOrCreateId(memNodeStore);
        setupDataStore.addMetadataRecord(new ByteArrayInputStream(new byte[0]), REPOSITORY.getNameFromId(rep2Id));

        File dump = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 1);
        storeFixture.close();

        List<String> argsList = Lists
            .newArrayList("--collect-garbage", "--max-age", String.valueOf(0), "--" + getOption(blobFixture.getType()),
                blobFixture.getConfigPath(), storeFixture.getConnectionString(), "--out-dir", dump.getAbsolutePath(),
                "--reset-log-config", "false", "--work-dir", temporaryFolder.newFolder().getAbsolutePath());
        if (!Strings.isNullOrEmpty(additionalParams)) {
            argsList.add(additionalParams);
        }
        testIncorrectParams(argsList, Lists.newArrayList("Not all repositories have marked references available : "),
            MarkSweepGarbageCollector.class);
    }

    /**
     * Only for Segment/Tar
     * @throws Exception
     */
    @Test
    public void testConsistencyFakeDS() throws Exception {
        Assume.assumeTrue(storeFixture instanceof StoreFixture.SegmentStoreFixture);

        File dump = temporaryFolder.newFolder();
        File dsPath = temporaryFolder.newFolder();
        Data data = prepareData(storeFixture, blobFixture, 10, 5, 0);
        storeFixture.close();

        List<String> argsList = Lists
            .newArrayList("--check-consistency", "--fake-ds-path", dsPath.getAbsolutePath(),
                storeFixture.getConnectionString(), "--out-dir", dump.getAbsolutePath(), "--work-dir",
                temporaryFolder.newFolder().getAbsolutePath());
        if (!Strings.isNullOrEmpty(additionalParams)) {
            argsList.add(additionalParams);
        }

        DataStoreCommand cmd = new DataStoreCommand();
        cmd.execute(argsList.toArray(new String[0]));

        // fake data store: nothing available, all referenced ids marked
        assertFileEquals(dump, "avail-", Sets.newHashSet());
        assertFileEquals(dump, "marked-", Sets.difference(data.added, data.deleted));
    }

    // Runs --check-consistency and validates the avail-/marked-/gccand- output files
    private void testConsistency(File dump, Data data, boolean verbose) throws Exception {
        List<String> argsList = Lists
            .newArrayList("--check-consistency", "--" + getOption(blobFixture.getType()), blobFixture.getConfigPath(),
                storeFixture.getConnectionString(), "--out-dir", dump.getAbsolutePath(), "--work-dir",
                temporaryFolder.newFolder().getAbsolutePath());
        if (!Strings.isNullOrEmpty(additionalParams)) {
            argsList.add(additionalParams);
        }
        if (verbose) {
            argsList.add("--verbose");
        }
        DataStoreCommand cmd = new DataStoreCommand();
        cmd.execute(argsList.toArray(new String[0]));

        assertFileEquals(dump, "avail-", Sets.difference(data.added, data.missingDataStore));

        // Only verbose or Document would have paths suffixed
        assertFileEquals(dump, "marked-", (verbose || storeFixture instanceof StoreFixture.MongoStoreFixture) ?
            encodedIdsAndPath(Sets.difference(data.added, data.deleted), blobFixture.getType(), data.idToPath, false) :
            Sets.difference(data.added, data.deleted));

        // Verbose would have paths as well as ids changed but normally only DocumentNS would have paths suffixed
        assertFileEquals(dump, "gccand-", verbose ?
            encodedIdsAndPath(data.missingDataStore, blobFixture.getType(), data.idToPath, true) :
            (storeFixture instanceof StoreFixture.MongoStoreFixture) ?
                encodedIdsAndPath(data.missingDataStore, blobFixture.getType(), data.idToPath, false) :
                data.missingDataStore);
    }

    // Runs --collect-garbage (optionally mark-only) and validates output files and surviving blobs
    private void testGc(File dump, Data data, long maxAge, boolean markOnly) throws Exception {
        List<String> argsList = Lists
            .newArrayList("--collect-garbage", String.valueOf(markOnly), "--max-age", String.valueOf(maxAge),
                "--" + getOption(blobFixture.getType()), blobFixture.getConfigPath(),
                storeFixture.getConnectionString(), "--out-dir", dump.getAbsolutePath(), "--work-dir",
                temporaryFolder.newFolder().getAbsolutePath());
        if (!Strings.isNullOrEmpty(additionalParams)) {
            argsList.add(additionalParams);
        }
        DataStoreCommand cmd = new DataStoreCommand();
        cmd.execute(argsList.toArray(new String[0]));

        if (!markOnly) {
            assertFileEquals(dump, "avail-", Sets.difference(data.added, data.missingDataStore));
        } else {
            assertFileNull(dump, "avail-");
        }

        assertFileEquals(dump, "marked-", Sets.difference(data.added, data.deleted));

        if (!markOnly) {
            assertFileEquals(dump, "gccand-", data.deleted);
        } else {
            assertFileNull(dump, "gccand-");
        }

        // with maxAge 0 the deleted blobs must actually be gone from the data store
        Sets.SetView<String> blobsBeforeGc = Sets.difference(data.added, data.missingDataStore);
        if (maxAge <= 0) {
            assertEquals(Sets.difference(blobsBeforeGc, data.deleted), blobs(setupDataStore));
        } else {
            assertEquals(blobsBeforeGc, blobs(setupDataStore));
        }
    }

    // Executes the command expecting failure and asserts the expected message was logged
    public static void testIncorrectParams(List<String> argList, ArrayList<String> assertMsg, Class logger) {
        LogCustomizer customLogs = LogCustomizer
            .forLogger(logger.getName())
            .enable(Level.INFO)
            .filter(Level.INFO)
            .contains(assertMsg.get(0))
            .create();
        customLogs.starting();

        DataStoreCommand cmd = new DataStoreCommand();
        try {
            cmd.execute(argList.toArray(new String[0]));
        } catch (Exception e) {
            log.error("", e);
        }
        Assert.assertNotNull(customLogs.getLogs().get(0));
        customLogs.finished();
    }

    // Adds 2 binaries referenced from the second (in-memory) repository and writes its
    // references file into the shared data store; returns chunk-id -> node-path mapping
    private static Map<String, String> dummyData(MemoryBlobStoreNodeStore memNodeStore, String rep2Id, NodeStore store,
        DataStoreBlobStore setupDataStore, File f) throws IOException, CommitFailedException, DataStoreException {
        List<String> list = Lists.newArrayList();
        Map<String, String> idMapping = Maps.newHashMap();
        NodeBuilder a = memNodeStore.getRoot().builder();
        for (int i = 0; i < 2; i++) {
            Blob b = store.createBlob(randomStream(i+100, 18342));
            Iterator<String> idIter = setupDataStore.resolveChunks(b.getContentIdentity());
            while (idIter.hasNext()) {
                String id = idIter.next();
                list.add(id);
                idMapping.put(id, "/d" + i);
            }
            a.child("d" + i).setProperty("x", b);
        }
        memNodeStore.merge(a, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        writeStrings(list.iterator(), f, false);
        sort(f);
        setupDataStore.addMetadataRecord(f, REFERENCES.getNameFromId(rep2Id));
        return idMapping;
    }

    // Asserts that the output file with the given prefix contains exactly blobsAdded
    private static void assertFileEquals(File dump, String prefix, Set<String> blobsAdded) throws IOException {
        File file = filterFiles(dump, prefix);
        Assert.assertNotNull(file);
        Assert.assertTrue(file.exists());
        assertEquals(blobsAdded, FileIOUtils.readStringsAsSet(new FileInputStream(file), true));
    }

    private static void assertFileNull(File dump, String prefix) {
        File file = filterFiles(dump, prefix);
        Assert.assertNull(file);
    }

    // All chunk ids currently present in the data store
    private static Set<String> blobs(GarbageCollectableBlobStore blobStore) throws Exception {
        Iterator<String> cur = blobStore.getAllChunkIds(0);
        Set<String> existing = Sets.newHashSet();
        while (cur.hasNext()) {
            existing.add(cur.next());
        }
        return existing;
    }

    // Deterministic pseudo-random content of the given size
    static InputStream randomStream(int seed, int size) {
        Random r = new Random(seed);
        byte[] data = new byte[size];
        r.nextBytes(data);
        return new ByteArrayInputStream(data);
    }

    // Writes props in OSGi config-admin format and returns the file path
    private static String createTempConfig(File cfgFile, Properties props) throws IOException {
        FileOutputStream fos = FileUtils.openOutputStream(cfgFile);
        ConfigurationHandler.write(fos, props);
        return cfgFile.getAbsolutePath();
    }

    // "id,path" strings as emitted by the verbose id logger; optionally encodes the id per ds type
    private static Set<String> encodedIdsAndPath(Set<String> ids, Type dsOption, Map<String, String> idToNodes,
        boolean encodeId) {
        return Sets.newHashSet(Iterators.transform(ids.iterator(), new Function<String, String>() {
            @Nullable @Override
            public String apply(@Nullable String input) {
                return Joiner.on(",").join(encodeId ? encodeId(input, dsOption) : input,
                    idToNodes.get(input));
            }
        }));
    }

    // Encodes a chunk id the way the backing data store lays it out (FDS path prefix, S3/Azure dash)
    static String encodeId(String id, Type dsType) {
        List<String> idLengthSepList = Splitter.on(HASH).trimResults().omitEmptyStrings().splitToList(id);
        String blobId = idLengthSepList.get(0);

        if (dsType == Type.FDS) {
            return (blobId.substring(0, 2) + FILE_SEPARATOR.value() + blobId.substring(2, 4) + FILE_SEPARATOR.value() + blobId
                .substring(4, 6) + FILE_SEPARATOR.value() + blobId);
        } else if (dsType == Type.S3 || dsType == Type.AZURE) {
            return (blobId.substring(0, 4) + DASH + blobId.substring(4));
        }
        return id;
    }

    // CLI flag name for the given data store type
    private static String getOption(Type dsOption) {
        if (dsOption == Type.FDS) {
            return "fds";
        } else if (dsOption == Type.S3) {
            return "s3ds";
        } else if (dsOption == Type.AZURE) {
            return "azureds";
        } else {
            return "fake-ds-path";
        }
    }

    // Bookkeeping for the prepared test data: all chunk ids, id->path, deleted ids,
    // and ids purged from the data store
    static class Data {
        private Set<String> added;
        private Map<String, String> idToPath;
        private Set<String> deleted;
        private Set<String> missingDataStore;

        public Data() {
            added = Sets.newHashSet();
            idToPath = Maps.newHashMap();
            deleted = Sets.newHashSet();
            missingDataStore = Sets.newHashSet();
        }
    }

    // Abstraction over the node-store backend under test
    interface StoreFixture {
        NodeStore init(DataStoreBlobStore blobStore, File storeFile) throws Exception;
        NodeStore getNodeStore() throws Exception;
        String getConnectionString();
        boolean isAvailable();
        void preDataPrepare() throws Exception;
        void postDataPrepare() throws Exception;
        void close();
        void after();

        StoreFixture MONGO = new MongoStoreFixture();
        StoreFixture SEGMENT = new SegmentStoreFixture();
        StoreFixture SEGMENT_AZURE = new AzureSegmentStoreFixture();

        class MongoStoreFixture implements StoreFixture {
            private final Clock.Virtual clock;
            MongoConnection c;
            DocumentMK.Builder builder;
            private DocumentNodeStore nodeStore;

            public MongoStoreFixture() {
                c = MongoUtils.getConnection();
                if (c != null) {
                    MongoUtils.dropCollections(c.getDBName());
                }
                clock = new Clock.Virtual();
            }

            @Override
            public NodeStore init(DataStoreBlobStore blobStore, File storeFile) {
                c = MongoUtils.getConnection();
                if (c != null) {
                    MongoUtils.dropCollections(c.getDBName());
                }
                clock.waitUntil(Revision.getCurrentTimestamp());
                builder = new DocumentMK.Builder().clock(clock).setMongoDB(c.getMongoClient(), c.getDBName());
                nodeStore = builder.setBlobStore(blobStore).getNodeStore();
                return nodeStore;
            }

            @Override
            public NodeStore getNodeStore() {
                return nodeStore;
            }

            @Override public String getConnectionString() {
                return MongoUtils.URL;
            }

            @Override public void postDataPrepare() throws Exception {
                long maxAge = 20; // hours
                // 1. Go past GC age and check no GC done as nothing deleted
                clock.waitUntil(clock.getTime() + TimeUnit.MINUTES.toMillis(maxAge));
                VersionGarbageCollector vGC = nodeStore.getVersionGarbageCollector();
                VersionGarbageCollector.VersionGCStats stats = vGC.gc(0, TimeUnit.MILLISECONDS);
            }

            @Override public void close() {
                nodeStore.dispose();
            }

            @Override public boolean isAvailable() {
                return c != null;
            }

            @Override public void preDataPrepare() {
            }

            @Override public void after() {
                MongoUtils.dropCollections(c.getDBName());
                nodeStore.dispose();
            }
        }

        class SegmentStoreFixture implements StoreFixture {
            protected FileStore fileStore;
            protected SegmentNodeStore store;
            protected SegmentGCOptions gcOptions = defaultGCOptions();
            protected String storePath;

            @Override
            public NodeStore init(DataStoreBlobStore blobStore, File storeFile) throws Exception {
                storePath = storeFile.getAbsolutePath();
                FileStoreBuilder fileStoreBuilder = FileStoreBuilder.fileStoreBuilder(storeFile);
                fileStore = fileStoreBuilder.withBlobStore(blobStore).withMaxFileSize(256).withSegmentCacheSize(64).build();
                store = SegmentNodeStoreBuilders.builder(fileStore).build();
                return store;
            }

            @Override public NodeStore getNodeStore() {
                return store;
            }

            @Override public String getConnectionString() {
                return storePath;
            }

            // compact past all retained generations so deleted binaries become sweepable
            @Override public void postDataPrepare() throws Exception {
                for (int k = 0; k < gcOptions.getRetainedGenerations(); k++) {
                    fileStore.compactFull();
                }
                fileStore.cleanup();
            }

            @Override public void close() {
                fileStore.close();
            }

            @Override public void after() {
            }

            @Override public boolean isAvailable() {
                return true;
            }

            @Override public void preDataPrepare() throws Exception {
                NodeBuilder a = store.getRoot().builder();
                /* Create garbage by creating in-lined blobs (size < 16KB) */
                int number = 500;
                NodeBuilder content = a.child("content");
                for (int i = 0; i < number; i++) {
                    NodeBuilder c = content.child("x" + i);
                    for (int j = 0; j < 5; j++) {
                        c.setProperty("p" + j, store.createBlob(randomStream(j, 16384)));
                    }
                }
                store.merge(a, EmptyHook.INSTANCE, CommitInfo.EMPTY);
            }
        }

        /**
         * Requires 'AZURE_SECRET_KEY' to be set as an environment variable as well
         */
        class AzureSegmentStoreFixture extends SegmentStoreFixture {
            private static final String AZURE_DIR = "repository";
            private String container;

            @Override
            public NodeStore init(DataStoreBlobStore blobStore, File storeFile) throws Exception {
                Properties props = AzureDataStoreUtils.getAzureConfig();
                String accessKey = props.getProperty(AzureConstants.AZURE_STORAGE_ACCOUNT_NAME);
                String secretKey = props.getProperty(AzureConstants.AZURE_STORAGE_ACCOUNT_KEY);
                container = props.getProperty(AzureConstants.AZURE_BLOB_CONTAINER_NAME);
                container = container + System.currentTimeMillis();

                // Create the azure segment container
                String connectionString = getAzureConnectionString(accessKey, secretKey, container, AZURE_DIR);
                AzureUtilities.cloudBlobDirectoryFrom(connectionString, container, AZURE_DIR);

                // get the azure uri expected by the command
                storePath = getAzureUri(accessKey, container, AZURE_DIR);

                // initialize azure segment for test setup
                SegmentNodeStorePersistence segmentNodeStorePersistence =
                    ToolUtils.newSegmentNodeStorePersistence(ToolUtils.SegmentStoreType.AZURE, storePath);

                fileStore = fileStoreBuilder(storeFile).withBlobStore(blobStore)
                    .withCustomPersistence(segmentNodeStorePersistence).build();
                store = SegmentNodeStoreBuilders.builder(fileStore).build();
                return store;
            }

            protected String getAzureUri(String accountName, String container, String directory) {
                StringBuilder uri = new StringBuilder("az:");
                uri.append("https://").append(accountName).append(".blob.core.windows.net/");
                uri.append(container).append("/");
                uri.append(directory);
                return uri.toString();
            }

            protected String getAzureConnectionString(String accountName, String secret, String container, String directory) {
                StringBuilder builder = new StringBuilder();
                builder.append("AccountName=").append(accountName).append(";");
                builder.append("DefaultEndpointsProtocol=https;");
                builder.append("BlobEndpoint=https://").append(accountName).append(".blob.core.windows.net").append(";");
                builder.append("ContainerName=").append(container).append(";");
                builder.append("Directory=").append(directory).append(";");
                builder.append("AccountKey=").append(secret);
                return builder.toString();
            }

            @Override public void after() {
                try {
                    AzureDataStoreUtils.deleteContainer(container);
                } catch(Exception e) {
                    log.error("Error in cleaning the container {}", container, e);
                }
            }

            @Override public boolean isAvailable() {
                return AzureDataStoreUtils.isAzureConfigured();
            }
        }
    }

    // Abstraction over the blob/data-store backend under test
    interface DataStoreFixture {
        boolean isAvailable();
        DataStoreBlobStore init(TemporaryFolder folder) throws Exception;
        DataStoreBlobStore getDataStore();
        String getConfigPath();
        Type getType();
        void after();

        DataStoreFixture S3 = new S3DataStoreFixture();
        DataStoreFixture AZURE = new AzureDataStoreFixture();
        DataStoreFixture FDS = new FileDataStoreFixture();

        class S3DataStoreFixture implements DataStoreFixture {
            DataStoreBlobStore blobStore;
            String cfgFilePath;
            String container;

            @Override public boolean isAvailable() {
                return S3DataStoreUtils.isS3Configured();
            }

            @Override public DataStoreBlobStore init(TemporaryFolder folder) throws Exception {
                Properties props = S3DataStoreUtils.getS3Config();
                props.setProperty("cacheSize", "0");
                container = props.getProperty(S3Constants.S3_BUCKET);
                container = container + System.currentTimeMillis();
                props.setProperty(S3Constants.S3_BUCKET, container);
                DataStore ds = S3DataStoreUtils.getS3DataStore(S3DataStoreUtils.getFixtures().get(0), props,
                    folder.newFolder().getAbsolutePath());
                blobStore = new DataStoreBlobStore(ds);

                cfgFilePath = createTempConfig(
                    folder.newFile(getType().name() + String.valueOf(System.currentTimeMillis()) + ".config"), props);
                return blobStore;
            }

            @Override public DataStoreBlobStore getDataStore() {
                return blobStore;
            }

            @Override public String getConfigPath() {
                return cfgFilePath;
            }

            @Override public Type getType() {
                return Type.S3;
            }

            @Override public void after() {
                try {
                    S3DataStoreUtils.deleteBucket(container, new Date());
                } catch (Exception e) {
                    log.error("Error in cleaning the container {}", container, e);
                }
            }
        }

        class AzureDataStoreFixture implements DataStoreFixture {
            DataStoreBlobStore blobStore;
            String cfgFilePath;
            String container;

            @Override public boolean isAvailable() {
                return AzureDataStoreUtils.isAzureConfigured();
            }

            @Override public DataStoreBlobStore init(TemporaryFolder folder) throws Exception {
                Properties props = AzureDataStoreUtils.getAzureConfig();
                props.setProperty("cacheSize", "0");
                container = props.getProperty(AzureConstants.AZURE_BLOB_CONTAINER_NAME);
                container = container + System.currentTimeMillis();
                props.setProperty(AzureConstants.AZURE_BLOB_CONTAINER_NAME, container);
                DataStore ds = AzureDataStoreUtils.getAzureDataStore(props, folder.newFolder().getAbsolutePath());
                blobStore = new DataStoreBlobStore(ds);

                cfgFilePath = createTempConfig(
                    folder.newFile(getType().name() + String.valueOf(System.currentTimeMillis()) + ".config"), props);
                return blobStore;
            }

            @Override public DataStoreBlobStore getDataStore() {
                return blobStore;
            }

            @Override public String getConfigPath() {
                return cfgFilePath;
            }

            @Override public Type getType() {
                return Type.AZURE;
            }

            @Override public void after() {
                try {
                    AzureDataStoreUtils.deleteContainer(container);
                } catch (Exception e) {
                    log.error("Error in cleaning the container {}", container, e);
                }
            }
        }

        class FileDataStoreFixture implements DataStoreFixture {
            DataStoreBlobStore blobStore;
            String cfgFilePath;
            String container;

            @Override public boolean isAvailable() {
                return true;
            }

            @Override public DataStoreBlobStore init(TemporaryFolder folder) throws Exception {
                OakFileDataStore delegate = new OakFileDataStore();
                container = folder.newFolder().getAbsolutePath();
                delegate.setPath(container);
                delegate.init(null);
                blobStore = new DataStoreBlobStore(delegate);

                File cfgFile = folder.newFile();
                Properties props = new Properties();
                props.put("path", container);
                // NOTE(review): new Long(...) is deprecated; Long.valueOf would be preferred
                props.put("minRecordLength", new Long(4096));
                cfgFilePath = createTempConfig(cfgFile, props);
                return blobStore;
            }

            @Override public DataStoreBlobStore getDataStore() {
                return blobStore;
            }

            @Override public String getConfigPath() {
                return cfgFilePath;
            }

            @Override public Type getType() {
                return Type.FDS;
            }

            @Override public void after() {
            }
        }
    }

    // Builds the parameter list from the available (configured) fixture combinations
    static class FixtureHelper {
        static List<StoreFixture> getStoreFixtures() {
            return ImmutableList.of(StoreFixture.MONGO, StoreFixture.SEGMENT, StoreFixture.SEGMENT_AZURE);
        }

        static List<DataStoreFixture> getDataStoreFixtures() {
            return ImmutableList.of(DataStoreFixture.S3, DataStoreFixture.AZURE, DataStoreFixture.FDS);
        }

        static List<Object[]> get() {
            List<Object[]> fixtures = Lists.newArrayList();
            for (StoreFixture storeFixture : getStoreFixtures()) {
                if (storeFixture.isAvailable()) {
                    for (DataStoreFixture dsFixture : getDataStoreFixtures()) {
                        if (dsFixture.isAvailable()) {
                            fixtures.add(new Object[] {storeFixture, dsFixture});
                        }
                    }
                }
            }
            return fixtures;
        }
    }
}
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.social.connect.web; import static java.util.Arrays.*; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import static org.springframework.social.connect.web.test.StubOAuthTemplateBehavior.*; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; import static org.springframework.test.web.servlet.setup.MockMvcBuilders.*; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import org.junit.Ignore; import org.junit.Test; import org.springframework.http.HttpStatus; import org.springframework.social.connect.Connection; import org.springframework.social.connect.ConnectionData; import org.springframework.social.connect.ConnectionFactory; import org.springframework.social.connect.DuplicateConnectionException; import org.springframework.social.connect.support.ConnectionFactoryRegistry; import org.springframework.social.connect.web.test.StubConnectionRepository; import org.springframework.social.connect.web.test.StubOAuth1ConnectionFactory; import org.springframework.social.connect.web.test.StubOAuth2ConnectionFactory; import org.springframework.social.connect.web.test.StubOAuthTemplateBehavior; import org.springframework.social.connect.web.test.TestApi1; import 
org.springframework.social.connect.web.test.TestApi2;
import org.springframework.social.oauth1.OAuthToken;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.context.request.WebRequest;

/**
 * MockMvc-based tests for {@code ConnectController}, covering the connection status views,
 * connection removal, and the OAuth1/OAuth2 connect + callback flows (including error paths).
 * Provider behavior is simulated by the Stub* connection factories from the test package.
 */
public class ConnectControllerTest {

	// Authorize URL the stub OAuth2 factory is expected to redirect to (before the state param).
	private static final String OAUTH2_AUTHORIZE_URL = "https://someprovider.com/oauth/authorize?client_id=clientId&response_type=code&redirect_uri=http%3A%2F%2Flocalhost%2Fconnect%2Foauth2Provider";

	@Test
	@Ignore("Revisit this and assert/fix expectations")
	public void connect_noSuchProvider() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret");
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, null)).build();
		// POST to a provider id that was never registered; expectations still to be defined.
		mockMvc.perform(post("/connect/noSuchProvider"));
	}

	@Test
	public void connectionStatus() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi1> connectionFactory1 = new StubOAuth1ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory1);
		ConnectionFactory<TestApi2> connectionFactory2 = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory2);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		// Only oauth1Provider has an existing connection; oauth2Provider stays unconnected.
		connectionRepository.addConnection(connectionFactory1.createConnection(new ConnectionData("oauth1Provider", "provider1User1", null, null, null, null, null, null, null)));
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, connectionRepository)).build();
		// Aggregate status page lists all registered providers plus the connection map.
		mockMvc.perform(get("/connect"))
			.andExpect(view().name("connect/status"))
			.andExpect(model().attribute("providerIds", new HashSet<String>(asList("oauth1Provider", "oauth2Provider"))))
			.andExpect(model().attributeExists("connectionMap"));
		// Connected provider renders the "...Connected" view with its connections.
		mockMvc.perform(get("/connect/oauth1Provider"))
			.andExpect(view().name("connect/oauth1ProviderConnected"))
			.andExpect(model().attributeExists("connections"))
			.andExpect(request().attribute("social.addConnection.duplicate", nullValue()))
			.andExpect(request().attribute("social.provider.error", nullValue()));
		// Unconnected provider renders the "...Connect" view.
		mockMvc.perform(get("/connect/oauth2Provider"))
			.andExpect(view().name("connect/oauth2ProviderConnect"))
			.andExpect(request().attribute("social.addConnection.duplicate", nullValue()))
			.andExpect(request().attribute("social.provider.error", nullValue()));
	}

	@Test
	public void connectionStatus_withErrorsInFlashScope() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory2 = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory2);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, connectionRepository)).build();
		// Should convert errors in "flash" scope to model attributes and remove them from "flash"
		mockMvc.perform(get("/connect/oauth2Provider").sessionAttr("social_addConnection_duplicate", new DuplicateConnectionException(null)))
			.andExpect(view().name("connect/oauth2ProviderConnect"))
			.andExpect(request().sessionAttribute("social_addConnection_duplicate", nullValue()))
			.andExpect(request().attribute("social_addConnection_duplicate", true));
		mockMvc.perform(get("/connect/oauth2Provider").sessionAttr("social_provider_error", new HttpClientErrorException(HttpStatus.INTERNAL_SERVER_ERROR)))
			.andExpect(view().name("connect/oauth2ProviderConnect"))
			.andExpect(request().sessionAttribute("social_provider_error", nullValue()))
			.andExpect(request().attribute("social_provider_error", true));
	}

	@Test
	public void removeConnections() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		// Two providers with two connections each; only oauth2Provider's should be deleted.
		connectionRepository.addConnection(connectionFactory.createConnection(new ConnectionData("provider1", "provider1User1", null, null, null, null, null, null, null)));
		connectionRepository.addConnection(connectionFactory.createConnection(new ConnectionData("provider1", "provider1User2", null, null, null, null, null, null, null)));
		connectionRepository.addConnection(connectionFactory.createConnection(new ConnectionData("oauth2Provider", "provider2User1", null, null, null, null, null, null, null)));
		connectionRepository.addConnection(connectionFactory.createConnection(new ConnectionData("oauth2Provider", "provider2User2", null, null, null, null, null, null, null)));
		assertEquals(2, connectionRepository.findConnections("provider1").size());
		assertEquals(2, connectionRepository.findConnections("oauth2Provider").size());
		ConnectController connectController = new ConnectController(connectionFactoryLocator, connectionRepository);
		List<DisconnectInterceptor<?>> interceptors = getDisconnectInterceptor();
		connectController.setDisconnectInterceptors(interceptors);
		MockMvc mockMvc = standaloneSetup(connectController).build();
		mockMvc.perform(delete("/connect/oauth2Provider"))
			.andExpect(redirectedUrl("/connect/oauth2Provider"));
		assertEquals(2, connectionRepository.findConnections("provider1").size());
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Interceptor 0 is typed for TestApi1 and must not fire; interceptor 1 (TestApi2) must.
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(0))).preDisconnectInvoked);
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(0))).postDisconnectInvoked);
		assertNull(((TestConnectInterceptor<?>)(interceptors.get(0))).connectionFactory);
		assertTrue(((TestConnectInterceptor<?>)(interceptors.get(1))).preDisconnectInvoked);
		assertTrue(((TestConnectInterceptor<?>)(interceptors.get(1))).postDisconnectInvoked);
		assertSame(connectionFactory, ((TestConnectInterceptor<?>)(interceptors.get(1))).connectionFactory);
	}

	@Test
	public void removeConnection() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		connectionRepository.addConnection(connectionFactory.createConnection(new ConnectionData("oauth2Provider", "provider1User1", null, null, null, null, null, null, null)));
		connectionRepository.addConnection(connectionFactory.createConnection(new ConnectionData("oauth2Provider", "provider1User2", null, null, null, null, null, null, null)));
		assertEquals(2, connectionRepository.findConnections("oauth2Provider").size());
		ConnectController connectController = new ConnectController(connectionFactoryLocator, connectionRepository);
		List<DisconnectInterceptor<?>> interceptors = getDisconnectInterceptor();
		connectController.setDisconnectInterceptors(interceptors);
		MockMvc mockMvc = standaloneSetup(connectController).build();
		// Deleting a single connection by provider user id leaves the sibling connection intact.
		mockMvc.perform(delete("/connect/oauth2Provider/provider1User1"))
			.andExpect(redirectedUrl("/connect/oauth2Provider"));
		assertEquals(1, connectionRepository.findConnections("oauth2Provider").size());
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(0))).preDisconnectInvoked);
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(0))).postDisconnectInvoked);
		assertNull(((TestConnectInterceptor<?>)(interceptors.get(0))).connectionFactory);
		assertTrue(((TestConnectInterceptor<?>)(interceptors.get(1))).preDisconnectInvoked);
		assertTrue(((TestConnectInterceptor<?>)(interceptors.get(1))).postDisconnectInvoked);
		assertSame(connectionFactory, ((TestConnectInterceptor<?>)(interceptors.get(1))).connectionFactory);
	}

	// OAuth 1

	@Test
	public void connect_OAuth1Provider() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi1> connectionFactory = new StubOAuth1ConnectionFactory("clientId", "clientSecret");
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		ConnectController connectController = new ConnectController(connectionFactoryLocator, null);
		List<ConnectInterceptor<?>> interceptors = getConnectInterceptor();
		connectController.setConnectInterceptors(interceptors);
		connectController.afterPropertiesSet();
		MockMvc mockMvc = standaloneSetup(connectController).build();
		// Kick off the OAuth1 dance: redirect to provider, request token stashed in session.
		mockMvc.perform(post("/connect/oauth1Provider"))
			.andExpect(redirectedUrl("https://someprovider.com/oauth/authorize?oauth_token=requestToken"))
			.andExpect(request().sessionAttribute("oauthToken", samePropertyValuesAs(new OAuthToken("requestToken", "requestTokenSecret"))));
		// Check for preConnect() only. The postConnect() won't be invoked until after callback
		TestConnectInterceptor<?> textInterceptor1 = (TestConnectInterceptor<?>)(interceptors.get(0));
		assertTrue(textInterceptor1.preConnectInvoked);
		assertEquals("oauth1Provider", textInterceptor1.connectionFactory.getProviderId());
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(1))).preConnectInvoked);
	}

	@Test
	public void connect_OAuth1Provider_exceptionWhileFetchingRequestToken() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi1> connectionFactory = new StubOAuth1ConnectionFactory("clientId", "clientSecret", StubOAuthTemplateBehavior.THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, null)).build();
		// Provider failure during request-token fetch: bounce back with the error in session flash.
		mockMvc.perform(post("/connect/oauth1Provider"))
			.andExpect(redirectedUrl("/connect/oauth1Provider"))
			.andExpect(request().sessionAttribute("social_provider_error", notNullValue()));
	}

	@Test
	public void oauth1Callback() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi1> connectionFactory = new StubOAuth1ConnectionFactory("clientId", "clientSecret");
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		ConnectController connectController = new ConnectController(connectionFactoryLocator, connectionRepository);
		List<ConnectInterceptor<?>> interceptors = getConnectInterceptor();
		connectController.setConnectInterceptors(interceptors);
		connectController.afterPropertiesSet();
		MockMvc mockMvc = standaloneSetup(connectController).build();
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Callback leg: request token from session + verifier param complete the connection.
		mockMvc.perform(get("/connect/oauth1Provider")
				.sessionAttr("oauthToken", new OAuthToken("requestToken", "requestTokenSecret"))
				.param("oauth_token", "requestToken")
				.param("oauth_verifier", "verifier"))
			.andExpect(redirectedUrl("/connect/oauth1Provider"));
		List<Connection<?>> connections = connectionRepository.findConnections("oauth1Provider");
		assertEquals(1, connections.size());
		assertEquals("oauth1Provider", connections.get(0).getKey().getProviderId());
		// Check for postConnect() only. The preConnect() is only invoked during the initial portion of the flow
		TestConnectInterceptor<?> testInterceptor1 = (TestConnectInterceptor<?>)(interceptors.get(0));
		assertTrue(testInterceptor1.postConnectInvoked);
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(1))).postConnectInvoked);
	}

	@Test
	public void oauth1Callback_exceptionWhileFetchingAccessToken() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi1> connectionFactory = new StubOAuth1ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, connectionRepository)).build();
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Access-token exchange blows up: no connection stored, error left in session flash.
		mockMvc.perform(get("/connect/oauth1Provider")
				.sessionAttr("oauthToken", new OAuthToken("requestToken", "requestTokenSecret"))
				.param("oauth_token", "requestToken")
				.param("oauth_verifier", "verifier"))
			.andExpect(redirectedUrl("/connect/oauth1Provider"))
			.andExpect(request().sessionAttribute("social_provider_error", notNullValue()));
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
	}

	// OAuth 2

	@Test
	public void connect_OAuth2Provider() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret");
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		ConnectController connectController = new ConnectController(connectionFactoryLocator, null);
		List<ConnectInterceptor<?>> interceptors = getConnectInterceptor();
		connectController.setConnectInterceptors(interceptors);
		connectController.afterPropertiesSet();
		MockMvc mockMvc = standaloneSetup(connectController).build();
		// Kick off the OAuth2 dance: redirect to the authorize URL with the CSRF state param.
		mockMvc.perform(post("/connect/oauth2Provider"))
			.andExpect(redirectedUrl(OAUTH2_AUTHORIZE_URL + "&state=STATE"));
		// Check for preConnect() only. The postConnect() won't be invoked until after callback
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(0))).preConnectInvoked);
		TestConnectInterceptor<?> testInterceptor2 = (TestConnectInterceptor<?>)(interceptors.get(1));
		assertTrue(testInterceptor2.preConnectInvoked);
		assertEquals("oauth2Provider", testInterceptor2.connectionFactory.getProviderId());
	}

	@Test
	public void connect_OAuth2Provider_withScope() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret");
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		ConnectController connectController = new ConnectController(connectionFactoryLocator, null);
		connectController.afterPropertiesSet();
		MockMvc mockMvc = standaloneSetup(connectController).build();
		// Scope parameter is URL-encoded ("," -> %2C) and appended before state.
		mockMvc.perform(post("/connect/oauth2Provider").param("scope", "read,write"))
			.andExpect(redirectedUrl(OAUTH2_AUTHORIZE_URL + "&scope=read%2Cwrite&state=STATE"));
	}

	@Test
	public void oauth2Callback() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret");
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		ConnectController connectController = new ConnectController(connectionFactoryLocator, connectionRepository);
		List<ConnectInterceptor<?>> interceptors = getConnectInterceptor();
		connectController.setConnectInterceptors(interceptors);
		connectController.afterPropertiesSet();
		MockMvc mockMvc = standaloneSetup(connectController).build();
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Authorization-code callback completes the connection and redirects to status view.
		mockMvc.perform(get("/connect/oauth2Provider").param("code", "oauth2Code"))
			.andExpect(redirectedUrl("/connect/oauth2Provider"));
		List<Connection<?>> connections = connectionRepository.findConnections("oauth2Provider");
		assertEquals(1, connections.size());
		assertEquals("oauth2Provider", connections.get(0).getKey().getProviderId());
		// Check for postConnect() only. The preConnect() is only invoked during the initial portion of the flow
		assertFalse(((TestConnectInterceptor<?>)(interceptors.get(0))).postConnectInvoked);
		TestConnectInterceptor<?> testInterceptor2 = (TestConnectInterceptor<?>)(interceptors.get(1));
		assertTrue(testInterceptor2.postConnectInvoked);
	}

	@Test
	public void oauth2Callback_exceptionWhileFetchingAccessToken() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, connectionRepository)).build();
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Token exchange fails: no connection stored, error surfaced via session flash.
		mockMvc.perform(get("/connect/oauth2Provider").param("code", "oauth2Code"))
			.andExpect(redirectedUrl("/connect/oauth2Provider"))
			.andExpect(request().sessionAttribute("social_provider_error", notNullValue()));
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
	}

	@Test
	public void oauth2ErrorCallback() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, connectionRepository)).build();
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Provider denied authorization: all three error params are collected into one map.
		HashMap<String, String> expectedError = new HashMap<String, String>();
		expectedError.put("error", "access_denied");
		expectedError.put("errorDescription", "The user said no.");
		expectedError.put("errorUri", "http://provider.com/user/said/no");
		mockMvc.perform(get("/connect/oauth2Provider").param("error", "access_denied")
				.param("error_description", "The user said no.")
				.param("error_uri", "http://provider.com/user/said/no"))
			.andExpect(redirectedUrl("/connect/oauth2Provider"))
			.andExpect(request().sessionAttribute("social_authorization_error", notNullValue()))
			.andExpect(request().sessionAttribute("social_authorization_error", expectedError));
	}

	@Test
	public void oauth2ErrorCallback_noDescriptionOrUri() throws Exception {
		ConnectionFactoryRegistry connectionFactoryLocator = new ConnectionFactoryRegistry();
		ConnectionFactory<TestApi2> connectionFactory = new StubOAuth2ConnectionFactory("clientId", "clientSecret", THROW_EXCEPTION);
		connectionFactoryLocator.addConnectionFactory(connectionFactory);
		StubConnectionRepository connectionRepository = new StubConnectionRepository();
		MockMvc mockMvc = standaloneSetup(new ConnectController(connectionFactoryLocator, connectionRepository)).build();
		assertEquals(0, connectionRepository.findConnections("oauth2Provider").size());
		// Only the mandatory "error" param present; optional description/uri keys are omitted.
		HashMap<String, String> expectedError = new HashMap<String, String>();
		expectedError.put("error", "access_denied");
		mockMvc.perform(get("/connect/oauth2Provider").param("error", "access_denied"))
			.andExpect(redirectedUrl("/connect/oauth2Provider"))
			.andExpect(request().sessionAttribute("social_authorization_error", notNullValue()))
			.andExpect(request().sessionAttribute("social_authorization_error", expectedError));
	}

	// Two interceptors per list: index 0 targets TestApi1, index 1 targets TestApi2, so tests
	// can assert type-based interceptor dispatch.
	private List<ConnectInterceptor<?>> getConnectInterceptor() {
		List<ConnectInterceptor<?>> interceptors = new ArrayList<ConnectInterceptor<?>>();
		interceptors.add(new TestConnectInterceptor<TestApi1>() {});
		interceptors.add(new TestConnectInterceptor<TestApi2>() {});
		return interceptors;
	}

	private List<DisconnectInterceptor<?>> getDisconnectInterceptor() {
		List<DisconnectInterceptor<?>> interceptors = new ArrayList<DisconnectInterceptor<?>>();
		interceptors.add(new TestConnectInterceptor<TestApi1>() {});
		interceptors.add(new TestConnectInterceptor<TestApi2>() {});
		return interceptors;
	}

	/**
	 * Recording interceptor for both connect and disconnect callbacks: stores each callback's
	 * arguments and flips a boolean flag per lifecycle method so tests can verify invocation.
	 */
	private static abstract class TestConnectInterceptor<T> implements ConnectInterceptor<T>, DisconnectInterceptor<T> {
		ConnectionFactory<T> connectionFactory = null;
		@SuppressWarnings("unused")
		MultiValueMap<String, String> parameters = null;
		@SuppressWarnings("unused")
		WebRequest preConnectRequest = null;
		@SuppressWarnings("unused")
		WebRequest postConnectRequest = null;
		@SuppressWarnings("unused")
		WebRequest preDisconnectRequest = null;
		@SuppressWarnings("unused")
		WebRequest postDisconnectRequest = null;
		@SuppressWarnings("unused")
		Connection<T> connection = null;
		boolean preConnectInvoked = false;
		boolean postConnectInvoked = false;
		boolean preDisconnectInvoked = false;
		boolean postDisconnectInvoked = false;

		public void preConnect(ConnectionFactory<T> connectionFactory, MultiValueMap<String, String> parameters, WebRequest request) {
			this.connectionFactory = connectionFactory;
			this.parameters = parameters;
			this.preConnectRequest = request;
			this.preConnectInvoked = true;
		}

		public void postConnect(Connection<T> connection, WebRequest request) {
			this.connection = connection;
			this.postConnectRequest = request;
			this.postConnectInvoked = true;
		}

		public void preDisconnect(ConnectionFactory<T> connectionFactory, WebRequest request) {
			this.connectionFactory = connectionFactory;
			this.preDisconnectRequest = request;
			this.preDisconnectInvoked = true;
		}

		public void postDisconnect(ConnectionFactory<T> connectionFactory, WebRequest request) {
			this.connectionFactory = connectionFactory;
			this.postDisconnectRequest = request;
			this.postDisconnectInvoked = true;
		}
	}
}
package org.apache.velocity.runtime.directive; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.velocity.exception.ExtendedParseException; import org.apache.velocity.runtime.parser.ParseException; import org.apache.velocity.runtime.parser.Token; import org.apache.velocity.util.StringUtils; /** * Exception to indicate problem happened while constructing #macro() * * For internal use in parser - not to be passed to app level * * @author <a href="mailto:[email protected]">Geir Magnusson Jr.</a> * @author <a href="[email protected]">Henning P. Schmiedehausen</a> * @version $Id$ */ public class MacroParseException extends ParseException implements ExtendedParseException { private final String templateName; /** * Version Id for serializable */ private static final long serialVersionUID = -4985224672336070689L; /** * @param msg * @param templateName * @param currentToken */ public MacroParseException(final String msg, final String templateName, final Token currentToken) { super(msg + " at "); this.currentToken = currentToken; this.templateName = templateName; } /** * returns the Template name where this exception occured. * @return The Template name where this exception occured. 
* @since 1.5 */ public String getTemplateName() { return templateName; } /** * returns the line number where this exception occured. * @return The line number where this exception occured. * @since 1.5 */ public int getLineNumber() { if ((currentToken != null) && (currentToken.next != null)) { return currentToken.next.beginLine; } else if (currentToken != null) { return currentToken.beginLine; } else { return -1; } } /** * returns the column number where this exception occured. * @return The column number where this exception occured. * @since 1.5 */ public int getColumnNumber() { if ((currentToken != null) && (currentToken.next != null)) { return currentToken.next.beginColumn; } else if (currentToken != null) { return currentToken.beginColumn; } else { return -1; } } /** * This method has the standard behavior when this object has been * created using the standard constructors. Otherwise, it uses * "currentToken" and "expectedTokenSequences" to generate a parse * error message and returns it. If this object has been created * due to a parse error, and you do not catch it (it gets thrown * from the parser), then this method is called during the printing * of the final stack trace, and hence the correct error message * gets displayed. * @return the current message. 
* @since 1.5 */ public String getMessage() { if (!specialConstructor) { StringBuilder sb = new StringBuilder(super.getMessage()); appendTemplateInfo(sb); return sb.toString(); } int maxSize = 0; StringBuilder expected = new StringBuilder(); for (int[] expectedTokenSequence : expectedTokenSequences) { if (maxSize < expectedTokenSequence.length) { maxSize = expectedTokenSequence.length; } for (int j = 0; j < expectedTokenSequence.length; j++) { expected.append(tokenImage[expectedTokenSequence[j]]).append(" "); } if (expectedTokenSequence[expectedTokenSequence.length - 1] != 0) { expected.append("..."); } expected.append(eol).append(" "); } StringBuilder retval = new StringBuilder("Encountered \""); Token tok = currentToken.next; for (int i = 0; i < maxSize; i++) { if (i != 0) { retval.append(" "); } if (tok.kind == 0) { retval.append(tokenImage[0]); break; } retval.append(add_escapes(tok.image)); tok = tok.next; } retval.append("\""); appendTemplateInfo(retval); if (expectedTokenSequences.length == 1) { retval.append("Was expecting:").append(eol).append(" "); } else { retval.append("Was expecting one of:").append(eol).append(" "); } // avoid JDK 1.3 StringBuffer.append(Object o) vs 1.4 StringBuffer.append(StringBuffer sb) gotcha. retval.append(expected.toString()); return retval.toString(); } /** * @param sb * @since 1.5 */ protected void appendTemplateInfo(final StringBuilder sb) { sb.append(StringUtils.formatFileString(getTemplateName(), getLineNumber(), getColumnNumber())); sb.append(eol); } }
import java.util.Random;
import java.util.Scanner;

/**
 * A single game entity (player, zombie, mini-boss or boss) plus, in the same
 * class, the console game loop that moves the player around a tile map and
 * resolves battles.
 *
 * <p>NOTE(review): this class mixes entity state and game-master logic; many
 * fields (e.g. {@code dmgDone}, {@code type}, {@code hp}) are shared mutable
 * state written from several methods, so statement order matters throughout.
 */
public class mob implements GameInterface{

    String name = "ryan";          // display name; overwritten by the constructor
    String type;                   // character class: "Warrior", "Ninja" or "Hunter"
    String boss;
    String characterClass;
    String givenName;
    int givenHp;
    String givenType;
    boolean isNPC;
    double hp;                     // current hit points
    String useWeapon;
    int weapon;
    boolean condition;
    boolean userCondition;         // true when this mob is an NPC (see constructor)
    boolean deadCondition;
    boolean checkCondition;
    int n = 5;
    int dmgDone;                   // last damage roll; shared by the battle methods
    private int i =0;
    int weapon1, weapon2;
    int[] weaponArray = new int[2]; // encoded weapon ids for this character class
    int callCount = 0;
    String[] bossNameArray ={"BOSS 1","BOSS 2","BOSS 3"};
    String[] zombieNameArray ={"ZOMBIE 1", "ZOMBIE 2", "ZOMBIE 3"};
    mob[] hero1;                   // NOTE(review): never assigned here; passed to dmgUser/getUserHp but unused there

    /**
     * Creates a mob with the given name, class type, hit points and NPC flag.
     */
    public mob(String givenName, String givenType, double givenHp, boolean isNPC){
        name = givenName;
        type = givenType;
        hp = givenHp;
        userCondition = isNPC;
    }

    /** Prints and returns the (never-set) checkCondition flag. */
    public boolean getCondition(){
        System.out.println(checkCondition);
        return checkCondition;
    }

    /** Prints whether this mob is an NPC ("npc confirmed") or a user. */
    public void isUser(boolean userCondition){
        if(userCondition == true){
            System.out.println("npc confirmed");
        }
        //System.out.println(checkCondition);
        if( userCondition == false){
            System.out.println("user confirmed");
        }
    }

    /** Adds {@code inc} hit points. */
    public void increaseHp(double inc){
        hp += inc;
    }

    /** Removes {@code inc} hit points. */
    public void decreaseHp(double inc){
        hp -= inc;
    }

    /** @return current hit points. */
    public double getHp(){
        return hp;
    }

    /** Prints and returns whether hp has dropped to zero or below. */
    public boolean isDead(){
        deadCondition = false;
        if( hp <= 0.0){
            deadCondition = true;
        }
        System.out.println(deadCondition);
        return deadCondition;
    }

    /**
     * Copies the two weapon ids into {@link #weaponArray} and prints the
     * human-readable name for each recognised id (10/11 Warrior, 20/21 Ninja,
     * 30/31 Hunter).
     */
    public void weaponDeCoder(int[] weapon){
        for(int i=0; i<2;i++){
            weaponArray[i] = weapon[i];
        }
        if(weaponArray[0] == 10){
            String weaponInUse = "Broad Sword";
            System.out.println(weaponInUse);
        }
        if(weaponArray[1] == 11){
            String weaponInUse = "knife";
            System.out.println(weaponInUse);
        }
        if(weaponArray[0] == 20){
            String weaponInUse = "Katana";
            System.out.println(weaponInUse);
        }
        if(weaponArray[1] == 21){
            String weaponInUse = "shurikens";
            System.out.println(weaponInUse);
        }
        if(weaponArray[0] == 30){
            String weaponInUse = "Long Bow";
            System.out.println(weaponInUse);
        }
        if(weaponArray[1] == 31){
            String weaponInUse = "knife";
            System.out.println(weaponInUse);
        }
    }

    /**
     * Sets and returns the base hp for a character class
     * (Hunter 12.0, Warrior 17.0, Ninja 13.5).
     * Side effect: overwrites this mob's {@link #hp}.
     */
    public double hpDeCoder(String type){
        if( type.equals("Hunter")){
            hp = 12.0;
        }
        if(type.equals("Warrior")){
            hp = 17.0;
        }
        if(type.equals("Ninja")){
            hp = 13.5;
        }
        return hp;
    }

    /*
    public void getClassType(boolean checkCondition){
        if( checkCondition = false){
            System.out.println("Player is of type "+type);
        }else{
            System.out.println("Mob is of type" +type);
        }
    }
    */

    /**
     * Rolls damage for the given class and chosen weapon slot
     * (0 = main, 1 = secondary), prints the roll and returns it.
     * Also re-populates {@link #weaponArray} for that class.
     */
    public int typeOffense(String givenType, int useWhichWeapon){
        type = givenType;
        int weaponToUse = useWhichWeapon; // 0 or 1
        // initialized at 00 and 01 for unarmed / non classes
        weaponArray[0] = 00;
        weaponArray[1] = 01;
        Random rn = new Random();
        int dmgDone = 0;  // local; shadows the field of the same name
        if(type.equals("Warrior")){
            weaponArray[0] = 10;
            weaponArray[1] = 11;
            if (weaponToUse == 0){
                String weaponInUse = "Broad Sword";
                dmgDone = rn.nextInt(7);
            }
            if (weaponToUse == 1){
                String weaponInUse = "Knife";
                dmgDone = rn.nextInt(3);
            }
            System.out.println(dmgDone);
        }
        if(type.equals("Ninja")){
            weaponArray[0] = 20;
            weaponArray[1] = 21;
            if (weaponToUse == 0){
                String weaponInUse = "Katana";
                dmgDone = rn.nextInt(9);
            }
            if (weaponToUse == 1){
                String weaponInUse = "Shurikens";
                dmgDone = rn.nextInt(4);
            }
            System.out.println(dmgDone);
        }
        if(type.equals("Hunter")){
            weaponArray[0] = 30;
            weaponArray[1] = 31;
            if (weaponToUse == 0){
                String weaponInUse = "Long Bow";
                dmgDone = rn.nextInt(9);
            }
            if (weaponToUse == 1){
                String weaponInUse = "Knife";
                dmgDone = rn.nextInt(3);
            }
            System.out.println(dmgDone);
        }
        return dmgDone;
    }

    /** Placeholder — defense is not implemented. */
    public void typeDefense(){
    }

    /*public void weaponPopulator(String weaponName){
        weapon = weaponName;
        String[] weapon = new String[i];
        for(i = 0; i<n; i++){
            weapon[i] = n;
            n++;
        }
    }*/

    /** Heals this mob by 10 hp and prints the new total. */
    public void useHealthPot(){
        increaseHp(10);
        System.out.println("Used HEALTHPOT, health increased by 10");
        getHp();
        System.out.println("Current hp is: "+hp);
    }

    /** Prints and returns this mob's name. */
    public String getName(){
        System.out.println(name);
        return name;
    }

    /**
     * Prints the remaining hp of mob number {@code mobNumber} in the array.
     * NOTE(review): the copy loop and its local {@code currentHp} are dead
     * work — only the post-loop lookup is used.
     */
    public void getMobHp(mob[] mob, int mobNumber){
        mob[] mobObjArray = new mob[mob.length];
        for(int i =0; i<mob.length; i++){
            mobObjArray[i] = mob[i];
            double currentHp = mobObjArray[i].getHp();
        }
        double currentHp = mobObjArray[mobNumber].getHp();
        System.out.println("Mob has has "+currentHp+" hp left");
    }

    /**
     * Applies {@code damageDone} hp of damage to mob {@code mobElement}.
     * NOTE(review): both weapon branches do the same thing, and the
     * typeOffense() call re-rolls damage whose result is discarded —
     * presumably leftovers; confirm before changing.
     */
    public void attack(mob[] mob, int mobElement,int weaponNumber,String characterType,int damageDone){
        int mobNum = mobElement;
        int dmgDone = damageDone;
        int weapNumb = weaponNumber;
        String type = characterType;
        typeOffense(type, weaponNumber); // returns array
        System.out.println("The amount of damage you are about to do is eqal to: "+dmgDone);
        if (weaponNumber == 0){
            mob[mobNum].decreaseHp(dmgDone);
        }
        if (weaponNumber == 1){
            mob[mobNum].decreaseHp(dmgDone);
        }
    }

    /** Heals mob {@code mobNumber}; note the printed hp is THIS mob's hp field. */
    public void mobIncreaseHp(mob[] mob, int mobNumber, double inc){
        double hpChange = inc;
        mob[mobNumber].increaseHp(inc);
        System.out.println("The new hp for mob "+mobNumber+" is "+hp);
    }

    /**
     * Moves the player one tile (1 up, 2 down, 3 right, 4 left), or shows
     * hp (5) / the map (6). After a move it fires the event for whatever the
     * destination tile holds on {@code checkMap}: 0 wall, 1 zombie, 2 chest,
     * 3 boss, 4 mini-boss, 5 start, 7 path. The map convention throughout is
     * {@code map[y][x]}; 5 marks the current position, 7 a visited tile.
     */
    public void moveCurrentLocation(int[][] map, int[][] checkMap, int direction, mob[] miniBossObjArray,mob[] bossObjArray ,mob[] zombieObjArray , String[] bossNameArray){
        int k =0;
        int xPos = 0;
        int yPos = 0;
        xPos = findCurrentX(map);
        yPos = findCurrentY(map);
        if( direction == 1){
            // up: y + 1
            if(checkMap[yPos+1][xPos] == 0){
                System.out.println("");
                System.out.println("You cant go this way!");
            }else{
                System.out.println("");
                //System.out.println("Entered the else statement gg");
                map[yPos+1][xPos] = 5;
                int yNewPos = yPos+1;
                //System.out.println("Set "+xPos+", "+yNewPos+" as current location");
                if (checkMap[yNewPos][xPos] ==7){
                    System.out.println("You are on the path...");
                }
                if(checkMap[yNewPos][xPos] == 0){
                    System.out.println("You cant go this way!");
                }
                if(checkMap[yNewPos][xPos] == 2){
                    System.out.println("You have discovered a chest!");
                    atChest();
                    resetTile(checkMap, xPos, yNewPos);
                }
                if(checkMap[yNewPos][xPos] == 3){
                    System.out.println("You have discovered a boss!");
                    atBoss(checkMap,xPos, yNewPos,bossObjArray,bossNameArray);
                }
                if(checkMap[yNewPos][xPos] == 4){
                    System.out.println("You have discovered a mini-boss");
                    atMiniBoss(checkMap,xPos,yNewPos, miniBossObjArray);
                }
                if(checkMap[yNewPos][xPos] ==1){
                    System.out.println("You are attacked by zombies...");
                    atZombie(checkMap,xPos,yNewPos, zombieObjArray);
                }
                if(checkMap[yNewPos][xPos] == 5){
                    System.out.println("You are back at the start.");
                    resetTile(checkMap,xPos,yNewPos);
                }
                // mark the tile we just left as visited
                map[yPos][xPos] = 7;
            }
        }
        if( direction == 2 ){
            // down: y - 1
            if(checkMap[yPos-1][xPos] == 0){
                System.out.println("");
                System.out.println("You cant go this way!");
            }else{
                System.out.println("");
                //System.out.println("Entered the else statement gg");
                map[yPos-1][xPos] = 5;
                int yNewPos = yPos-1;
                //System.out.println("Set "+xPos+", "+yNewPos+" as current location");
                if (checkMap[yNewPos][xPos] ==7){
                    System.out.println("You are on the path...");
                }
                if(checkMap[yNewPos][xPos] == 0){
                    System.out.println("You cant go this way!");
                }
                if(checkMap[yNewPos][xPos] == 2){
                    System.out.println("You have discovered a chest!");
                    atChest();
                    resetTile(checkMap, xPos, yNewPos);
                }
                if(checkMap[yNewPos][xPos] == 3){
                    System.out.println("You have discovered a boss!");
                    atBoss(checkMap,xPos, yNewPos,bossObjArray,bossNameArray);
                }
                if(checkMap[yNewPos][xPos] == 4){
                    System.out.println("You have discovered a mini-boss");
                    atMiniBoss(checkMap, xPos, yNewPos, miniBossObjArray);
                }
                if(checkMap[yNewPos][xPos] ==1){
                    System.out.println("You are attacked by zombies...");
                    atZombie(checkMap,xPos,yNewPos,zombieObjArray);
                }
                if(checkMap[yNewPos][xPos] == 5){
                    System.out.println("You are back at the start.");
                    resetTile(checkMap,xPos,yNewPos);
                }
                map[yPos][xPos] = 7;
            }
        }
        if( direction == 3 ){
            // right: x + 1
            if(checkMap[yPos][xPos+1] == 0){
                System.out.println("");
                System.out.println("You cant go this way!");
            }else{
                System.out.println("");
                //System.out.println("Entered the else statement gg");
                map[yPos][xPos+1] = 5;
                int xNewPos = xPos+1;
                System.out.println("Set "+xNewPos+", "+yPos+" as current location");
                if (checkMap[yPos][xNewPos] ==7){
                    System.out.println("You are on the path...");
                }
                if(checkMap[yPos][xNewPos] == 0){
                    System.out.println("You cant go this way!");
                }
                if(checkMap[yPos][xNewPos] == 2){
                    System.out.println("You have discovered a chest!");
                    atChest();
                    resetTile(checkMap, xNewPos, yPos);
                }
                if(checkMap[yPos][xNewPos] == 3){
                    System.out.println("You have discovered a boss!");
                    atBoss(checkMap,xNewPos, yPos,bossObjArray,bossNameArray);
                }
                if(checkMap[yPos][xNewPos] == 4){
                    System.out.println("You have discovered a mini-boss");
                    atMiniBoss(checkMap, xNewPos, yPos,miniBossObjArray);
                }
                if(checkMap[yPos][xNewPos] ==1){
                    System.out.println("You are attacked by zombies...");
                    atZombie(checkMap,xNewPos,yPos,zombieObjArray);
                }
                if(checkMap[yPos][xNewPos] == 5){
                    System.out.println("You are back at the start.");
                    resetTile(checkMap,xNewPos,yPos);
                }
                map[yPos][xPos] = 7;
            }
        }
        if( direction == 4){
            // left: x - 1
            System.out.println("");
            if(checkMap[yPos][xPos-1] == 0){
                System.out.println("You cant go this way!");
            }else{
                System.out.println("");
                //System.out.println("Entered the else statement gg");
                map[yPos][xPos-1] = 5;
                int xNewPos = xPos-1;
                //System.out.println("Set "+xNewPos+", "+yPos+" as current location");
                if (checkMap[yPos][xNewPos] ==7){
                    System.out.println("You are on the path...");
                }
                if(checkMap[yPos][xNewPos] == 0){
                    System.out.println("You cant go this way!");
                }
                if(checkMap[yPos][xNewPos] == 2){
                    System.out.println("You have discovered a chest!");
                    atChest();
                    resetTile(checkMap, xNewPos, yPos);
                }
                if(checkMap[yPos][xNewPos] == 3){
                    System.out.println("You have discovered a boss!");
                    atBoss(checkMap,xNewPos, yPos,bossObjArray,bossNameArray);
                }
                if(checkMap[yPos][xNewPos] == 4){
                    System.out.println("You have discovered a mini-boss");
                    atMiniBoss(checkMap, xNewPos, yPos,miniBossObjArray);
                }
                if(checkMap[yPos][xNewPos] ==1){
                    System.out.println("You are attacked by zombies...");
                    atZombie(checkMap,xNewPos,yPos,zombieObjArray);
                }
                if(checkMap[yPos][xNewPos] == 5){
                    System.out.println("You are back at the start.");
                    resetTile(checkMap,xNewPos,yPos);
                }
                map[yPos][xPos] = 7;
            }
        }
        if( direction == 5){
            getUserHp(hero1);
        }
        if(direction == 6){
            printMap(checkMap);
        }
        //else{
        //    System.out.println("Error: can only move one unit in one of the 4 directions.");
        //}
    }

    /**
     * Overwrites the player's current tile with {@code tileType}.
     * NOTE(review): indexes as map[x][y], the opposite of the map[y][x]
     * convention used everywhere else — looks like a swapped-index bug;
     * confirm against callers before relying on it.
     */
    public void freeCurrentLocation(int[][] map,int tileType){
        int xPos = findCurrentX(map);
        int yPos = findCurrentY(map);
        System.out.println(map[xPos][yPos]);
        map[xPos][yPos] = tileType;
    }

    /** Scans the 10x15 map for the tile marked 5 and returns its x (column). */
    public int findCurrentX(int[][] map){
        int xPos = 0;
        int yPos = 0;
        for(int k=0;k<15;k++){
            for(int j = 0;j<10;j++){
                if(map[j][k] == 5){
                    yPos = j;
                    xPos = k;
                }
            }
        }
        return xPos;
    }

    /** Scans the 10x15 map for the tile marked 5 and returns its y (row). */
    public int findCurrentY(int[][] map){
        int xPos = 0;
        int yPos = 0;
        for(int k=0;k<15;k++){
            for(int j = 0;j<10;j++){
                if(map[j][k] == 5){
                    yPos = j;
                    xPos = k;
                }
            }
        }
        return yPos;
    }

    /**
     * Main game loop: declares victory once the three boss tiles are cleared,
     * otherwise prompts for a direction and moves.
     * NOTE(review): loops by unbounded self-recursion — each turn adds a
     * stack frame, so a long game will eventually StackOverflow.
     */
    public void atTile(int[][] map, int[][] checkMap, mob[] miniBossObjArray, mob[] bossObjArray, mob[] zombieObjArray){
        Scanner in = new Scanner(System.in);
        int yPos=findCurrentY(map);
        int xPos=findCurrentX(map);
        if(checkMap[7][10] == 1 && checkMap[7][3] == 1 && checkMap[1][14] == 1){
            System.out.println("YOU HAVE WON!");
            endGame();
        }
        System.out.println("You are at tile "+xPos+","+yPos);
        System.out.println("Type the direction you want to go: ");
        System.out.println("1: up, 2: down, 3: right, 4: left, 5: hp, 6: map");
        int direction = in.nextInt();
        moveCurrentLocation(map, checkMap, direction,miniBossObjArray,bossObjArray,zombieObjArray, bossNameArray);
        atTile(map, checkMap,miniBossObjArray,bossObjArray,zombieObjArray);
    }

    /** Fires the event for whatever occupies the player's current tile. */
    public void checkCurrentTile(int[][] map, int[][] checkMap, mob[] miniBossObjArray,mob[] bossObjArray, mob[] zombieObjArray){
        int xPos = findCurrentX(map);
        int yPos = findCurrentY(map);
        Random rn = new Random();
        //int numberOfZombies = rn.nextInt(1);
        System.out.println("Checking for things on tile "+xPos+", "+yPos);
        if(checkMap[yPos][xPos] == 0){
            System.out.println("You cant go this way!");
        }
        if(checkMap[yPos][xPos] == 2){
            System.out.println("You have discovered a chest!");
            atChest();
            resetTile(checkMap, xPos, yPos);
        }
        if(checkMap[yPos][xPos] == 3){
            System.out.println("You have discovered a boss!");
            atBoss(checkMap, xPos,yPos, bossObjArray,bossNameArray);
        }
        if(checkMap[yPos][xPos] == 4){
            System.out.println("You have discovered a mini-boss");
            atMiniBoss(checkMap,xPos,yPos, miniBossObjArray);
        }
        if(checkMap[yPos][xPos] == 1){
            System.out.println("You have discovered a zombie!");
            atZombie(checkMap,xPos,yPos, zombieObjArray);
        }
    }

    /** Opens a chest: heals the player 2, 4 or 6 hp at random. */
    public void atChest(){
        Random rn = new Random();
        int chestItem = rn.nextInt(3);
        if(chestItem == 0){
            increaseHp(2);
            System.out.println("Found Potion! Increased hp by 2.");
            getHp();
        }
        if(chestItem == 1){
            increaseHp(4);
            System.out.println("Found Potion! Increased hp by 4.");
            getHp();
        }
        if(chestItem == 2){
            increaseHp(6);
            System.out.println("Found Potion! Increased hp by 6.");
            getHp();
        }
    }

    /** Marks a tile as a plain visited path tile (7) after its event fired. */
    public void resetTile(int[][] checkMap, int xPos, int yPos){
        int xLoc = xPos;
        int yLoc = yPos;
        checkMap[yLoc][xLoc] = 7;
    }

    /**
     * Resolves a mini-boss encounter at one of the two hard-coded lairs
     * ((1,2) and (8,6)), fights it, then clears the tile.
     *
     * @return the index of the mini-boss fought (0 when no lair matched)
     */
    public int atMiniBoss(int[][] checkMap, int xPos, int yPos,mob[] miniBossObjArray ){
        boolean isThere = false;
        int mobNumber = 0;
        System.out.println("You have found a mini dungeon");
        if(yPos == 2 && xPos == 1){
            System.out.println("You have entered BOSS1's lair.");
            mobNumber = 0;
            battleMiniBoss(checkMap,xPos, yPos, miniBossObjArray,mobNumber);
            System.out.println("YOU HAVE SUCCESFULLY DEFEATED THE FIRST MINI BOSS");
            resetTile(checkMap,xPos,yPos);
            return mobNumber;
        }
        if(yPos == 6 && xPos == 8){
            System.out.println("You have entered BOSS2's lair.");
            mobNumber = 1;
            battleMiniBoss(checkMap,xPos, yPos, miniBossObjArray,mobNumber );
            System.out.println("YOU HAVE SUCCESFULLY DEFEATED THE SECOND MINI BOSS");
            resetTile(checkMap,xPos,yPos);
            return mobNumber;
        }
        return mobNumber;
    }

    /** Placeholder — mini-boss dialogue is not implemented. */
    public void miniBossDialogue(){
        System.out.println();
    }

    /**
     * Creates the two mini-bosses (30 hp each).
     * NOTE(review): the parameter is overwritten — the count is always 2.
     */
    public mob[] spawnMiniBoss(int numberOfMiniBosses){//must be 2
        int numOf = numberOfMiniBosses;
        numOf = 2;
        String bossType;
        double bossHp;
        boolean nPCcondition;
        mob[] miniBossObjArray = new mob[numOf];
        String [] miniBossNameArray = {"miniBoss1", "miniBoss2"};
        for(int k = 0; k<numOf; k++){
            miniBossObjArray[k] = new mob(miniBossNameArray[k], "mini boss", 30.0, true);
        }
        return miniBossObjArray;
    }

    /**
     * Battle loop against mini-boss {@code mobNumber}: each round asks for a
     * weapon, rolls player damage, applies it, and deals a fixed counter-hit
     * (rolled once, 0-3) back to the player.
     */
    public void battleMiniBoss(int[][] checkMap, int xPos, int yPos, mob[] miniBossObjArray, int mobNumber){
        Scanner in = new Scanner(System.in);
        characterClass = "Warrior";
        Random rn = new Random();
        int dmgTaken= rn.nextInt(4);
        while(miniBossObjArray[mobNumber].getHp()>0){
            System.out.println("Choose which weapon you wish to use by typing 0 for main or 1 for secondary");
            int weapon = in.nextInt();
            dmgDone = typeOffense(type,weapon);
            System.out.println("The dmgDone variable will pass through this value: "+dmgDone);
            miniBossObjArray[mobNumber].attack(miniBossObjArray,mobNumber,weapon, characterClass,dmgDone);
            dmgUser(hero1,dmgTaken);
            getMobHp(miniBossObjArray,mobNumber);
        }
    }

    /** Placeholder — boss dialogue is not implemented. */
    public void bossDialogue(){
    }

    /** Creates the three bosses (50 hp each); the parameter is ignored. */
    public mob[] spawnBoss(int numberOfBosses){ //must be 3
        Scanner in = new Scanner(System.in);
        int numOf = 3;
        String bossType;
        double bossHp;
        boolean nPCcondition;
        mob[] bossObjArray = new mob[numOf];
        for(int k=0;k<numOf;k++){
            bossObjArray[k]= new mob(bossNameArray[k], "boss", 50.0, true);
        }
        return bossObjArray;
    }

    /**
     * Resolves a boss encounter at one of the three hard-coded lairs
     * ((14,1), (3,7), (10,7)), fights it, then clears the tile.
     *
     * @return the index of the boss fought (0 when no lair matched)
     */
    public int atBoss(int[][] checkMap, int xPos, int yPos,mob[] bossObjArray,String[] bossNameArray ){
        boolean isThere = false;
        int mobNumber = 0;
        int endCounter = 0;
        System.out.println("You have found a dungeon");
        if(yPos == 1 && xPos == 14){
            System.out.println("You have entered "+bossNameArray[0]+" lair.");
            mobNumber = 0;
            battleBoss(checkMap,xPos, yPos, bossObjArray,mobNumber);
            System.out.println("YOU HAVE SUCCESFULLY DEFEATED "+bossNameArray[0]);
            endCounter++;
            resetTile(checkMap,xPos,yPos);
            return mobNumber;
        }
        if(yPos == 7 && xPos == 3){
            System.out.println("You have entered "+bossNameArray[1]+" lair.");
            mobNumber = 1;
            battleBoss(checkMap,xPos, yPos, bossObjArray,mobNumber );
            System.out.println("YOU HAVE SUCCESFULLY DEFEATED "+bossNameArray[1]);
            endCounter++;
            resetTile(checkMap,xPos,yPos);
            return mobNumber;
        }
        if(yPos == 7 && xPos == 10){
            System.out.println("You have entered "+bossNameArray[2]+" lair.");
            mobNumber = 2;
            battleBoss(checkMap,xPos, yPos, bossObjArray,mobNumber );
            System.out.println("YOU HAVE SUCCESFULLY DEFEATED "+bossNameArray[2]);
            endCounter++;
            resetTile(checkMap,xPos,yPos);
            return mobNumber;
        }
        return mobNumber;
    }

    /** Battle loop against boss {@code mobNumber}; counter-hit rolled once (0-4). */
    public void battleBoss(int[][] checkMap, int xPos, int yPos, mob[] bossObjArray, int mobNumber){
        Scanner in = new Scanner(System.in);
        characterClass = "Warrior";
        Random rn = new Random();
        int dmgTaken = rn.nextInt(5);
        while(bossObjArray[mobNumber].getHp()>0){
            System.out.println("Choose which weapon you wish to use by typing 0 for main or 1 for secondary");
            int weapon = in.nextInt();
            dmgDone = typeOffense(type,weapon);
            System.out.println("The dmgDone variable will pass through this value: "+dmgDone);
            bossObjArray[mobNumber].attack(bossObjArray,mobNumber,weapon, characterClass,dmgDone);
            dmgUser(hero1,dmgTaken);
            getMobHp(bossObjArray,mobNumber);
        }
    }

    /** Creates the zombie pool (5 hp each); the count is forced to 1. */
    public mob[] spawnZombie(int numberOfZombies){//must be less than or equal to 3
        int numOf = numberOfZombies;
        numOf = 1;
        String zombieType;
        double zombieHp;
        boolean nPCcondition;
        mob[] zombieObjArray = new mob[numOf];
        for(int k = 0; k<numOf; k++){
            zombieObjArray[k] = new mob(zombieNameArray[k], "Zombie", 5.0, true);
        }
        return zombieObjArray;
    }

    /**
     * Resolves a zombie encounter: fights zombie 0, then restores it to 5 hp
     * so the same object can be reused at the next zombie tile, and clears
     * the tile. The debug prints assume the (10,4) zombie tile.
     */
    public int atZombie(int[][] checkMap, int xPos, int yPos,mob[] zombieObjArray ){
        int mobNumber = 0;
        System.out.println("before if......");
        System.out.println("xPos should be 10... it is " +xPos);
        System.out.println("yPos should be 4... it is "+yPos);
        System.out.println("This value is supposed to be 1.... it is actually: "+checkMap[yPos][xPos]);
        if(checkMap[yPos][xPos] == 1){
            System.out.println("just inside if......");
            mobNumber = 1;
            battleZombie(checkMap,xPos, yPos, zombieObjArray,mobNumber );
            System.out.println("YOU HAVE SUCCESFULLY DEFEATED THE ZOMBIE");
            while(zombieObjArray[0].getHp()!= 5.0){
                zombieObjArray[0].increaseHp(1);
            }
            System.out.println("");
            resetTile(checkMap,xPos,yPos);
            return mobNumber;
        }
        return mobNumber;
    }

    /**
     * Battle loop against zombie 0 (the {@code mobNumber} argument is unused);
     * counter-hit rolled once (0-1).
     */
    public void battleZombie(int[][] checkMap, int xPos, int yPos, mob[] zombieObjArray, int mobNumber){
        Scanner in = new Scanner(System.in);
        characterClass = "Warrior";
        Random rn =new Random();
        int dmgTaken = rn.nextInt(2);
        while(zombieObjArray[0].getHp()>0){
            System.out.println("Choose which weapon you wish to use by typing 0 for main or 1 for secondary");
            int weapon = in.nextInt();
            dmgDone = typeOffense(type,weapon);
            System.out.println("");
            System.out.println("You strike for: "+dmgDone+" damage.");
            zombieObjArray[0].attack(zombieObjArray,0,weapon, characterClass,dmgDone);
            dmgUser(hero1,dmgTaken);
            getMobHp(zombieObjArray,0);
            System.out.println("");
        }
    }

    /** Terminates the JVM — only reachable from the victory check in atTile. */
    public void endGame(){
        System.exit(0);
    }

    /** Prints and returns THIS mob's hp; the hero1 parameter is unused. */
    public double getUserHp( mob[] hero1){
        System.out.println(hp);
        return hp;
    }

    /** Applies damage to THIS mob's hp; the hero1 parameter is unused. */
    public double dmgUser( mob[] hero1, int dmgTaken){
        hp = hp-dmgTaken;
        System.out.println("The user is hurt for "+dmgTaken+" hp.");
        return hp;
    }

    /** Dumps the 11x15 check map to the console, one row per line. */
    public void printMap(int[][] checkMap){
        for(int p=0;p<11;p++){
            System.out.println("");
            for (int q=0;q<15;q++){
                System.out.print(checkMap[p][q]+" ");
            }
        }
        System.out.println("");
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jruyi.io.tcpclient;

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

import org.jruyi.common.Service;
import org.jruyi.common.StrUtil;
import org.jruyi.io.IBufferFactory;
import org.jruyi.io.ISession;
import org.jruyi.io.ISessionListener;
import org.jruyi.io.ISessionService;
import org.jruyi.io.channel.IChannel;
import org.jruyi.io.channel.IChannelAdmin;
import org.jruyi.io.channel.IChannelService;
import org.jruyi.io.common.Util;
import org.jruyi.io.filter.IFilterList;
import org.jruyi.io.filter.IFilterManager;
import org.jruyi.io.tcp.TcpChannel;
import org.jruyi.io.tcp.TcpChannelConf;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for TCP client services: tracks open channels, dispatches
 * session writes to the owning channel, and manages the filter chain and
 * configuration lifecycle. Dependencies (channel admin, filter manager,
 * buffer factory) are injected via the set/unset pairs below.
 */
public abstract class AbstractTcpClient<I, O> extends Service implements IChannelService<I, O>, ISessionService<I, O> {

	private static final Logger c_logger = LoggerFactory.getLogger(AbstractTcpClient.class);

	// Monotonic channel-id generator shared by all channels of this client.
	private final AtomicLong m_sequence = new AtomicLong(0L);
	private IChannelAdmin m_ca;
	private IFilterManager m_fm;
	private IBufferFactory m_bf;
	private String m_caption;
	private IFilterList m_filters;
	// volatile: flipped by start/stop, read on channel-open callbacks.
	private volatile boolean m_stopped = true;
	private ISessionListener<I, O> m_listener;
	// Live channels keyed by channel id; created in activate().
	private ConcurrentHashMap<Long, IChannel> m_channels;

	/**
	 * A TCP channel that can carry the request object it was opened for,
	 * so the response path can retrieve it.
	 */
	static final class TcpClientChannel extends TcpChannel {

		private Object m_request;

		TcpClientChannel(IChannelService<Object, Object> cs) {
			super(cs);
		}

		public void attachRequest(Object request) {
			m_request = request;
		}

		/** Returns the attached request and clears it. */
		public Object detachRequest() {
			final Object request = m_request;
			m_request = null;
			return request;
		}
	}

	@Override
	public long generateId() {
		return m_sequence.incrementAndGet();
	}

	@Override
	public final Object getConfiguration() {
		return configuration();
	}

	@Override
	public IChannelAdmin getChannelAdmin() {
		return m_ca;
	}

	@Override
	public final IBufferFactory getBufferFactory() {
		return m_bf;
	}

	@Override
	public void setSessionListener(ISessionListener<I, O> listener) {
		m_listener = listener;
	}

	@Override
	public void openSession() {
		throw new UnsupportedOperationException();
	}

	/**
	 * Writes {@code msg} on the channel backing {@code session}. If the
	 * channel is already closed, the message is logged and closed (when
	 * closeable) so its resources are not leaked.
	 */
	@Override
	public void write(ISession session, O msg) {
		final IChannel channel = m_channels.get(session.id());
		if (channel != null) {
			channel.write(msg);
			return;
		}

		c_logger.warn(StrUtil.join(session, " failed to send(channel closed): ", msg));

		if (msg instanceof AutoCloseable) {
			try {
				((AutoCloseable) msg).close();
			} catch (Throwable t) {
				c_logger.error(StrUtil.join(session, " failed to close message: ", msg), t);
			}
		}
	}

	@Override
	public long throttle() {
		return configuration().throttle();
	}

	@Override
	public final IFilterList getFilterChain() {
		return m_filters;
	}

	@Override
	public void onChannelClosed(IChannel channel) {
		c_logger.debug("{}: CLOSED", channel);

		final ConcurrentHashMap<Long, IChannel> channels = m_channels;
		if (channels != null)
			channels.remove(channel.id());
	}

	@Override
	public void closeSession(ISession session) {
		((IChannel) session).close();
	}

	/**
	 * Logs the failure (closing the attachment when it is closeable) and
	 * always closes the channel.
	 */
	@Override
	public void onChannelException(IChannel channel, Throwable t) {
		try {
			final Object attachment = channel.detach();
			if (attachment != null) {
				c_logger.error(StrUtil.join(channel, " got an error: ", attachment), t);
				if (attachment instanceof AutoCloseable) {
					try {
						((AutoCloseable) attachment).close();
					} catch (Throwable e) {
						c_logger.error(StrUtil.join(channel, "Failed to close: ", attachment), e);
					}
				}
			} else
				c_logger.error(StrUtil.join(channel, " got an error"), t);
		} finally {
			channel.close();
		}
	}

	@Override
	public void onChannelOpened(IChannel channel) {
		c_logger.debug("{}: OPENED", channel);

		final Long id = channel.id();
		final ConcurrentHashMap<Long, IChannel> channels = m_channels;
		if (channels != null)
			channels.put(id, channel);

		// Re-check after registration: the service may have been stopped
		// concurrently, in which case this late channel must not stay open.
		if (m_stopped) {
			channel.close();
			return;
		}
	}

	@Override
	public void onChannelIdleTimedOut(IChannel channel) {
		throw new UnsupportedOperationException();
	}

	@Override
	public final String toString() {
		return m_caption;
	}

	/**
	 * Applies a configuration update. Returns {@code true} when a mandatory
	 * property changed and the service therefore needs a restart; otherwise,
	 * when timeouts have been disabled, the pooled channels are closed.
	 */
	@Override
	protected final boolean updateInternal(Map<String, ?> properties) throws Exception {
		final TcpClientConf newConf = createConf(properties);
		updateFilters(newConf);

		final TcpClientConf oldConf = configuration();
		boolean changed = oldConf.isMandatoryChanged(newConf);
		if (!changed) {
			final int timeout = timeout(newConf);
			if (timeout == 0)
				closeChannels();
		}
		configuration(newConf);
		return changed;
	}

	@Override
	protected void startInternal() {
		m_stopped = false;
	}

	@Override
	protected void stopInternal() {
		m_stopped = true;
		closeChannels();
	}

	public void setChannelAdmin(IChannelAdmin cm) {
		m_ca = cm;
	}

	public void unsetChannelAdmin(IChannelAdmin cm) {
		m_ca = null;
	}

	public void setFilterManager(IFilterManager fm) {
		m_fm = fm;
	}

	public void unsetFilterManager(IFilterManager fm) {
		m_fm = null;
	}

	public synchronized void setBufferFactory(IBufferFactory bf) {
		m_bf = bf;
	}

	public synchronized void unsetBufferFactory(IBufferFactory bf) {
		// Fix: previously this re-assigned m_bf to the same instance
		// (a self-assignment no-op), so the unbound factory was never
		// released. Clear the reference, matching the other unset* methods.
		if (m_bf == bf)
			m_bf = null;
	}

	/**
	 * Component activation: builds the initial configuration, filter chain,
	 * caption and channel map.
	 */
	public void activate(Map<String, ?> properties) throws Exception {
		final TcpClientConf conf = createConf(properties);
		updateFilters(conf);
		configuration(conf);
		m_caption = Util.genServiceId(properties, conf.ip(), conf.port(), "TcpClient");
		m_channels = new ConcurrentHashMap<>(conf.initialCapacityOfChannelMap());
	}

	public void deactivate() {
		stop();
		// Passing null releases the currently-held filters.
		updateFilters(null);
	}

	abstract TcpClientConf createConf(Map<String, ?> props);

	abstract TcpClientConf configuration();

	abstract void configuration(TcpClientConf conf);

	int timeout(TcpClientConf conf) {
		return Util.max(conf.connectTimeoutInSeconds(), conf.readTimeoutInSeconds());
	}

	Method[] getMandatoryPropsAccessors() {
		return TcpClientConf.getMandatoryPropsAccessors();
	}

	final ISessionListener<I, O> listener() {
		return m_listener;
	}

	final void connect() {
		final TcpChannel channel = newChannel();
		channel.connect(configuration().connectTimeoutInSeconds());
	}

	final void connect(Object attachment) {
		final TcpChannel channel = newChannel();
		channel.attach(attachment);
		channel.connect(configuration().connectTimeoutInSeconds());
	}

	final void connect(Object attachment, int selectorId) {
		final TcpChannel channel = newChannel(selectorId);
		channel.attach(attachment);
		channel.connect(configuration().connectTimeoutInSeconds());
	}

	final boolean cancelReadTimeout(IChannel channel) {
		return channel.cancelTimeout();
	}

	final void scheduleReadTimeout(IChannel channel, int timeout) {
		channel.scheduleReadTimeout(timeout);
	}

	@SuppressWarnings({ "unchecked" })
	TcpChannel newChannel() {
		return new TcpClientChannel((IChannelService<Object, Object>) this);
	}

	TcpChannel newChannel(int selectorId) {
		throw new UnsupportedOperationException();
	}

	/**
	 * Swaps the filter chain to the one named by {@code newConf} (empty when
	 * {@code newConf} is null), acquiring the new filters before releasing
	 * the old ones. No-op when the filter names are unchanged.
	 */
	private void updateFilters(TcpChannelConf newConf) {
		final String[] newNames = newConf == null ? StrUtil.getEmptyStringArray() : newConf.filters();
		String[] oldNames = StrUtil.getEmptyStringArray();
		final IFilterManager fm = m_fm;
		final TcpChannelConf oldConf = configuration();
		if (oldConf == null)
			m_filters = fm.getFilters(oldNames);
		else
			oldNames = oldConf.filters();

		if (Arrays.equals(newNames, oldNames))
			return;

		m_filters = fm.getFilters(newNames);
		fm.ungetFilters(oldNames);
	}

	/** Closes every channel currently tracked by this client. */
	private void closeChannels() {
		final Collection<IChannel> channels = m_channels.values();
		for (IChannel channel : channels)
			channel.close();
	}
}
/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is part of dcm4che, an implementation of DICOM(TM) in * Java(TM), available at http://sourceforge.net/projects/dcm4che. * * The Initial Developer of the Original Code is * TIANI Medgraph AG. * Portions created by the Initial Developer are Copyright (C) 2003-2005 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Gunter Zeilinger <[email protected]> * Franz Willer <[email protected]> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* ***** END LICENSE BLOCK ***** */

package org.dcm4chex.rid.mbean.xml;

import java.io.IOException;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmElement;
import org.dcm4che.data.DcmObjectFactory;
import org.dcm4che.data.PersonName;
import org.dcm4che.dict.Tags;
import org.dcm4che.dict.UIDs;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;

/**
 * Renders a list of DICOM documents as an IHE RID {@code IHEDocumentList}
 * XML response via SAX events, optionally transformed server-side by an
 * XSL stylesheet.
 *
 * @author franz.willer
 */
public class IHEDocumentList implements XMLResponseObject {

    // NOTE(review): SimpleDateFormat is not thread-safe. These shared static
    // instances are only safe if responses are never formatted concurrently
    // — confirm the MBean's threading model before relying on this.
    private static final SimpleDateFormat DATE_FORMATTER = new SimpleDateFormat("yyyyMMdd");
    private static final SimpleDateFormat DATETIME_FORMATTER = new SimpleDateFormat("yyyyMMddHHmmss");
    private static final AttributesImpl EMPTY_ATTRIBUTES = new AttributesImpl();

    private static Logger log = Logger.getLogger(IHEDocumentList.class.getName());

    /** Datasets (one per document) collected for this list. */
    private List datasets = new ArrayList();
    /** Fallback dataset for patient information when the list is empty. */
    private Dataset queryDS = null;
    /** SAX sink; set in toXML(OutputStream) or injected via embedXML. */
    private TransformerHandler th = null;
    private XMLUtil util = null;

    /** Code value for the list's &lt;code&gt; element. */
    private String docCode;
    /** Coding scheme for the list's &lt;code&gt; element. */
    private String docCodeSystem;
    /** Display name for the list's &lt;code&gt; element. */
    private String docDisplayName;

    /** Date-time window (ms since epoch) used to filter documents in add(). */
    private long lowerDateTime = Long.MIN_VALUE;
    private long upperDateTime = Long.MAX_VALUE;
    /** If &gt; 0, only the newest n documents are kept after sorting. */
    private int mostRecentResults = 0;

    /** Client-side stylesheet written as an xml-stylesheet processing instruction. */
    private String xslFile;
    /** The request URL which is used from client to get this document list (with query string!). */
    private String reqURL = "";
    /** Base RID URL; default, overwritten with base url of request. */
    private String docRIDUrl = "http://localhost:8080";
    /** Server-side XSL transformation applied to the generated XML, if set. */
    private URL xslt;

    public IHEDocumentList() {
    }

    public IHEDocumentList(Collection datasets) {
        addAll(datasets);
    }

    public void setQueryDS(Dataset ds) {
        queryDS = ds;
    }

    /**
     * Adds a dataset if its document date falls inside the configured
     * lower/upper date-time window; datasets without any date always pass.
     *
     * @param ds the document dataset
     * @return true if the dataset was added to the list
     */
    public boolean add(Dataset ds) {
        // FIX: use the same date extraction as the sort comparator
        // (ContentDate/Time with AcquisitionDatetime fallback). Previously
        // only ContentDate/ContentTime was checked here, so waveform objects
        // carrying just AcquisitionDatetime bypassed the date window.
        Date date = getDateFromDS(ds);
        if (date != null) {
            long ms = date.getTime();
            if (ms < lowerDateTime || ms > upperDateTime) {
                return false;
            }
        }
        return datasets.add(ds);
    }

    /**
     * Sorts the documents (newest first) and, if mostRecentResults &gt; 0,
     * truncates the list to that many entries.
     */
    private void applyMostRecentResults() {
        Collections.sort(datasets, new DatasetDateComparator());
        if (mostRecentResults > 0 && datasets.size() > mostRecentResults) {
            // keep the first mostRecentResults items; remove all older datasets
            datasets.subList(mostRecentResults, datasets.size()).clear();
        }
    }

    /**
     * Extracts the document date: ContentDate/ContentTime, falling back to
     * AcquisitionDatetime.
     *
     * @param ds the document dataset
     * @return the document date, or null if neither attribute is present
     */
    private Date getDateFromDS(Dataset ds) {
        Date d = ds.getDateTime(Tags.ContentDate, Tags.ContentTime);
        if (d == null) {
            d = ds.getDate(Tags.AcquisitionDatetime);
        }
        return d;
    }

    /**
     * Adds all datasets of the given collection (subject to the date window).
     */
    public void addAll(Collection col) {
        if (col == null || col.isEmpty()) {
            return;
        }
        for (Iterator iter = col.iterator(); iter.hasNext();) {
            add((Dataset) iter.next());
        }
    }

    /** @return number of documents currently in the list */
    public int size() {
        return datasets.size();
    }

    /**
     * @return Returns the docCode.
     */
    public String getDocCode() {
        return docCode;
    }

    /**
     * @param docCode The docCode to set.
     */
    public void setDocCode(String docCode) {
        this.docCode = docCode;
    }

    /**
     * @return Returns the docCodeSystem.
     */
    public String getDocCodeSystem() {
        return docCodeSystem;
    }

    /**
     * @param docCodeSystem The docCodeSystem to set.
     */
    public void setDocCodeSystem(String docCodeSystem) {
        this.docCodeSystem = docCodeSystem;
    }

    /**
     * @return Returns the docDisplayName.
     */
    public String getDocDisplayName() {
        return docDisplayName;
    }

    /**
     * @param docDisplayName The docDisplayName to set.
     */
    public void setDocDisplayName(String docDisplayName) {
        this.docDisplayName = docDisplayName;
    }

    /**
     * @param lowerDateTime The lowerDateTime to set.
     */
    public void setLowerDateTime(Date lowerDateTime) {
        this.lowerDateTime = lowerDateTime.getTime();
    }

    /**
     * @return Returns the mostRecentResults.
     */
    public int getMostRecentResults() {
        return mostRecentResults;
    }

    /**
     * @param mostRecentResults The mostRecentResults to set.
     */
    public void setMostRecentResults(int mostRecentResults) {
        this.mostRecentResults = mostRecentResults;
    }

    /**
     * @param upperDateTime The upperDateTime to set.
     */
    public void setUpperDateTime(Date upperDateTime) {
        this.upperDateTime = upperDateTime.getTime();
    }

    /**
     * @return Returns the xslFile.
     */
    public String getXslFile() {
        return xslFile;
    }

    /**
     * @param xslFile The xslFile to set.
     */
    public void setXslFile(String xslFile) {
        this.xslFile = xslFile;
    }

    /**
     * @return Returns the xslt.
     */
    public URL getXslt() {
        return xslt;
    }

    /**
     * Set the URL to an xsl file that is used to transform the xml result of
     * this DocumentList. Values not starting with "http:" are resolved
     * relative to docRIDUrl.
     *
     * @param xslt The xslt to set.
     * @throws MalformedURLException if the resulting URL is invalid
     */
    public void setXslt(String xslt) throws MalformedURLException {
        if (xslt != null) {
            if (xslt.startsWith("http:")) {
                this.xslt = new URL(xslt);
            } else {
                this.xslt = new URL(getDocRIDUrl() + "/" + xslt);
            }
        } else {
            this.xslt = null;
        }
    }

    /**
     * @param reqURL The reqURL to set.
     */
    public void setReqURL(String reqURL) {
        this.reqURL = reqURL;
    }

    /**
     * @return Returns the docRIDUrl.
     */
    public String getDocRIDUrl() {
        return docRIDUrl;
    }

    /**
     * @param docRIDUrl The docRIDUrl to set.
     */
    public void setDocRIDUrl(String docRIDUrl) {
        this.docRIDUrl = docRIDUrl;
    }

    /**
     * Writes the (sorted, optionally truncated) document list as XML to the
     * given stream, applying the server-side XSLT when configured. The
     * stream is flushed and closed afterwards.
     *
     * @param out target stream
     * @throws TransformerConfigurationException if a handler cannot be created
     * @throws SAXException on XML generation errors
     */
    public void toXML(OutputStream out) throws TransformerConfigurationException, SAXException {
        SAXTransformerFactory tf = (SAXTransformerFactory) TransformerFactory.newInstance();
        applyMostRecentResults(); // sorts and (if mostRecentResults > 0) shrinks the list
        th = null;
        if (xslt != null) {
            try {
                th = tf.newTransformerHandler(
                        new StreamSource(xslt.openStream(), xslt.toExternalForm()));
            } catch (IOException x) {
                // FIX: previously only logged and fell through with th unset,
                // causing a NullPointerException on the next use of th; now
                // fall back to plain (untransformed) XML output below.
                log.error("Cant open xsl file:" + xslt, x);
            }
        }
        if (th == null) {
            th = tf.newTransformerHandler();
            th.getTransformer().setOutputProperty(OutputKeys.INDENT, "yes");
        }
        th.setResult(new StreamResult(out));
        th.startDocument();
        if (xslFile != null) {
            th.processingInstruction("xml-stylesheet",
                    "href='" + xslFile + "' type='text/xsl'");
        }
        toXML();
        th.endDocument();
        try {
            out.flush();
            out.close();
        } catch (Exception e) {
            // FIX: was e.printStackTrace(); use the class logger instead
            log.error("Cant flush/close output stream!", e);
        }
    }

    /**
     * Emits the IHEDocumentList element and all its children to {@code th}.
     * Patient information is taken from the first document, from queryDS, or
     * from a freshly created empty dataset (empty PatientID) as last resort.
     */
    private void toXML() throws SAXException {
        util = new XMLUtil(th);
        util.startElement("IHEDocumentList", EMPTY_ATTRIBUTES);
        addDocCode();
        addActivityTime();
        Dataset ds;
        if (datasets.size() > 0) {
            ds = (Dataset) datasets.get(0);
        } else if (queryDS != null) {
            ds = queryDS;
        } else {
            ds = DcmObjectFactory.getInstance().newDataset();
            ds.putLO(Tags.PatientID, "");
        }
        addRecordTarget(ds);
        addAuthor();
        addDocuments();
        util.endElement("IHEDocumentList");
    }

    /**
     * Embeds this document list into an already running SAX stream.
     *
     * @param th the handler to emit into (document events are NOT sent)
     */
    public void embedXML(TransformerHandler th) throws SAXException {
        this.th = th;
        toXML();
    }

    /**
     * Emits the &lt;code&gt; element; attributes are only written when set.
     */
    private void addDocCode() throws SAXException {
        AttributesImpl attr = new AttributesImpl();
        if (docCode != null) {
            util.addAttribute(attr, "code", docCode);
        }
        if (docCodeSystem != null) {
            util.addAttribute(attr, "codeSystem", docCodeSystem);
        }
        if (docDisplayName != null) {
            util.addAttribute(attr, "displayName", docDisplayName);
        }
        util.startElement("code", attr);
        util.endElement("code");
    }

    /**
     * Emits the &lt;activityTime&gt; element with the current date-time.
     */
    private void addActivityTime() throws SAXException {
        AttributesImpl attr = new AttributesImpl();
        util.addAttribute(attr, "value", DATETIME_FORMATTER.format(new Date()));
        util.startElement("activityTime", attr);
        util.endElement("activityTime");
    }

    /**
     * Emits the &lt;recordTarget&gt; subtree (patient id, patient data and
     * provider organization) from the given dataset.
     */
    private void addRecordTarget(Dataset ds) throws SAXException {
        util.startElement("recordTarget", EMPTY_ATTRIBUTES);
        util.startElement("patient", EMPTY_ATTRIBUTES);
        // patient id
        AttributesImpl attrsPatID = new AttributesImpl();
        util.addAttribute(attrsPatID, "root", ds.getString(Tags.IssuerOfPatientID)); // issuer id
        util.addAttribute(attrsPatID, "extension", ds.getString(Tags.PatientID)); // patient id within issuer
        util.startElement("id", attrsPatID);
        util.endElement("id");
        // patientPatient
        addPatientPatient(ds);
        util.startElement("providerOrganization", EMPTY_ATTRIBUTES);
        AttributesImpl attrsOrgID = new AttributesImpl();
        util.addAttribute(attrsOrgID, "id", ""); // TODO where can i get the id?
        util.startElement("id", attrsOrgID);
        util.endElement("id");
        util.startElement("name", EMPTY_ATTRIBUTES);
        String orgName = ds.getString(Tags.InstitutionName); // TODO Institution name correct?
        if (orgName != null) {
            th.characters(orgName.toCharArray(), 0, orgName.length());
        }
        util.endElement("name");
        util.endElement("providerOrganization");
        util.endElement("patient");
        util.endElement("recordTarget");
    }

    /**
     * Emits the &lt;patientPatient&gt; subtree: name, gender and birth date.
     * Missing values degrade to empty strings; gender defaults to code
     * 121103 ("undetermined sex") unless PatientSex is "M" or "F".
     */
    private void addPatientPatient(Dataset ds) throws SAXException {
        String familyName = "";
        String givenName = "";
        String genderCode = "121103"; // Code Value for Patient's Sex 'O' (Undetermined sex)
        String birthDate = "";
        try {
            PersonName pn = ds.getPersonName(Tags.PatientName);
            if (pn != null) {
                familyName = pn.get(PersonName.FAMILY);
                givenName = pn.get(PersonName.GIVEN);
                if (givenName == null) {
                    givenName = "";
                }
            }
            String s = ds.getString(Tags.PatientSex);
            if ("M".equals(s) || "F".equals(s)) {
                genderCode = s;
            }
            Date date = ds.getDate(Tags.PatientBirthDate);
            if (date != null) {
                birthDate = DATE_FORMATTER.format(date);
            }
        } catch (Exception x) {
            // best effort: fall back to the defaults initialized above
            log.warn("Exception getting person informations:", x);
        }
        util.startElement("patientPatient", EMPTY_ATTRIBUTES);
        // names
        util.startElement("name", EMPTY_ATTRIBUTES);
        util.startElement("family", EMPTY_ATTRIBUTES);
        th.characters(familyName.toCharArray(), 0, familyName.length());
        util.endElement("family");
        util.startElement("given", EMPTY_ATTRIBUTES);
        th.characters(givenName.toCharArray(), 0, givenName.length());
        util.endElement("given");
        util.endElement("name");
        // genderCode
        AttributesImpl attr = new AttributesImpl();
        util.addAttribute(attr, "code", genderCode);
        util.addAttribute(attr, "codeSystem", "1.2.840.10008.2.16.4"); // ??
        util.startElement("administrativeGenderCode", attr);
        util.endElement("administrativeGenderCode");
        // birth
        AttributesImpl attrBirth = new AttributesImpl();
        util.addAttribute(attrBirth, "value", birthDate);
        util.startElement("birthTime", attrBirth);
        util.endElement("birthTime");
        util.endElement("patientPatient");
    }

    /**
     * Emits the &lt;author&gt; subtree; most fields are still placeholders,
     * only the request URL reference is filled.
     */
    private void addAuthor() throws SAXException {
        // TODO
        util.startElement("author", EMPTY_ATTRIBUTES);
        util.startElement("noteText", EMPTY_ATTRIBUTES);
        AttributesImpl attr = new AttributesImpl();
        util.addAttribute(attr, "value", reqURL);
        util.startElement("reference", attr);
        util.endElement("reference");
        util.endElement("noteText");
        util.startElement("assignedAuthor", EMPTY_ATTRIBUTES);
        AttributesImpl attrsID = new AttributesImpl();
        util.addAttribute(attrsID, "root", ""); // TODO
        util.addAttribute(attrsID, "extension", ""); // TODO
        util.startElement("id", attrsID);
        util.endElement("id");
        util.startElement("assignedDevice", EMPTY_ATTRIBUTES);
        AttributesImpl attrsCode = new AttributesImpl();
        util.addAttribute(attrsCode, "code", ""); // TODO
        util.addAttribute(attrsCode, "codeSystem", ""); // TODO
        util.addAttribute(attrsCode, "displayName", ""); // TODO
        util.startElement("code", attrsCode);
        util.endElement("code");
        util.startElement("manufacturerModelName", EMPTY_ATTRIBUTES);
        // TODO
        th.characters("TODO".toCharArray(), 0, 4);
        util.endElement("manufacturerModelName");
        util.startElement("softwareName", EMPTY_ATTRIBUTES);
        // TODO
        th.characters("TODO".toCharArray(), 0, 4);
        util.endElement("softwareName");
        util.endElement("assignedDevice");
        util.endElement("assignedAuthor");
        util.endElement("author");
    }

    /**
     * Emits one &lt;component&gt; per document in the list.
     */
    private void addDocuments() throws SAXException {
        for (Iterator iter = datasets.iterator(); iter.hasNext();) {
            addComponent((Dataset) iter.next());
        }
    }

    /**
     * Emits the &lt;component&gt; subtree for one document: id, code, title,
     * retrieve link, status code and effective time.
     */
    private void addComponent(Dataset ds) throws SAXException {
        String uid = ds.getString(Tags.SOPInstanceUID);
        if (uid == null) {
            uid = "---";
        }
        Date date = null;
        // SR documents and encapsulated PDFs carry ContentDate/Time; other
        // (waveform) objects use AcquisitionDatetime
        if ("SR".equals(ds.getString(Tags.Modality))
                || UIDs.EncapsulatedPDFStorage.equals(ds.getString(Tags.SOPClassUID))) {
            date = ds.getDateTime(Tags.ContentDate, Tags.ContentTime);
        } else {
            date = ds.getDate(Tags.AcquisitionDatetime);
        }
        String acquisTime = "";
        if (date != null) {
            acquisTime = DATETIME_FORMATTER.format(date);
        }
        String title = "DocumentTitle";
        String link = docRIDUrl + "/IHERetrieveDocument?requestType=DOCUMENT&documentUID="
                + uid + "&preferredContentType=application/pdf";
        util.startElement("component", EMPTY_ATTRIBUTES);
        util.startElement("documentInformation", EMPTY_ATTRIBUTES);
        // id
        AttributesImpl attrID = new AttributesImpl();
        util.addAttribute(attrID, "root", uid);
        util.startElement("id", attrID);
        util.endElement("id");
        // component code (SUMMARY, SUMMARY_RADIOLOGY, ..)
        addComponentCode(ds);
        // title
        util.startElement("title", EMPTY_ATTRIBUTES);
        th.characters(title.toCharArray(), 0, title.length());
        util.endElement("title");
        // text (retrieve link)
        util.startElement("text", EMPTY_ATTRIBUTES);
        AttributesImpl attrTxt = new AttributesImpl();
        util.addAttribute(attrTxt, "value", link);
        util.startElement("reference", attrTxt);
        util.endElement("reference");
        util.endElement("text");
        // statusCode
        addComponentStatusCode(ds);
        // effective time
        AttributesImpl attrEff = new AttributesImpl();
        util.addAttribute(attrEff, "value", acquisTime);
        util.startElement("effectiveTime", attrEff);
        util.endElement("effectiveTime");
        util.endElement("documentInformation");
        util.endElement("component");
    }

    /**
     * Emits the &lt;statusCode&gt; element.
     * For SR: CompletionFlag/VerificationFlag; ecg: ???
     */
    private void addComponentStatusCode(Dataset ds) throws SAXException {
        String statusCode = ds.getString(Tags.CompletionFlag, "") + "/"
                + ds.getString(Tags.VerificationFlag, "");
        AttributesImpl attrStatusCode = new AttributesImpl();
        util.addAttribute(attrStatusCode, "code", statusCode);
        util.addAttribute(attrStatusCode, "codeSystem", "");
        util.startElement("statusCode", attrStatusCode);
        util.endElement("statusCode");
    }

    /**
     * Emits the &lt;code&gt; element of a component. For SR / encapsulated
     * PDF the ConceptNameCodeSeq is used (StudyDescription as display-name
     * fallback); for waveform objects the display name is derived from the
     * SOP Class UID.
     */
    private void addComponentCode(Dataset ds) throws SAXException {
        String code = "";
        String codeSystem = "";
        String displayname = "";
        String cuid = ds.getString(Tags.SOPClassUID);
        if ("SR".equals(ds.getString(Tags.Modality))
                || UIDs.EncapsulatedPDFStorage.equals(cuid)) {
            DcmElement elem = ds.get(Tags.ConceptNameCodeSeq);
            if (elem != null) {
                Dataset ds1 = elem.getItem(0);
                if (ds1 != null) {
                    code = ds1.getString(Tags.CodeValue);
                    codeSystem = ds1.getString(Tags.CodingSchemeDesignator);
                    displayname = ds1.getString(Tags.CodeMeaning);
                }
            }
            if (displayname == null) {
                displayname = ds.getString(Tags.StudyDescription);
            }
        } else { // ECG
            if (UIDs.TwelveLeadECGWaveformStorage.equals(cuid)) {
                displayname = "12-lead ECG";
            } else if (UIDs.GeneralECGWaveformStorage.equals(cuid)) {
                displayname = "General ECG";
            } else if (UIDs.AmbulatoryECGWaveformStorage.equals(cuid)) {
                displayname = "Ambulatory ECG";
            } else if (UIDs.HemodynamicWaveformStorage.equals(cuid)) {
                displayname = "Hemodynamic";
            } else if (UIDs.CardiacElectrophysiologyWaveformStorage.equals(cuid)) {
                displayname = "Cardiac Electrophysiology";
            }
        }
        AttributesImpl attrCode = new AttributesImpl();
        util.addAttribute(attrCode, "code", code);
        util.addAttribute(attrCode, "codeSystem", codeSystem);
        util.addAttribute(attrCode, "displayName", displayname);
        util.startElement("code", attrCode);
        util.endElement("code");
    }

    /**
     * Orders datasets by document date, newest first; datasets without a
     * date sort last. (Javadoc fixed: the old text wrongly claimed this
     * compared the modification time of File objects.)
     */
    public class DatasetDateComparator implements Comparator {

        public DatasetDateComparator() {
        }

        /**
         * Compares the document dates of two Dataset objects, descending.
         *
         * @param arg0 First argument (must be a Dataset)
         * @param arg1 Second argument (must be a Dataset)
         * @return &lt;0 if arg0 is newer, 0 if equal, &gt;0 if arg0 is older
         */
        public int compare(Object arg0, Object arg1) {
            Date d1 = getDateFromDS((Dataset) arg0);
            if (d1 == null) {
                return 1;
            }
            Date d2 = getDateFromDS((Dataset) arg1);
            return d2 == null ? -1 : d2.compareTo(d1);
        }
    }
}
package sso;

import java.util.ArrayList;

import sso.io.Parameters;
import sso.io.Problem;
import sso.io.Razpon;
import sso.io.Result;

/**
 * Particle used by the sphere swarm optimization class.
 * Knows how to move by itself; the only thing it needs is a global best
 * position object (the shared {@link Result}).
 *
 * @author Jure
 */
public class Particle {

    public int noOfDim;                     // number of dimensions (incl. the extra sphere dimension)
    Problem prob;                           // problem definition (ranges, objective, MAX flag)
    Parameters param;                       // swarm parameters (attraction factors, thresholds)
    public Result globalBest;               // shared best-so-far of the whole swarm
    public final ArrayList<Double> razponi; // per-dimension range widths (to - from)
    public boolean IZPIS;                   // verbose console output flag

    public ArrayList<Double> po;        // old position
    public ArrayList<Double> pn;        // new position
    public ArrayList<Double> oldOldPos; // old old position (position before last)
    public double score;                // objective value at pn
    public ArrayList<Double> pb;        // personal best position
    public double personalBest;         // objective value at pb

    /**
     * Creates a particle. score and personalBest are initialized to the
     * worst possible value for the problem's optimization direction so the
     * first evaluation always improves them.
     */
    public Particle(int noOfDim, Problem prob, Parameters param, Result globalBest, boolean IZPIS) {
        this.noOfDim = noOfDim;
        this.prob = prob;
        this.param = param;
        this.globalBest = globalBest;
        this.IZPIS = IZPIS;
        razponi = new ArrayList<Double>(noOfDim - 1);
        for (int i = 0; i < noOfDim - 1; i++) {
            double razpon = prob.razponi.get(i).to - prob.razponi.get(i).from;
            razponi.add(razpon);
        }
        po = new ArrayList<Double>(noOfDim);
        pn = new ArrayList<Double>(noOfDim);
        oldOldPos = new ArrayList<Double>(noOfDim);
        if (prob.MAX) {
            score = -Double.MAX_VALUE;
            personalBest = -Double.MAX_VALUE;
        } else {
            score = Double.MAX_VALUE;
            personalBest = Double.MAX_VALUE;
        }
        pb = new ArrayList<Double>(noOfDim);
    }

    /**
     * Moves the particle to a new position. Influences:
     * previous movement direction (repulsion from oldOldPos),
     * the personal best and the global best, each scaled by the global
     * speed (hitrost). Afterwards the result is projected back onto the
     * unit sphere.
     */
    public void premakni(double hitrost) {
        // repelled from the position before last (keeps the momentum)
        if (oldOldPos.size() > 0) {
            applyInfluence(oldOldPos, -param.fHitrost, hitrost);
        }
        // attracted to the personal best
        if (pb.size() > 0) {
            applyInfluence(pb, param.fPersonal, hitrost);
        }
        // attracted to the global best
        if (globalBest.xxx.size() > 0) {
            applyInfluence(globalBest.xxx, param.fGlobal, hitrost);
        }
        // remember the previous position
        for (int i = 0; i < noOfDim; i++) {
            oldOldPos.set(i, po.get(i));
        }
        correct(); // correct movements into sphere with radius 1 and update them
    }

    /**
     * Adds to pn a step towards (factor &gt; 0) or away from (factor &lt; 0)
     * the given target point, scaled by 1/r where r is the chord distance
     * between the target and the old position po, times hitrost.
     */
    private void applyInfluence(ArrayList<Double> target, double factor, double hitrost) {
        double r = 1;
        for (int i = 0; i < noOfDim; i++) {
            r -= target.get(i) * po.get(i); // 1 - dot(target, po)
        }
        // chord length between two unit vectors: sqrt(2 - 2*dot)
        r = Math.sqrt(2 * r) * hitrost;
        // FIX: the original guard was "r != Double.NaN", which is ALWAYS true
        // (every ==/!= comparison involving NaN yields false/true); it only
        // behaved correctly because "NaN > ZELO_BLIZU" is also false.
        if (!Double.isNaN(r) && r > param.ZELO_BLIZU) {
            for (int i = 0; i < noOfDim; i++) {
                double step = (target.get(i) - po.get(i)) / r * factor;
                pn.set(i, pn.get(i) + step);
            }
        }
    }

    /**
     * Corrects pn onto the radius-1 sphere and copies the result into po.
     */
    public void correct() {
        double r = 0;
        for (int j = 0; j < pn.size(); j++) {
            r += Math.pow(pn.get(j), 2);
        }
        r = Math.sqrt(r);
        if (r == 0.0) {
            // degenerate all-zero vector cannot be normalized; previously
            // this divided by zero and filled the position with NaN
            return;
        }
        for (int j = 0; j < pn.size(); j++) {
            double pnTemp = pn.get(j) / r;
            pn.set(j, pnTemp);
            po.set(j, pnTemp);
        }
    }

    /**
     * Calculates the f(x) of the problem's function at pn and updates the
     * personal and (if applicable) global best.
     */
    public void calculateScore() {
        // map to user coordinates, without the last dimension
        ArrayList<Double> preslikaniXi = preslikajParticle(pn);
        score = prob.enacba.calculate(preslikaniXi);
        if (isBetter(score, personalBest)) {
            personalBest = score;
            pb.clear();
            for (double p : pn) {
                pb.add(p);
            }
            if (isBetter(score, globalBest.fx)) {
                globalBest.fx = score;
                globalBest.xxx.clear();
                for (double p : pn) {
                    globalBest.xxx.add(p);
                }
                globalBest.xxxPreslikani.clear();
                globalBest.xxxPreslikani = preslikajParticle(pn);
                if (IZPIS) {
                    System.out.print("Novi najboljsi rezultat: " + globalBest.fx);
                    System.out.print(" Resitev: " + globalBest.xxx.toString() + " | ");
                    System.out.print(preslikajParticle(pn).toString());
                    System.out.println();
                }
            }
        }
    }

    /**
     * Maps a whole particle from base coordinates (-1, 1) into the ranges
     * given by the user (and drops the last dimension).
     */
    public ArrayList<Double> preslikajParticle(ArrayList<Double> par) {
        int size = prob.steviloSpremenljivk;
        ArrayList<Double> preslikaniXi = new ArrayList<Double>(size);
        // copy without the last dimension
        for (int i = 0; i < size; i++) {
            preslikaniXi.add(par.get(i));
        }
        // compute the scale factor: distance from the origin to the
        // projection of the point onto the cube
        correct(preslikaniXi);      // project onto the sphere
        correctKocka(preslikaniXi); // project onto the cube
        double razdalja = dobiRazdaljo(preslikaniXi);
        for (int i = 0; i < size; i++) {
            double a = par.get(i) * razdalja;
            preslikaniXi.set(i, preslikajDimenzijo(a, i));
        }
        return preslikaniXi;
    }

    /**
     * Maps a single coordinate from [-1, 1] into the user-specified range
     * of the given dimension.
     */
    public double preslikajDimenzijo(double stevilo, int dimenzija) {
        double min = prob.razponi.get(dimenzija).from;
        return ((stevilo + 1.0) * (razponi.get(dimenzija) / 2.0)) + min;
    }

    /** @return the Euclidean norm of the given vector. */
    public double dobiRazdaljo(ArrayList<Double> in) {
        double out = 0;
        for (double os : in) {
            out += os * os;
        }
        return Math.sqrt(out);
    }

    /**
     * Corrects the given point onto the radius-1 sphere in place.
     *
     * @param in the particle coordinates to normalize
     */
    public void correct(ArrayList<Double> in) {
        double r = 0;
        for (int j = 0; j < in.size(); j++) {
            r += Math.pow(in.get(j), 2);
        }
        r = Math.sqrt(r);
        if (r == 0.0) {
            return; // see correct(): avoid NaN for the all-zero vector
        }
        for (int j = 0; j < in.size(); j++) {
            in.set(j, in.get(j) / r);
        }
    }

    /**
     * Projects the point onto the surface of the radius-1 CUBE in place:
     * the dominant axis becomes +/-1, the remaining axes are scaled by the
     * dominant component's magnitude.
     */
    public void correctKocka(ArrayList<Double> in) {
        // find the dominant axis
        int domIndex = 0;
        double domAbs = Math.abs(in.get(0));
        for (int i = 1; i < in.size(); i++) {
            if (Math.abs(in.get(i)) > domAbs) {
                domAbs = Math.abs(in.get(i));
                domIndex = i;
            }
        }
        // FIX: scale by the ABSOLUTE value of the dominant component; the
        // original divided by the signed value, mirroring every other axis
        // whenever the dominant component was negative. (Harmless for the
        // current caller preslikajParticle, which only takes the norm of
        // the result, but wrong as a cube projection.)
        for (int i = 0; i < in.size(); i++) {
            if (i != domIndex) {
                in.set(i, in.get(i) / domAbs);
            }
        }
        // the dominant axis collapses to its sign
        if (in.get(domIndex) > 0.0) {
            in.set(domIndex, 1.0);
        } else {
            in.set(domIndex, -1.0);
        }
    }

    /**
     * @return true if a is a better score than b with respect to the
     *         problem's optimization direction (maximize or minimize).
     */
    public boolean isBetter(double a, double b) {
        return prob.MAX ? a > b : a < b;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.federation.store.records.impl.pb; import java.io.IOException; import org.apache.hadoop.hdfs.federation.protocol.proto.HdfsServerFederationProtos.RouterRecordProto; import org.apache.hadoop.hdfs.federation.protocol.proto.HdfsServerFederationProtos.RouterRecordProto.Builder; import org.apache.hadoop.hdfs.federation.protocol.proto.HdfsServerFederationProtos.RouterRecordProtoOrBuilder; import org.apache.hadoop.hdfs.federation.protocol.proto.HdfsServerFederationProtos.StateStoreVersionRecordProto; import org.apache.hadoop.hdfs.server.federation.router.RouterServiceState; import org.apache.hadoop.hdfs.server.federation.store.driver.StateStoreSerializer; import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.FederationProtocolPBTranslator; import org.apache.hadoop.hdfs.server.federation.store.records.RouterState; import org.apache.hadoop.hdfs.server.federation.store.records.StateStoreVersion; import com.google.protobuf.Message; /** * Protobuf implementation of the RouterState record. 
*/
public class RouterStatePBImpl extends RouterState implements PBRecord {

  // Translates between the RouterState record and its protobuf wire form;
  // all getters/setters below delegate to this translator's proto/builder.
  private FederationProtocolPBTranslator<RouterRecordProto, Builder,
      RouterRecordProtoOrBuilder> translator =
          new FederationProtocolPBTranslator<RouterRecordProto, Builder,
              RouterRecordProtoOrBuilder>(RouterRecordProto.class);

  public RouterStatePBImpl() {
  }

  /** Builds a record view backed by an existing protobuf message. */
  public RouterStatePBImpl(RouterRecordProto proto) {
    this.translator.setProto(proto);
  }

  @Override
  public RouterRecordProto getProto() {
    return this.translator.build();
  }

  @Override
  public void setProto(Message proto) {
    this.translator.setProto(proto);
  }

  /** Populates this record from its base64-serialized protobuf form. */
  @Override
  public void readInstance(String base64String) throws IOException {
    this.translator.readInstance(base64String);
  }

  // Optional string field: null clears it (protobuf has no null strings).
  @Override
  public void setAddress(String address) {
    RouterRecordProto.Builder builder = this.translator.getBuilder();
    if (address == null) {
      builder.clearAddress();
    } else {
      builder.setAddress(address);
    }
  }

  /** @return the router address, or null if the proto field is unset. */
  @Override
  public String getAddress() {
    RouterRecordProtoOrBuilder proto = this.translator.getProtoOrBuilder();
    if (!proto.hasAddress()) {
      return null;
    }
    return proto.getAddress();
  }

  // Only a PB-backed version can be embedded; any other implementation
  // clears the field rather than failing.
  @Override
  public void setStateStoreVersion(StateStoreVersion version) {
    RouterRecordProto.Builder builder = this.translator.getBuilder();
    if (version instanceof StateStoreVersionPBImpl) {
      StateStoreVersionPBImpl versionPB = (StateStoreVersionPBImpl)version;
      StateStoreVersionRecordProto versionProto =
          (StateStoreVersionRecordProto)versionPB.getProto();
      builder.setStateStoreVersion(versionProto);
    } else {
      builder.clearStateStoreVersion();
    }
  }

  /**
   * @return the embedded State Store version record, or null if unset.
   * @throws IOException if the serializer produced a non-PB record
   *         implementation that cannot carry the proto.
   */
  @Override
  public StateStoreVersion getStateStoreVersion() throws IOException {
    RouterRecordProtoOrBuilder proto = this.translator.getProtoOrBuilder();
    if (!proto.hasStateStoreVersion()) {
      return null;
    }
    StateStoreVersionRecordProto versionProto = proto.getStateStoreVersion();
    StateStoreVersion version =
        StateStoreSerializer.newRecord(StateStoreVersion.class);
    if (version instanceof StateStoreVersionPBImpl) {
      StateStoreVersionPBImpl versionPB = (StateStoreVersionPBImpl)version;
      versionPB.setProto(versionProto);
      return versionPB;
    } else {
      throw new IOException("Cannot get State Store version");
    }
  }

  /**
   * @return the router service state, or null if unset.
   *         NOTE(review): valueOf throws IllegalArgumentException if the
   *         stored string is not a RouterServiceState name — confirm stored
   *         values always come from setStatus below.
   */
  @Override
  public RouterServiceState getStatus() {
    RouterRecordProtoOrBuilder proto = this.translator.getProtoOrBuilder();
    if (!proto.hasStatus()) {
      return null;
    }
    return RouterServiceState.valueOf(proto.getStatus());
  }

  // Stored as the enum constant's name; null clears the field.
  @Override
  public void setStatus(RouterServiceState newStatus) {
    RouterRecordProto.Builder builder = this.translator.getBuilder();
    if (newStatus == null) {
      builder.clearStatus();
    } else {
      builder.setStatus(newStatus.toString());
    }
  }

  /** @return the software version string, or null if unset. */
  @Override
  public String getVersion() {
    RouterRecordProtoOrBuilder proto = this.translator.getProtoOrBuilder();
    if (!proto.hasVersion()) {
      return null;
    }
    return proto.getVersion();
  }

  @Override
  public void setVersion(String version) {
    RouterRecordProto.Builder builder = this.translator.getBuilder();
    if (version == null) {
      builder.clearVersion();
    } else {
      builder.setVersion(version);
    }
  }

  /** @return the build/compile info string, or null if unset. */
  @Override
  public String getCompileInfo() {
    RouterRecordProtoOrBuilder proto = this.translator.getProtoOrBuilder();
    if (!proto.hasCompileInfo()) {
      return null;
    }
    return proto.getCompileInfo();
  }

  @Override
  public void setCompileInfo(String info) {
    RouterRecordProto.Builder builder = this.translator.getBuilder();
    if (info == null) {
      builder.clearCompileInfo();
    } else {
      builder.setCompileInfo(info);
    }
  }

  // Timestamp fields (ms since epoch) delegate directly; proto defaults
  // apply when unset.
  @Override
  public void setDateStarted(long dateStarted) {
    this.translator.getBuilder().setDateStarted(dateStarted);
  }

  @Override
  public long getDateStarted() {
    return this.translator.getProtoOrBuilder().getDateStarted();
  }

  @Override
  public void setDateModified(long time) {
    this.translator.getBuilder().setDateModified(time);
  }

  @Override
  public long getDateModified() {
    return this.translator.getProtoOrBuilder().getDateModified();
  }

  @Override
  public void setDateCreated(long time) {
    this.translator.getBuilder().setDateCreated(time);
  }

  @Override
  public long getDateCreated() {
    return this.translator.getProtoOrBuilder().getDateCreated();
  }
}
package sh.isaac.komet.changeset.view;

import java.io.File;
import java.io.FileNotFoundException;
import java.net.URL;
import java.util.ResourceBundle;
import java.util.stream.Stream;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeTableColumn;
import javafx.scene.control.TreeTableView;
import javafx.util.Callback;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import sh.isaac.api.Get;
import sh.isaac.api.chronicle.Version;
import sh.isaac.api.component.semantic.version.ComponentNidVersion;
import sh.isaac.api.component.semantic.version.DescriptionVersion;
import sh.isaac.api.component.semantic.version.LogicGraphVersion;
import sh.isaac.api.component.semantic.version.StringVersion;
import sh.isaac.api.externalizable.BinaryDataReaderService;
import sh.isaac.api.externalizable.IsaacExternalizable;
import sh.isaac.api.externalizable.IsaacObjectType;
import sh.isaac.api.util.time.DateTimeUtil;
import sh.isaac.model.concept.ConceptChronologyImpl;
import sh.isaac.model.semantic.SemanticChronologyImpl;

/**
 * JavaFX controller for the change-set viewer: loads an ISAAC change-set file
 * and renders its chronologies and versions in a {@link TreeTableView}.
 * Column values are produced by the private cell-value-factory callbacks below.
 */
public class FXMLController {

    protected static final Logger LOG = LogManager.getLogger();

    @FXML // ResourceBundle that was given to the FXMLLoader
    private ResourceBundle resources;

    @FXML // URL location of the FXML file that was given to the FXMLLoader
    private URL location;

    @FXML // fx:id="changeSetTreeTable"
    private TreeTableView<?> changeSetTreeTable; // Value injected by FXMLLoader

    @FXML // fx:id="typeColumn"
    private TreeTableColumn typeColumn; // Value injected by FXMLLoader

    @FXML // fx:id="infoColumn"
    private TreeTableColumn infoColumn; // Value injected by FXMLLoader

    @FXML // fx:id="statusColumn"
    private TreeTableColumn statusColumn; // Value injected by FXMLLoader

    @FXML // fx:id="timeColumn"
    private TreeTableColumn timeColumn; // Value injected by FXMLLoader

    @FXML // fx:id="authorColumn"
    private TreeTableColumn authorColumn; // Value injected by FXMLLoader

    @FXML // fx:id="moduleColumn"
    private TreeTableColumn moduleColumn; // Value injected by FXMLLoader

    @FXML // fx:id="pathColumn"
    private TreeTableColumn pathColumn; // Value injected by FXMLLoader

    /**
     * Called by the FXMLLoader when injection is complete: verifies injection
     * and wires a cell-value factory onto each column.
     */
    @FXML
    void initialize() {
        assert changeSetTreeTable != null : "fx:id=\"changeSetTreeTable\" was not injected: check your FXML file 'Scene.fxml'.";
        assert typeColumn != null : "fx:id=\"typeColumn\" was not injected: check your FXML file 'Scene.fxml'.";
        assert infoColumn != null : "fx:id=\"infoColumn\" was not injected: check your FXML file 'Scene.fxml'.";
        assert statusColumn != null : "fx:id=\"statusColumn\" was not injected: check your FXML file 'Scene.fxml'.";
        assert timeColumn != null : "fx:id=\"timeColumn\" was not injected: check your FXML file 'Scene.fxml'.";
        assert authorColumn != null : "fx:id=\"authorColumn\" was not injected: check your FXML file 'Scene.fxml'.";
        assert moduleColumn != null : "fx:id=\"moduleColumn\" was not injected: check your FXML file 'Scene.fxml'.";
        assert pathColumn != null : "fx:id=\"pathColumn\" was not injected: check your FXML file 'Scene.fxml'.";

        this.statusColumn.setCellValueFactory(new StatusCallbackImpl());
        this.timeColumn.setCellValueFactory(new TimeCallbackImpl());
        this.authorColumn.setCellValueFactory(new AuthorCallbackImpl());
        this.moduleColumn.setCellValueFactory(new ModuleCallbackImpl());
        this.pathColumn.setCellValueFactory(new PathCallbackImpl());
        this.typeColumn.setCellValueFactory(new TypeCallbackImpl());
        this.infoColumn.setCellValueFactory(new InfoCallbackImpl());
    }

    /**
     * Loads the given change-set file and populates the tree table with its
     * contents. The (hidden) root node collects one child per serialized item.
     *
     * @param changeSetFile the change-set file to read
     */
    public void setFile(File changeSetFile) {
        try {
            BinaryDataReaderService reader = Get.binaryDataReader(changeSetFile.toPath());
            Stream<IsaacExternalizable> externalizableStream = reader.getStream();
            TreeItem root = new TreeItem("root");
            changeSetTreeTable.setRoot(root);
            changeSetTreeTable.showRootProperty().set(false);
            externalizableStream.forEach(item -> processItem(root, item));
        } catch (FileNotFoundException ex) {
            LOG.error(ex.getLocalizedMessage(), ex);
        }
    }

    /**
     * Recursively adds {@code item} (a chronology or one of its versions)
     * under {@code parent}. Concept and semantic chronologies expand into one
     * child per version; STAMP-family objects become leaf nodes.
     *
     * @param parent the tree node to attach to
     * @param item   an {@link IsaacExternalizable} or a {@link Version}
     * @throws UnsupportedOperationException for unrecognized object types
     */
    private void processItem(TreeItem parent, Object item) {
        if (item instanceof IsaacExternalizable) {
            IsaacObjectType objectType = ((IsaacExternalizable) item).getIsaacObjectType();
            switch (objectType) {
                case CONCEPT: {
                    ConceptChronologyImpl conceptItem = (ConceptChronologyImpl) item;
                    TreeItem conceptTreeItem = new TreeItem(item);
                    conceptTreeItem.setExpanded(true);
                    parent.getChildren().add(conceptTreeItem);
                    for (Version version : conceptItem.getVersionList()) {
                        processItem(conceptTreeItem, version);
                    }
                    break;
                }
                case SEMANTIC: {
                    SemanticChronologyImpl semanticItem = (SemanticChronologyImpl) item;
                    TreeItem semanticTreeItem = new TreeItem(item);
                    semanticTreeItem.setExpanded(true);
                    parent.getChildren().add(semanticTreeItem);
                    for (Version version : semanticItem.getVersionList()) {
                        processItem(semanticTreeItem, version);
                    }
                    break;
                }
                // The three STAMP-family object types are all displayed the
                // same way: a leaf labelled with the object type.
                case STAMP:
                case STAMP_ALIAS:
                case STAMP_COMMENT: {
                    parent.getChildren().add(new TreeItem(objectType));
                    break;
                }
                case UNKNOWN:
                default:
                    throw new UnsupportedOperationException("Can't handle: " + objectType);
            }
        } else if (item instanceof Version) {
            // Individual versions become leaves; column callbacks pull their fields.
            TreeItem versionTreeItem = new TreeItem(item);
            parent.getChildren().add(versionTreeItem);
        }
    }

    /** Supplies the "type" column: object type, version type for semantics, or "version". */
    private class TypeCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                IsaacObjectType objectType = ((IsaacExternalizable) item).getIsaacObjectType();
                if (objectType == IsaacObjectType.SEMANTIC) {
                    SemanticChronologyImpl semanticItem = (SemanticChronologyImpl) item;
                    return new ReadOnlyObjectWrapper(semanticItem.getVersionType());
                }
                return new ReadOnlyObjectWrapper(objectType);
            } else if (item instanceof Version) {
                return new ReadOnlyObjectWrapper("version");
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }

    /** Supplies the "status" column: blank for chronologies, status for versions. */
    private class StatusCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                return null;
            } else if (item instanceof Version) {
                Version version = (Version) item;
                return new ReadOnlyObjectWrapper(version.getStatus());
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }

    /** Supplies the "time" column: formatted commit time for versions. */
    private class TimeCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                return null;
            } else if (item instanceof Version) {
                Version version = (Version) item;
                return new ReadOnlyObjectWrapper(DateTimeUtil.format(version.getTime()));
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }

    /** Supplies the "author" column: preferred description of the author concept. */
    private class AuthorCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                return null;
            } else if (item instanceof Version) {
                Version version = (Version) item;
                return new ReadOnlyObjectWrapper(Get.defaultCoordinate().getPreferredDescriptionText(version.getAuthorNid()));
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }

    /** Supplies the "module" column: preferred description of the module concept. */
    private class ModuleCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                return null;
            } else if (item instanceof Version) {
                Version version = (Version) item;
                return new ReadOnlyObjectWrapper(Get.defaultCoordinate().getPreferredDescriptionText(version.getModuleNid()));
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }

    /** Supplies the "path" column: preferred description of the path concept. */
    private class PathCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                return null;
            } else if (item instanceof Version) {
                Version version = (Version) item;
                return new ReadOnlyObjectWrapper(Get.defaultCoordinate().getPreferredDescriptionText(version.getPathNid()));
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }

    /**
     * Supplies the "info" column: identifiers for chronologies, and a
     * type-specific summary (text, referenced component, logic expression,
     * string value, or {@code toUserString()}) for versions.
     */
    private class InfoCallbackImpl implements Callback<TreeTableColumn.CellDataFeatures, ObservableValue> {

        @Override
        public ObservableValue<?> call(TreeTableColumn.CellDataFeatures cellData) {
            Object item = cellData.getValue().getValue();
            if (item instanceof IsaacExternalizable) {
                IsaacObjectType objectType = ((IsaacExternalizable) item).getIsaacObjectType();
                if (objectType == IsaacObjectType.SEMANTIC) {
                    SemanticChronologyImpl semanticItem = (SemanticChronologyImpl) item;
                    return new ReadOnlyObjectWrapper("id: " + semanticItem.getPrimordialUuid().toString()
                            + "\nrc: " + Get.identifierService().getUuidPrimordialForNid(semanticItem.getReferencedComponentNid()).toString()
                            + "\n " + Get.defaultCoordinate().getPreferredDescriptionText(semanticItem.getAssemblageNid()));
                } else if (objectType == IsaacObjectType.CONCEPT) {
                    ConceptChronologyImpl concept = (ConceptChronologyImpl) item;
                    return new ReadOnlyObjectWrapper("id: " + concept.getPrimordialUuid().toString()
                            + "\n " + Get.defaultCoordinate().getPreferredDescriptionText(concept.getAssemblageNid()));
                }
                return new ReadOnlyObjectWrapper(objectType);
            } else if (item instanceof Version) {
                Version version = (Version) item;
                switch (version.getSemanticType()) {
                    case DESCRIPTION: {
                        DescriptionVersion descriptionVersion = (DescriptionVersion) version;
                        return new ReadOnlyObjectWrapper(descriptionVersion.getText());
                    }
                    case COMPONENT_NID: {
                        ComponentNidVersion typedVersion = (ComponentNidVersion) version;
                        return new ReadOnlyObjectWrapper(Get.defaultCoordinate().getPreferredDescriptionText(typedVersion.getComponentNid()));
                    }
                    case LOGIC_GRAPH: {
                        LogicGraphVersion typedVersion = (LogicGraphVersion) version;
                        return new ReadOnlyObjectWrapper(typedVersion.getLogicalExpression().toString());
                    }
                    case STRING: {
                        StringVersion typedVersion = (StringVersion) version;
                        return new ReadOnlyObjectWrapper(typedVersion.getString());
                    }
                    case CONCEPT: {
                        return null;
                    }
                    // All remaining semantic types intentionally fall through to
                    // the generic toUserString() rendering below.
                    case DYNAMIC:
                    case Int1_Int2_Str3_Str4_Str5_Nid6_Nid7:
                    case LONG:
                    case MEASURE_CONSTRAINTS:
                    case MEMBER:
                    case Nid1_Int2:
                    case Nid1_Long2:
                    case Nid1_Int2_Str3_Str4_Nid5_Nid6:
                    case Nid1_Nid2:
                    case Nid1_Nid2_Int3:
                    case Nid1_Nid2_Str3:
                    case Nid1_Str2:
                    case RF2_RELATIONSHIP:
                    case Str1_Nid2_Nid3_Nid4:
                    case Str1_Str2:
                    case Str1_Str2_Nid3_Nid4:
                    case Str1_Str2_Nid3_Nid4_Nid5:
                    case Str1_Str2_Str3_Str4_Str5_Str6_Str7:
                    case UNKNOWN:
                }
                return new ReadOnlyObjectWrapper(version.toUserString());
            }
            return new ReadOnlyObjectWrapper(item.getClass().getName());
        }
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.coverage; import com.intellij.CommonBundle; import com.intellij.codeEditor.printing.ExportToHTMLSettings; import com.intellij.coverage.view.CoverageViewExtension; import com.intellij.coverage.view.CoverageViewManager; import com.intellij.coverage.view.JavaCoverageViewExtension; import com.intellij.execution.CommonJavaRunConfigurationParameters; import com.intellij.execution.application.ApplicationConfiguration; import com.intellij.execution.configurations.RunConfigurationBase; import com.intellij.execution.configurations.coverage.CoverageEnabledConfiguration; import com.intellij.execution.configurations.coverage.JavaCoverageEnabledConfiguration; import com.intellij.execution.testframework.AbstractTestProxy; import com.intellij.ide.BrowserUtil; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.compiler.CompileContext; import com.intellij.openapi.compiler.CompileStatusNotification; import com.intellij.openapi.compiler.CompilerManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import 
com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.CompilerModuleExtension; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.roots.TestSourcesFilter; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.io.FileUtilRt; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.controlFlow.*; import com.intellij.psi.impl.source.tree.java.PsiSwitchStatementImpl; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.GlobalSearchScopesCore; import com.intellij.psi.util.ClassUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.rt.coverage.data.JumpData; import com.intellij.rt.coverage.data.LineData; import com.intellij.rt.coverage.data.ProjectData; import com.intellij.rt.coverage.data.SwitchData; import com.intellij.rt.coverage.instrumentation.SaveHook; import com.intellij.util.containers.HashSet; import jetbrains.coverage.report.ClassInfo; import jetbrains.coverage.report.ReportBuilderFactory; import jetbrains.coverage.report.ReportGenerationFailedException; import jetbrains.coverage.report.SourceCodeProvider; import jetbrains.coverage.report.html.HTMLReportBuilder; import jetbrains.coverage.report.idea.IDEACoverageData; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jps.model.java.JavaSourceRootType; import java.io.File; import java.io.IOException; import java.util.*; /** * @author Roman.Chernyatchik */ public class 
JavaCoverageEngine extends CoverageEngine {
  private static final Logger LOG = Logger.getInstance(JavaCoverageEngine.class.getName());

  /** Looks up the registered singleton instance of this engine. */
  public static JavaCoverageEngine getInstance() {
    return Extensions.findExtension(EP_NAME, JavaCoverageEngine.class);
  }

  /** A configuration is covered if it is a common Java run config or an extension accepts it. */
  @Override
  public boolean isApplicableTo(@Nullable final RunConfigurationBase conf) {
    if (conf instanceof CommonJavaRunConfigurationParameters) {
      return true;
    }
    for (JavaCoverageEngineExtension extension : Extensions.getExtensions(JavaCoverageEngineExtension.EP_NAME)) {
      if (extension.isApplicableTo(conf)) {
        return true;
      }
    }
    return false;
  }

  /** Per-test coverage is supported for Java run configs except plain application configs. */
  @Override
  public boolean canHavePerTestCoverage(@Nullable RunConfigurationBase conf) {
    return !(conf instanceof ApplicationConfiguration) && conf instanceof CommonJavaRunConfigurationParameters;
  }

  @NotNull
  @Override
  public CoverageEnabledConfiguration createCoverageEnabledConfiguration(@Nullable final RunConfigurationBase conf) {
    return new JavaCoverageEnabledConfiguration(conf, this);
  }

  /**
   * Creates a suite from explicit parameters. Note: {@code suiteToMerge} is accepted
   * but not forwarded (null is passed as exclude patterns instead).
   */
  @Nullable
  @Override
  public CoverageSuite createCoverageSuite(@NotNull final CoverageRunner covRunner,
                                           @NotNull final String name,
                                           @NotNull final CoverageFileProvider coverageDataFileProvider,
                                           String[] filters,
                                           long lastCoverageTimeStamp,
                                           String suiteToMerge,
                                           boolean coverageByTestEnabled,
                                           boolean tracingEnabled,
                                           boolean trackTestFolders, Project project) {
    return createSuite(covRunner, name, coverageDataFileProvider, filters, null, lastCoverageTimeStamp,
                       coverageByTestEnabled, tracingEnabled, trackTestFolders, project);
  }

  /** Creates a suite from a Java coverage configuration; returns null for other config types. */
  @Override
  public CoverageSuite createCoverageSuite(@NotNull final CoverageRunner covRunner,
                                           @NotNull final String name,
                                           @NotNull final CoverageFileProvider coverageDataFileProvider,
                                           @NotNull final CoverageEnabledConfiguration config) {
    if (config instanceof JavaCoverageEnabledConfiguration) {
      final JavaCoverageEnabledConfiguration javaConfig = (JavaCoverageEnabledConfiguration)config;
      // Per-test tracking and tracing are both only meaningful when not sampling.
      return createSuite(covRunner, name, coverageDataFileProvider,
                         javaConfig.getPatterns(),
                         javaConfig.getExcludePatterns(),
                         new Date().getTime(),
                         javaConfig.isTrackPerTestCoverage() && !javaConfig.isSampling(),
                         !javaConfig.isSampling(),
                         javaConfig.isTrackTestFolders(),
                         config.getConfiguration().getProject());
    }
    return null;
  }

  @Nullable
  @Override
  public CoverageSuite createEmptyCoverageSuite(@NotNull CoverageRunner coverageRunner) {
    return new JavaCoverageSuite(this);
  }

  @NotNull
  @Override
  public CoverageAnnotator getCoverageAnnotator(Project project) {
    return JavaCoverageAnnotator.getInstance(project);
  }

  /**
   * Determines if coverage information should be displayed for given file.
   * Only files that belong to a module and contain Java classes qualify.
   *
   * @param psiFile the file being highlighted
   * @return true when editor coverage highlighting applies
   */
  public boolean coverageEditorHighlightingApplicableTo(@NotNull final PsiFile psiFile) {
    if (!(psiFile instanceof PsiClassOwner)) {
      return false;
    }
    // let's show coverage only for module files
    final Module module = ReadAction.compute(() -> ModuleUtilCore.findModuleForPsiElement(psiFile));
    return module != null;
  }

  /**
   * A file is accepted when some suite's package filter matches its package,
   * or the file contains one of a suite's explicitly listed classes.
   * Test sources are rejected outright when the bundle does not track test folders.
   */
  public boolean acceptedByFilters(@NotNull final PsiFile psiFile, @NotNull final CoverageSuitesBundle suite) {
    final VirtualFile virtualFile = psiFile.getVirtualFile();
    if (virtualFile == null) return false;
    final Project project = psiFile.getProject();
    if (!suite.isTrackTestFolders() && TestSourcesFilter.isTestSources(virtualFile, project)) {
      return false;
    }
    for (CoverageSuite coverageSuite : suite.getSuites()) {
      final JavaCoverageSuite javaSuite = (JavaCoverageSuite)coverageSuite;
      if (javaSuite.isPackageFiltered(ReadAction.compute(() -> ((PsiClassOwner)psiFile).getPackageName()))) {
        return true;
      } else {
        final List<PsiClass> classes = javaSuite.getCurrentSuiteClasses(project);
        for (PsiClass aClass : classes) {
          final PsiFile containingFile = ReadAction.compute(aClass::getContainingFile);
          if (psiFile.equals(containingFile)) {
            return true;
          }
        }
      }
    }
    return false;
  }

  /**
   * If the module's compiler output is missing but needed, offers the user a rebuild,
   * after which the suite bundle is re-chosen. Returns true when a (re)compile was
   * initiated (i.e. the caller should not proceed yet). Each module is only asked once
   * per suite via isModuleChecked/checkModule.
   */
  @Override
  public boolean recompileProjectAndRerunAction(@NotNull final Module module, @NotNull final CoverageSuitesBundle suite,
                                                @NotNull final Runnable chooseSuiteAction) {
    final VirtualFile outputpath = CompilerModuleExtension.getInstance(module).getCompilerOutputPath();
    final VirtualFile testOutputpath = CompilerModuleExtension.getInstance(module).getCompilerOutputPathForTests();

    if ((outputpath == null && isModuleOutputNeeded(module, JavaSourceRootType.SOURCE)) ||
        (suite.isTrackTestFolders() && testOutputpath == null && isModuleOutputNeeded(module, JavaSourceRootType.TEST_SOURCE))) {
      final Project project = module.getProject();
      if (suite.isModuleChecked(module)) return false;
      suite.checkModule(module);
      final Runnable runnable = () -> {
        if (Messages.showOkCancelDialog(
          "Project class files are out of date. Would you like to recompile? The refusal to do it will result in incomplete coverage information",
          "Project is out of date", Messages.getWarningIcon()) == Messages.OK) {
          final CompilerManager compilerManager = CompilerManager.getInstance(project);
          compilerManager.make(compilerManager.createProjectCompileScope(project), new CompileStatusNotification() {
            public void finished(final boolean aborted, final int errors, final int warnings, final CompileContext compileContext) {
              if (aborted || errors != 0) return;
              ApplicationManager.getApplication().invokeLater(() -> {
                if (project.isDisposed()) return;
                CoverageDataManager.getInstance(project).chooseSuitesBundle(suite);
              });
            }
          });
        } else if (!project.isDisposed()) {
          // User declined: drop the suite selection entirely.
          CoverageDataManager.getInstance(project).chooseSuitesBundle(null);
        }
      };
      ApplicationManager.getApplication().invokeLater(runnable);
      return true;
    }
    return false;
  }

  /** True when the module has at least one source root of the given type that is compiled. */
  private static boolean isModuleOutputNeeded(Module module, final JavaSourceRootType rootType) {
    CompilerManager compilerManager = CompilerManager.getInstance(module.getProject());
    return ModuleRootManager.getInstance(module).getSourceRoots(rootType).stream().anyMatch(vFile -> !compilerManager.isExcludedFromCompilation(vFile));
  }

  /**
   * Collects the source line numbers of a class file that was never loaded during the run.
   * Returns null when the class file cannot be read; read/parse failures of the bytecode
   * itself are logged and yield whatever was collected so far.
   */
  @Nullable
  public List<Integer> collectSrcLinesForUntouchedFile(@NotNull final File classFile,
                                                       @NotNull final CoverageSuitesBundle suite) {
    final List<Integer> uncoveredLines = new ArrayList<>();

    final byte[] content;
    try {
      content = FileUtil.loadFileBytes(classFile);
    }
    catch (IOException e) {
      return null;
    }

    try {
      SourceLineCounterUtil.collectSrcLinesForUntouchedFiles(uncoveredLines, content, suite.isTracingEnabled(), suite.getProject());
    }
    catch (Exception e) {
      LOG.error("Fail to process class from: " + classFile.getPath(), e);
    }
    return uncoveredLines;
  }

  /** An untouched file is included when any suite filters its class name or its package. */
  public boolean includeUntouchedFileInCoverage(@NotNull final String qualifiedName,
                                                @NotNull final File outputFile,
                                                @NotNull final PsiFile sourceFile,
                                                @NotNull CoverageSuitesBundle suite) {
    for (CoverageSuite coverageSuite : suite.getSuites()) {
      final JavaCoverageSuite javaSuite = (JavaCoverageSuite)coverageSuite;
      if (javaSuite.isClassFiltered(qualifiedName) || javaSuite.isPackageFiltered(getPackageName(sourceFile))) return true;
    }
    return false;
  }

  /** Builds a qualified class name from the source file's package and the output file's base name. */
  @NotNull
  public String getQualifiedName(@NotNull final File outputFile, @NotNull final PsiFile sourceFile) {
    final String packageFQName = getPackageName(sourceFile);
    return StringUtil.getQualifiedName(packageFQName, FileUtil.getNameWithoutExtension(outputFile));
  }

  /**
   * Collects qualified names of all classes in the file. Extensions get first say;
   * the first extension that returns true short-circuits the default per-class walk.
   */
  @NotNull
  @Override
  public Set<String> getQualifiedNames(@NotNull final PsiFile sourceFile) {
    final PsiClass[] classes = ReadAction.compute(() -> ((PsiClassOwner)sourceFile).getClasses());
    final Set<String> qNames = new HashSet<>();
    for (final JavaCoverageEngineExtension nameExtension : Extensions.getExtensions(JavaCoverageEngineExtension.EP_NAME)) {
      if (ReadAction.compute(() -> nameExtension.suggestQualifiedName(sourceFile, classes, qNames))) {
        return qNames;
      }
    }
    for (final PsiClass aClass : classes) {
      final String qName = ReadAction.compute(() -> aClass.getQualifiedName());
      if (qName == null) continue;
      qNames.add(qName);
    }
    return qNames;
  }

  /**
   * Finds the .class files (including inner classes, matched by the "Outer$..." naming
   * convention) produced for the classes in {@code srcFile}, looking in the module's
   * production output and, when tracked, its test output.
   */
  @NotNull
  public Set<File> getCorrespondingOutputFiles(@NotNull final PsiFile srcFile,
                                               @Nullable final Module module,
                                               @NotNull final CoverageSuitesBundle suite) {
    if (module == null) {
      return Collections.emptySet();
    }
    final Set<File> classFiles = new HashSet<>();
    final VirtualFile outputpath = CompilerModuleExtension.getInstance(module).getCompilerOutputPath();
    final VirtualFile testOutputpath = CompilerModuleExtension.getInstance(module).getCompilerOutputPathForTests();

    // Extensions may take over class-file collection entirely.
    for (JavaCoverageEngineExtension extension : Extensions.getExtensions(JavaCoverageEngineExtension.EP_NAME)) {
      if (extension.collectOutputFiles(srcFile, outputpath, testOutputpath, suite, classFiles)) return classFiles;
    }

    final String packageFQName = getPackageName(srcFile);
    final String packageVmName = packageFQName.replace('.', '/');

    final List<File> children = new ArrayList<>();
    // For the default package the output root itself is the directory to scan.
    final File vDir = outputpath == null ? null
                      : packageVmName.length() > 0 ? new File(outputpath.getPath() + File.separator + packageVmName)
                                                   : VfsUtilCore.virtualToIoFile(outputpath);
    if (vDir != null && vDir.exists()) {
      Collections.addAll(children, vDir.listFiles());
    }

    if (suite.isTrackTestFolders()) {
      final File testDir = testOutputpath == null ? null
                           : packageVmName.length() > 0 ? new File(testOutputpath.getPath() + File.separator + packageVmName)
                                                        : VfsUtilCore.virtualToIoFile(testOutputpath);
      if (testDir != null && testDir.exists()) {
        Collections.addAll(children, testDir.listFiles());
      }
    }

    final PsiClass[] classes = ReadAction.compute(() -> ((PsiClassOwner)srcFile).getClasses());
    for (final PsiClass psiClass : classes) {
      final String className = ReadAction.compute(() -> psiClass.getName());
      for (File child : children) {
        if (FileUtilRt.extensionEquals(child.getName(), StdFileTypes.CLASS.getDefaultExtension())) {
          final String childName = FileUtil.getNameWithoutExtension(child);
          if (childName.equals(className) || //class or inner
              childName.startsWith(className) && childName.charAt(className.length()) == '$') {
            classFiles.add(child);
          }
        }
      }
    }
    return classFiles;
  }

  /**
   * Builds the editor popup text for a covered line: total hits plus a per-branch
   * breakdown (if/loop jumps and switch cases) derived from the PSI control flow.
   * Extensions may supply the whole report instead.
   *
   * NOTE(review): when an extension returns a report it is appended to {@code buf}
   * but only the extension's text is returned (the "Hits:" prefix is dropped) —
   * looks intentional, confirm.
   */
  public String generateBriefReport(@NotNull Editor editor,
                                    @NotNull PsiFile psiFile,
                                    int lineNumber,
                                    int startOffset,
                                    int endOffset,
                                    @Nullable LineData lineData) {
    final StringBuffer buf = new StringBuffer();
    buf.append("Hits: ");
    if (lineData == null) {
      buf.append(0);
      return buf.toString();
    }
    buf.append(lineData.getHits()).append("\n");

    for (JavaCoverageEngineExtension extension : Extensions.getExtensions(JavaCoverageEngineExtension.EP_NAME)) {
      String report = extension.generateBriefReport(editor, psiFile, lineNumber, startOffset, endOffset, lineData);
      if (report != null) {
        buf.append(report);
        return report;
      }
    }

    // Conditions found in the line's statements, in control-flow order.
    // Index positions are assumed to line up with lineData's jumps/switches below.
    final List<PsiExpression> expressions = new ArrayList<>();

    final Project project = editor.getProject();
    for (int offset = startOffset; offset < endOffset; offset++) {
      PsiElement parent = PsiTreeUtil.getParentOfType(psiFile.findElementAt(offset), PsiStatement.class);
      PsiElement condition = null;
      if (parent instanceof PsiIfStatement) {
        condition = ((PsiIfStatement)parent).getCondition();
      }
      else if (parent instanceof PsiSwitchStatement) {
        condition = ((PsiSwitchStatement)parent).getExpression();
      }
      else if (parent instanceof PsiDoWhileStatement) {
        condition = ((PsiDoWhileStatement)parent).getCondition();
      }
      else if (parent instanceof PsiForStatement) {
        condition = ((PsiForStatement)parent).getCondition();
      }
      else if (parent instanceof PsiWhileStatement) {
        condition = ((PsiWhileStatement)parent).getCondition();
      }
      else if (parent instanceof PsiForeachStatement) {
        condition = ((PsiForeachStatement)parent).getIteratedValue();
      }
      else if (parent instanceof PsiAssertStatement) {
        condition = ((PsiAssertStatement)parent).getAssertCondition();
      }
      if (condition != null && PsiTreeUtil.isAncestor(condition, psiFile.findElementAt(offset), false)) {
        try {
          final ControlFlow controlFlow = ControlFlowFactory.getInstance(project).getControlFlow(
            parent, AllVariablesControlFlowPolicy.getInstance());
          for (Instruction instruction : controlFlow.getInstructions()) {
            if (instruction instanceof ConditionalBranchingInstruction) {
              final PsiExpression expression = ((ConditionalBranchingInstruction)instruction).expression;
              if (!expressions.contains(expression)) {
                expressions.add(expression);
              }
            }
          }
        }
        catch (AnalysisCanceledException e) {
          // Analysis aborted: fall back to the plain hit count gathered so far.
          return buf.toString();
        }
      }
    }

    final String indent = " ";
    try {
      int idx = 0; // walks expressions in lockstep with jump/switch records
      int hits = 0;
      if (lineData.getJumps() != null) {
        for (Object o : lineData.getJumps()) {
          final JumpData jumpData = (JumpData)o;
          if (jumpData.getTrueHits() + jumpData.getFalseHits() > 0) {
            final PsiExpression expression = expressions.get(idx++);
            final PsiElement parentExpression = expression.getParent();
            // For ||, do-while and assert the instrumented branch sense is inverted
            // relative to the source expression, so swap true/false counts.
            boolean reverse = parentExpression instanceof PsiPolyadicExpression && ((PsiPolyadicExpression)parentExpression).getOperationTokenType() == JavaTokenType.OROR ||
                              parentExpression instanceof PsiDoWhileStatement || parentExpression instanceof PsiAssertStatement;
            buf.append(indent).append(expression.getText()).append("\n");
            buf.append(indent).append(indent).append("true hits: ").append(reverse ? jumpData.getFalseHits() : jumpData.getTrueHits()).append("\n");
            buf.append(indent).append(indent).append("false hits: ").append(reverse ? jumpData.getTrueHits() : jumpData.getFalseHits()).append("\n");
            hits += jumpData.getTrueHits() + jumpData.getFalseHits();
          }
        }
      }

      if (lineData.getSwitches() != null) {
        for (Object o : lineData.getSwitches()) {
          final SwitchData switchData = (SwitchData)o;
          final PsiExpression conditionExpression = expressions.get(idx++);
          buf.append(indent).append(conditionExpression.getText()).append("\n");
          int i = 0;
          for (int key : switchData.getKeys()) {
            final int switchHits = switchData.getHits()[i++];
            buf.append(indent).append(indent).append("case ").append(key).append(": ").append(switchHits).append("\n");
            hits += switchHits;
          }
          int defaultHits = switchData.getDefaultHits();
          final boolean hasDefaultLabel = hasDefaultLabel(conditionExpression);
          if (hasDefaultLabel || defaultHits > 0) {
            if (!hasDefaultLabel) {
              // No explicit default: the recorded default count double-counts
              // case hits, so subtract what was already attributed.
              defaultHits -= hits;
            }
            if (hasDefaultLabel || defaultHits > 0) {
              buf.append(indent).append(indent).append("default: ").append(defaultHits).append("\n");
              hits += defaultHits;
            }
          }
        }
      }
      if (lineData.getHits() > hits && hits > 0) {
        buf.append("Unknown outcome: ").append(lineData.getHits() - hits);
      }
    }
    catch (Exception e) {
      // Any mismatch between PSI expressions and coverage records degrades
      // gracefully to the raw hit count.
      LOG.info(e);
      return "Hits: " + lineData.getHits();
    }
    return buf.toString();
  }

  /** Identifies a test method as "FQClassName.methodName"; otherwise the proxy's string form. */
  @Nullable
  public String getTestMethodName(@NotNull final PsiElement element,
                                  @NotNull final AbstractTestProxy testProxy) {
    if (element instanceof PsiMethod) {
      PsiMethod method = (PsiMethod)element;
      PsiClass aClass = method.getContainingClass();
      if (aClass != null) {
        String qualifiedName = aClass.getQualifiedName();
        if (qualifiedName != null) {
          return qualifiedName + "." + method.getName();
        }
      }
    }
    return testProxy.toString();
  }

  /**
   * Resolves test names encoded with '_' separators (package_Class_method) back to
   * PSI methods. When the direct split does not resolve to a class, the name is
   * truncated at '_' boundaries from the right until a class is found.
   */
  @NotNull
  public List<PsiElement> findTestsByNames(@NotNull String[] testNames, @NotNull Project project) {
    final List<PsiElement> elements = new ArrayList<>();
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    final GlobalSearchScope projectScope = GlobalSearchScope.projectScope(project);
    for (String testName : testNames) {
      PsiClass psiClass = facade.findClass(StringUtil.getPackageName(testName, '_').replaceAll("\\_", "\\."), projectScope);
      int lastIdx = testName.lastIndexOf("_");
      if (psiClass != null) {
        collectTestsByName(elements, testName, psiClass, lastIdx);
      }
      else {
        String className = testName;
        while (lastIdx > 0) {
          className = className.substring(0, lastIdx);
          psiClass = facade.findClass(StringUtil.getPackageName(className, '_').replaceAll("\\_", "\\."), projectScope);
          lastIdx = className.lastIndexOf("_");
          if (psiClass != null) {
            collectTestsByName(elements, testName, psiClass, lastIdx);
            break;
          }
        }
      }
    }
    return elements;
  }

  /** Adds the method named by the suffix after lastIdx only if it is unambiguous in psiClass. */
  private static void collectTestsByName(List<PsiElement> elements, String testName, PsiClass psiClass, int lastIdx) {
    final PsiMethod[] testsByName = psiClass.findMethodsByName(testName.substring(lastIdx + 1), true);
    if (testsByName.length == 1) {
      elements.add(testsByName[0]);
    }
  }

  /**
   * Whether the switch statement owning {@code conditionExpression} declares a default label.
   * NOTE(review): casts conditionExpression.getParent() straight to PsiSwitchStatementImpl —
   * assumes the expression's direct parent is the switch; confirm with callers.
   */
  private static boolean hasDefaultLabel(final PsiElement conditionExpression) {
    boolean hasDefault = false;
    final PsiSwitchStatement switchStatement = PsiTreeUtil.getParentOfType(conditionExpression, PsiSwitchStatement.class);
    final PsiCodeBlock body = ((PsiSwitchStatementImpl)conditionExpression.getParent()).getBody();
    if (body != null) {
      final PsiElement bodyElement = body.getFirstBodyElement();
      if (bodyElement != null) {
        PsiSwitchLabelStatement label = PsiTreeUtil.getNextSiblingOfType(bodyElement, PsiSwitchLabelStatement.class);
        while (label != null) {
          // Only labels of this switch count, not those of nested switches.
          if (label.getEnclosingSwitchStatement() == switchStatement) {
            hasDefault |= label.isDefaultCase();
          }
          label = PsiTreeUtil.getNextSiblingOfType(label, PsiSwitchLabelStatement.class);
        }
      }
    }
    return hasDefault;
  }

  /** Factory for the concrete suite; overridable by subclasses. */
  protected JavaCoverageSuite createSuite(CoverageRunner acceptedCovRunner,
                                          String name,
                                          CoverageFileProvider coverageDataFileProvider,
                                          String[] filters,
                                          String[] excludePatterns,
                                          long lastCoverageTimeStamp,
                                          boolean coverageByTestEnabled,
                                          boolean tracingEnabled,
                                          boolean trackTestFolders, Project project) {
    return new JavaCoverageSuite(name, coverageDataFileProvider, filters, excludePatterns, lastCoverageTimeStamp,
                                 coverageByTestEnabled, tracingEnabled, trackTestFolders, acceptedCovRunner, this, project);
  }

  /** Package name of the file, computed under a read action. */
  @NotNull
  protected static String getPackageName(final PsiFile sourceFile) {
    return ReadAction.compute(() -> ((PsiClassOwner)sourceFile).getPackageName());
  }

  /**
   * Generates an HTML coverage report in the background: saves the coverage data via
   * SaveHook, builds the report into ExportToHTMLSettings.OUTPUT_DIRECTORY, and — when
   * test folders are untracked — filters out classes that are not in the production scope.
   * Errors surface either in the log (IO) or as a dialog after completion.
   */
  protected static void generateJavaReport(@NotNull final Project project,
                                           final File tempFile,
                                           final CoverageSuitesBundle currentSuite) {
    final ExportToHTMLSettings settings = ExportToHTMLSettings.getInstance(project);
    final ProjectData projectData = currentSuite.getCoverageData();
    ProgressManager.getInstance().run(new Task.Backgroundable(project, "Generating coverage report ...") {
      final Exception[] myExceptions = new Exception[1];

      public void run(@NotNull final ProgressIndicator indicator) {
        try {
          new SaveHook(tempFile, true, new IdeaClassFinder(project, currentSuite)).save(projectData);
          final HTMLReportBuilder builder = ReportBuilderFactory.createHTMLReportBuilder();
          builder.setReportDir(new File(settings.OUTPUT_DIRECTORY));
          // Source text is resolved lazily per class from the PSI, in smart mode.
          final SourceCodeProvider sourceCodeProvider = new SourceCodeProvider() {
            public String getSourceCode(@NotNull final String classname) throws IOException {
              return DumbService.getInstance(project).runReadActionInSmartMode(() -> {
                if (project.isDisposed()) return "";
                final PsiClass psiClass = ClassUtil.findPsiClassByJVMName(PsiManager.getInstance(project), classname);
                return psiClass != null ? psiClass.getNavigationElement().getContainingFile().getText() : "";
              });
            }
          };
          builder.generateReport(new IDEACoverageData(projectData, sourceCodeProvider) {
            @NotNull
            @Override
            public Collection<ClassInfo> getClasses() {
              final Collection<ClassInfo> classes = super.getClasses();
              if (!currentSuite.isTrackTestFolders()) {
                // Drop classes that cannot be resolved in production scope.
                final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
                final GlobalSearchScope productionScope = GlobalSearchScopesCore.projectProductionScope(project);
                for (Iterator<ClassInfo> iterator = classes.iterator(); iterator.hasNext();) {
                  final ClassInfo aClass = iterator.next();
                  final PsiClass psiClass = DumbService.getInstance(project).runReadActionInSmartMode(() -> {
                    if (project.isDisposed()) return null;
                    return psiFacade.findClass(aClass.getFQName(), productionScope);
                  });
                  if (psiClass == null) {
                    iterator.remove();
                  }
                }
              }
              return classes;
            }
          });
        }
        catch (IOException e) {
          LOG.error(e);
        }
        catch (ReportGenerationFailedException e) {
          // Deferred: reported to the user in onSuccess().
          myExceptions[0] = e;
        }
      }

      @Override
      public void onSuccess() {
        if (myExceptions[0] != null) {
          Messages.showErrorDialog(project, myExceptions[0].getMessage(), CommonBundle.getErrorTitle());
          return;
        }
        if (settings.OPEN_IN_BROWSER) {
          BrowserUtil.browse(new File(settings.OUTPUT_DIRECTORY, "index.html"));
        }
      }
    });
  }

  /** Report generation needs a project SDK to be configured. */
  @Override
  public boolean isReportGenerationAvailable(@NotNull Project project,
                                             @NotNull DataContext dataContext,
                                             @NotNull CoverageSuitesBundle currentSuite) {
    Sdk projectSdk = ProjectRootManager.getInstance(project).getProjectSdk();
    return projectSdk != null;
  }

  /** Saves coverage data to a temp file and delegates to {@link #generateJavaReport}. */
  @Override
  public final void generateReport(@NotNull final Project project,
                                   @NotNull final DataContext dataContext,
                                   @NotNull final CoverageSuitesBundle currentSuite) {
    try {
      final File tempFile = FileUtil.createTempFile("temp", "");
      tempFile.deleteOnExit();
      generateJavaReport(project, tempFile, currentSuite);
    }
    catch (IOException e1) {
      LOG.error(e1);
    }
  }

  @Override
  public String getPresentableText() {
    return "Java Coverage";
  }

  @Override
  public boolean
isGeneratedCode(Project project, String qualifiedName, Object lineData) { if (JavaCoverageOptionsProvider.getInstance(project).ignoreEmptyPrivateConstructors()) { PsiClass psiClass = ReadAction.compute(() -> ClassUtil.findPsiClassByJVMName(PsiManager.getInstance(project), qualifiedName)); return PackageAnnotator.isGeneratedDefaultConstructor(psiClass, ((LineData)lineData).getMethodSignature()); } return super.isGeneratedCode(project, qualifiedName, lineData); } @Override public CoverageViewExtension createCoverageViewExtension(Project project, CoverageSuitesBundle suiteBundle, CoverageViewManager.StateBean stateBean) { return new JavaCoverageViewExtension((JavaCoverageAnnotator)getCoverageAnnotator(project), project, suiteBundle, stateBean); } public boolean isSourceMapNeeded(RunConfigurationBase configuration) { for (final JavaCoverageEngineExtension extension : Extensions.getExtensions(JavaCoverageEngineExtension.EP_NAME)) { if (extension.isSourceMapNeeded(configuration)) { return true; } } return false; } }
/* * CPAchecker is a tool for configurable software verification. * This file is part of CPAchecker. * * Copyright (C) 2007-2013 Dirk Beyer * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * CPAchecker web page: * http://cpachecker.sosy-lab.org */ package org.sosy_lab.cpachecker.core.reachedset; import static com.google.common.base.Preconditions.checkNotNull; import java.util.AbstractCollection; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Set; import org.sosy_lab.common.Pair; import org.sosy_lab.cpachecker.cfa.model.CFANode; import org.sosy_lab.cpachecker.core.interfaces.AbstractState; import org.sosy_lab.cpachecker.core.interfaces.Precision; import org.sosy_lab.cpachecker.core.waitlist.Waitlist; import org.sosy_lab.cpachecker.core.waitlist.Waitlist.WaitlistFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Iterators; /** * Basic implementation of ReachedSet. * It does not group states by location or any other key. 
*/ class DefaultReachedSet implements ReachedSet { private final LinkedHashMap<AbstractState, Precision> reached; private final Set<AbstractState> unmodifiableReached; private AbstractState lastState = null; private AbstractState firstState = null; private final Waitlist waitlist; DefaultReachedSet(WaitlistFactory waitlistFactory) { reached = new LinkedHashMap<>(); unmodifiableReached = Collections.unmodifiableSet(reached.keySet()); waitlist = waitlistFactory.createWaitlistInstance(); } @Override public void add(AbstractState state, Precision precision) throws IllegalArgumentException { Preconditions.checkNotNull(state); Preconditions.checkNotNull(precision); if (reached.size() == 0) { firstState = state; } Precision previousPrecision = reached.put(state, precision); if (previousPrecision == null) { // State wasn't already in the reached set. waitlist.add(state); lastState = state; } else { // State was already in the reached set. // This happens only if the MergeOperator produces a state that is already there. // The state may or may not be currently in the waitlist. // In the first case, we are not allowed to add it to the waitlist, // otherwise it would be in there twice (this method is responsible for // enforcing the set semantics of the waitlist). // In the second case, we do not need // to add it to the waitlist, because it was already handled // (we assume that the CPA would always produce the same successors if we // give it the same state twice). // So do nothing here. // But check if the new and the old precisions are equal. if (!precision.equals(previousPrecision)) { // Restore previous state of reached set // (a method shouldn't change state if it throws an IAE). 
reached.put(state, previousPrecision); throw new IllegalArgumentException("State added to reached set which is already contained, but with a different precision"); } } } @Override public void addAll(Iterable<Pair<AbstractState, Precision>> toAdd) { for (Pair<AbstractState, Precision> pair : toAdd) { add(pair.getFirst(), pair.getSecond()); } } @Override public void reAddToWaitlist(AbstractState s) { Preconditions.checkNotNull(s); Preconditions.checkArgument(reached.containsKey(s), "State has to be in the reached set"); if (!waitlist.contains(s)) { waitlist.add(s); } } @Override public void updatePrecision(AbstractState s, Precision newPrecision) { Preconditions.checkNotNull(s); Preconditions.checkNotNull(newPrecision); Precision oldPrecision = reached.put(s, newPrecision); if (oldPrecision == null) { // State was not contained in the reached set. // Restore previous state and throw exception. reached.remove(s); throw new IllegalArgumentException("State needs to be in the reached set in order to change the precision."); } } @Override public void remove(AbstractState state) { Preconditions.checkNotNull(state); int hc = state.hashCode(); if ((firstState == null) || hc == firstState.hashCode() && state.equals(firstState)) { firstState = null; } if ((lastState == null) || (hc == lastState.hashCode() && state.equals(lastState))) { lastState = null; } waitlist.remove(state); reached.remove(state); } @Override public void removeAll(Iterable<? 
extends AbstractState> toRemove) { for (AbstractState state : toRemove) { remove(state); } assert firstState != null || reached.isEmpty() : "firstState may only be removed if the whole reached set is cleared"; } @Override public void removeOnlyFromWaitlist(AbstractState state) { checkNotNull(state); waitlist.remove(state); } @Override public void clear() { firstState = null; lastState = null; waitlist.clear(); reached.clear(); } @Override public Set<AbstractState> asCollection() { return unmodifiableReached; } @Override public Iterator<AbstractState> iterator() { return unmodifiableReached.iterator(); } @Override public Collection<Precision> getPrecisions() { return Collections.unmodifiableCollection(reached.values()); } @Override public Collection<AbstractState> getReached(AbstractState state) { return asCollection(); } @Override public Collection<AbstractState> getReached(CFANode location) { return asCollection(); } @Override public AbstractState getFirstState() { Preconditions.checkState(firstState != null); return firstState; } @Override public AbstractState getLastState() { return lastState; } @Override public boolean hasWaitingState() { return !waitlist.isEmpty(); } @Override public Collection<AbstractState> getWaitlist() { return new AbstractCollection<AbstractState>() { @Override public Iterator<AbstractState> iterator() { return Iterators.unmodifiableIterator(waitlist.iterator()); } @Override public boolean contains(Object obj) { if (!(obj instanceof AbstractState)) { return false; } return waitlist.contains((AbstractState)obj); } @Override public boolean isEmpty() { return waitlist.isEmpty(); } @Override public int size() { return waitlist.size(); } @Override public String toString() { return waitlist.toString(); } }; } @Override public AbstractState popFromWaitlist() { return waitlist.pop(); } @Override public int getWaitlistSize() { return waitlist.size(); } @Override public Precision getPrecision(AbstractState state) { 
Preconditions.checkNotNull(state); Precision prec = reached.get(state); Preconditions.checkArgument(prec != null, "State not in reached set."); return prec; } @Override public boolean contains(AbstractState state) { Preconditions.checkNotNull(state); return reached.containsKey(state); } @Override public int size() { return reached.size(); } @Override public boolean isEmpty() { return (size() == 0); } @Override public String toString() { return reached.keySet().toString(); } }
package com.codahale.metrics.health;

import com.codahale.metrics.Clock;
import org.junit.Test;

import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code HealthCheck} and the {@code HealthCheck.Result} factory/builder API:
 * healthy/unhealthy results, formatted messages, detail maps, exception wrapping, and
 * timestamping via a pluggable {@code Clock}.
 */
public class HealthCheckTest {
    // Formatter used to render the expected timestamp string in the Clock-based tests.
    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");

    /**
     * Minimal concrete HealthCheck whose check() delegates to a (mocked) underlying
     * HealthCheck, letting tests drive execute() behavior through the mock.
     */
    private static class ExampleHealthCheck extends HealthCheck {
        private final HealthCheck underlying;

        private ExampleHealthCheck(HealthCheck underlying) {
            this.underlying = underlying;
        }

        @Override
        protected Result check() {
            // Delegate so the mock controls the returned Result (or thrown exception).
            return underlying.execute();
        }
    }

    private final HealthCheck underlying = mock(HealthCheck.class);
    private final HealthCheck healthCheck = new ExampleHealthCheck(underlying);

    // Result.healthy() with no message: healthy, null message, null error.
    @Test
    public void canHaveHealthyResults() {
        final HealthCheck.Result result = HealthCheck.Result.healthy();

        assertThat(result.isHealthy())
                .isTrue();

        assertThat(result.getMessage())
                .isNull();

        assertThat(result.getError())
                .isNull();
    }

    // Result.healthy(msg) carries the plain message through unchanged.
    @Test
    public void canHaveHealthyResultsWithMessages() {
        final HealthCheck.Result result = HealthCheck.Result.healthy("woo");

        assertThat(result.isHealthy())
                .isTrue();

        assertThat(result.getMessage())
                .isEqualTo("woo");

        assertThat(result.getError())
                .isNull();
    }

    // Result.healthy(fmt, args) applies String.format-style interpolation.
    @Test
    public void canHaveHealthyResultsWithFormattedMessages() {
        final HealthCheck.Result result = HealthCheck.Result.healthy("foo %s", "bar");

        assertThat(result.isHealthy())
                .isTrue();

        assertThat(result.getMessage())
                .isEqualTo("foo bar");

        assertThat(result.getError())
                .isNull();
    }

    // Result.unhealthy(msg): unhealthy with a message but no error.
    @Test
    public void canHaveUnhealthyResults() {
        final HealthCheck.Result result = HealthCheck.Result.unhealthy("bad");

        assertThat(result.isHealthy())
                .isFalse();

        assertThat(result.getMessage())
                .isEqualTo("bad");

        assertThat(result.getError())
                .isNull();
    }

    // Unhealthy results also support formatted messages (mixed %s/%d args).
    @Test
    public void canHaveUnhealthyResultsWithFormattedMessages() {
        final HealthCheck.Result result = HealthCheck.Result.unhealthy("foo %s %d", "bar", 123);

        assertThat(result.isHealthy())
                .isFalse();

        assertThat(result.getMessage())
                .isEqualTo("foo bar 123");

        assertThat(result.getError())
                .isNull();
    }

    // Result.unhealthy(Throwable): message comes from the exception, error is the exception itself.
    @Test
    public void canHaveUnhealthyResultsWithExceptions() {
        final RuntimeException e = mock(RuntimeException.class);
        when(e.getMessage()).thenReturn("oh noes");

        final HealthCheck.Result result = HealthCheck.Result.unhealthy(e);

        assertThat(result.isHealthy())
                .isFalse();

        assertThat(result.getMessage())
                .isEqualTo("oh noes");

        assertThat(result.getError())
                .isEqualTo(e);
    }

    // Builder path: withMessage(fmt, args) formats like the static factories.
    @Test
    public void canHaveHealthyBuilderWithFormattedMessage() {
        final HealthCheck.Result result = HealthCheck.Result.builder()
                .healthy()
                .withMessage("There are %d %s in the %s", 42, "foos", "bar")
                .build();

        assertThat(result.isHealthy())
                .isTrue();

        assertThat(result.getMessage())
                .isEqualTo("There are 42 foos in the bar");
    }

    // Builder path: details are exposed through getDetails() as a map entry.
    @Test
    public void canHaveHealthyBuilderWithDetail() {
        final HealthCheck.Result result = HealthCheck.Result.builder()
                .healthy()
                .withDetail("detail", "value")
                .build();

        assertThat(result.isHealthy())
                .isTrue();

        assertThat(result.getMessage())
                .isNull();

        assertThat(result.getError())
                .isNull();

        assertThat(result.getDetails())
                .containsEntry("detail", "value");
    }

    // Same as above, but for an unhealthy builder without an error.
    @Test
    public void canHaveUnHealthyBuilderWithDetail() {
        final HealthCheck.Result result = HealthCheck.Result.builder()
                .unhealthy()
                .withDetail("detail", "value")
                .build();

        assertThat(result.isHealthy())
                .isFalse();

        assertThat(result.getMessage())
                .isNull();

        assertThat(result.getError())
                .isNull();

        assertThat(result.getDetails())
                .containsEntry("detail", "value");
    }

    // unhealthy(e) + details: message/error come from the exception, details are preserved.
    @Test
    public void canHaveUnHealthyBuilderWithDetailAndError() {
        final RuntimeException e = mock(RuntimeException.class);
        when(e.getMessage()).thenReturn("oh noes");

        final HealthCheck.Result result = HealthCheck.Result
                .builder()
                .unhealthy(e)
                .withDetail("detail", "value")
                .build();

        assertThat(result.isHealthy())
                .isFalse();

        assertThat(result.getMessage())
                .isEqualTo("oh noes");

        assertThat(result.getError())
                .isEqualTo(e);

        assertThat(result.getDetails())
                .containsEntry("detail", "value");
    }

    // execute() returns the delegate's result and stamps a duration on it.
    @Test
    public void returnsResultsWhenExecuted() {
        final HealthCheck.Result result = mock(HealthCheck.Result.class);
        when(underlying.execute()).thenReturn(result);

        assertThat(healthCheck.execute())
                .isEqualTo(result);

        verify(result).setDuration(anyLong());
    }

    // execute() converts an exception thrown by check() into an unhealthy Result
    // (message from the exception, error set, no details, non-negative duration).
    @Test
    public void wrapsExceptionsWhenExecuted() {
        final RuntimeException e = mock(RuntimeException.class);
        when(e.getMessage()).thenReturn("oh noes");

        when(underlying.execute()).thenThrow(e);
        HealthCheck.Result actual = healthCheck.execute();

        assertThat(actual.isHealthy())
                .isFalse();
        assertThat(actual.getMessage())
                .isEqualTo("oh noes");
        assertThat(actual.getError())
                .isEqualTo(e);
        assertThat(actual.getDetails())
                .isNull();
        assertThat(actual.getDuration())
                .isGreaterThanOrEqualTo(0);
    }

    // usingClock(clock) drives both getTime() and the formatted getTimestamp().
    @Test
    public void canHaveUserSuppliedClockForTimestamp() {
        ZonedDateTime dateTime = ZonedDateTime.now().minusMinutes(10);
        Clock clock = clockWithFixedTime(dateTime);

        HealthCheck.Result result = HealthCheck.Result.builder()
                .healthy()
                .usingClock(clock)
                .build();

        assertThat(result.isHealthy()).isTrue();
        assertThat(result.getTime()).isEqualTo(clock.getTime());
        assertThat(result.getTimestamp())
                .isEqualTo(DATE_TIME_FORMATTER.format(dateTime));
    }

    // toString() must not NPE on null detail values; it renders them as "null".
    @Test
    public void toStringWorksEvenForNullAttributes() {
        ZonedDateTime dateTime = ZonedDateTime.now().minusMinutes(25);
        Clock clock = clockWithFixedTime(dateTime);

        final HealthCheck.Result resultWithNullDetailValue = HealthCheck.Result.builder()
                .unhealthy()
                .withDetail("aNullDetail", null)
                .usingClock(clock)
                .build();
        assertThat(resultWithNullDetailValue.toString())
                .contains(
                        "Result{isHealthy=false, duration=0, timestamp=" + DATE_TIME_FORMATTER.format(dateTime),
                        ", aNullDetail=null}");
    }

    /**
     * Builds a Clock frozen at the given wall-clock instant (getTick() is irrelevant
     * here and simply returns 0).
     */
    private static Clock clockWithFixedTime(ZonedDateTime dateTime) {
        return new Clock() {
            @Override
            public long getTick() {
                return 0;
            }

            @Override
            public long getTime() {
                return dateTime.toInstant().toEpochMilli();
            }
        };
    }
}
package org.neo4j.graphalgo.core.leightweight;

import org.apache.lucene.util.ArrayUtil;

import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.IntSupplier;

/**
 * Abstraction of an array of integer values that can contain more than 2B elements.
 *
 * @author [email protected]
 */
public final class IntArray implements Iterable<IntArray.Cursor> {

    // NOTE(review): after grow(), `size` is set to the new *capacity*, which can be
    // larger than the length originally requested — callers appear to treat size()
    // as capacity. TODO confirm against call sites.
    private long size;
    // Backing storage: fixed-size pages addressed by (index >>> PAGE_SHIFT, index & PAGE_MASK).
    private int[][] pages;

    /**
     * Page size in bytes: 16KB
     */
    private static final int PAGE_SIZE_IN_BYTES = 1 << 14;
    // Number of ints per page (PAGE_SIZE is a power of two, so shift/mask addressing works).
    private static final int PAGE_SIZE = PAGE_SIZE_IN_BYTES / Integer.BYTES;
    private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(PAGE_SIZE);
    private static final int PAGE_MASK = PAGE_SIZE - 1;

    /**
     * Allocate a new {@link IntArray}.
     * @param size the initial length of the array
     */
    public static IntArray newArray(long size) {
        return new IntArray(size);
    }

    private IntArray(long size) {
        this.size = size;
        pages = new int[numPages(size)][];
        // If the whole array fits into one page, allocate only as much as needed;
        // otherwise every page is allocated at full PAGE_SIZE.
        int maxPageSize = (int) Math.min(size, PAGE_SIZE);
        for (int i = 0; i < pages.length; ++i) {
            pages[i] = newIntPage(maxPageSize);
        }
    }

    /**
     * Return the length of this array.
     */
    public final long size() {
        return size;
    }

    /**
     * Get an element given its index.
     */
    public int get(long index) {
        final int pageIndex = pageIndex(index);
        final int indexInPage = indexInPage(index);
        return pages[pageIndex][indexInPage];
    }

    /**
     * Set a value at the given index and return the previous value.
     */
    public int set(long index, int value) {
        final int pageIndex = pageIndex(index);
        final int indexInPage = indexInPage(index);
        final int[] page = pages[pageIndex];
        final int ret = page[indexInPage];
        page[indexInPage] = value;
        return ret;
    }

    /**
     * Fill slots between {@code fromIndex} (inclusive) to {@code toIndex} (exclusive) with the value provided by {@code value}.
     */
    // NOTE(review): an empty range (fromIndex == toIndex, notably both 0) passes the
    // assertion but computes pageIndex(toIndex - 1) on -1 — verify callers never pass it.
    public void fill(
            final long fromIndex,
            final long toIndex,
            final IntSupplier value) {
        assert fromIndex <= toIndex : "can only fill positive slice";
        final int fromPage = pageIndex(fromIndex);
        final int toPage = pageIndex(toIndex - 1);
        if (fromPage == toPage) {
            // Range lies entirely within a single page.
            fill(
                    pages[fromPage],
                    indexInPage(fromIndex),
                    indexInPage(toIndex - 1) + 1,
                    value);
        } else {
            // Head page (partial), full middle pages, tail page (partial).
            fill(pages[fromPage], indexInPage(fromIndex), PAGE_SIZE, value);
            for (int i = fromPage + 1; i < toPage; ++i) {
                fill(pages[i], value);
            }
            fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, value);
        }
    }

    /**
     * Grows the IntArray to the new size. The existing content will be preserved.
     * If the current size is large enough, this is no-op and no downsizing is happening.
     */
    public void grow(final long newSize) {
        if (size < newSize) {
            final int currentNumPages = pages.length;
            final int numPages = numPages(newSize);
            if (numPages > currentNumPages) {
                if (currentNumPages == 1) {
                    // fill first page to full size
                    pages[0] = Arrays.copyOf(
                            pages[currentNumPages - 1],
                            PAGE_SIZE);
                }
                pages = Arrays.copyOf(pages, numPages);
                for (int i = currentNumPages; i < numPages ; i++) {
                    // we don't strip the last page here as we're already somewhat big
                    pages[i] = newIntPage();
                }
                // size now reflects full capacity of all pages, not the requested newSize.
                this.size = (long) numPages << (long) PAGE_SHIFT;
            } else if (currentNumPages == 1) {
                // Still a single (possibly short) page: oversize it amortized-style,
                // but never beyond one full page.
                int firstPageSize = Math.min(
                        PAGE_SIZE,
                        ArrayUtil.oversize((int) newSize, Integer.BYTES));
                pages[0] = Arrays.copyOf(pages[0], firstPageSize);
                this.size = (long) firstPageSize;
            }
        }
    }

    // Create a fresh BulkAdder (not initialized to any range).
    public BulkAdder bulkAdder() {
        return new BulkAdder();
    }

    // Create and initialize a BulkAdder over [offset, offset + length).
    public BulkAdder bulkAdder(long offset, long length) {
        return bulkAdder(offset, length, bulkAdder());
    }

    // Re-initialize an existing BulkAdder over [offset, offset + length).
    public BulkAdder bulkAdder(long offset, long length, BulkAdder reuse) {
        return reuse.init(offset, length);
    }

    // Create a fresh Cursor (not initialized to any range).
    public Cursor newCursor() {
        return new Cursor();
    }

    // Create and initialize a Cursor over [offset, offset + length).
    public Cursor cursor(long offset, long length) {
        return cursor(offset, length, newCursor());
    }

    // Re-initialize an existing Cursor over [offset, offset + length).
    public Cursor cursor(long offset, long length, Cursor reuse) {
        return reuse.init(offset, length);
    }

    @Override
    public Iterator<Cursor> iterator() {
        return new Iter(cursor(0, size));
    }

    // Iterate using a caller-supplied Cursor to avoid allocation.
    public Iterator<Cursor> iterator(Cursor reuse) {
        return new Iter(reuse.init(0, size));
    }

    // Reuse a previously returned iterator if it is ours; otherwise allocate a new one.
    public Iterator<Cursor> iterator(Iterator<Cursor> reuse) {
        if (reuse instanceof Iter) {
            Iter iter = (Iter) reuse;
            iter.cursor.init(0, size);
            iter.state = Iter.UNKNOWN;
            return iter;
        }
        return iterator();
    }

    // Number of pages needed for the given capacity (rounded up).
    private static int numPages(long capacity) {
        final long numPages = (capacity + PAGE_MASK) >>> PAGE_SHIFT;
        assert numPages <= Integer.MAX_VALUE : "pageSize=" + (PAGE_MASK + 1) + " is too small for such as capacity: " + capacity;
        return (int) numPages;
    }

    // Which page a global index falls into.
    private static int pageIndex(long index) {
        return (int) (index >>> PAGE_SHIFT);
    }

    // Offset of a global index within its page.
    private static int indexInPage(long index) {
        return (int) (index & PAGE_MASK);
    }

    private static int[] newIntPage() {
        return new int[PAGE_SIZE];
    }

    private static int[] newIntPage(int size) {
        return new int[size];
    }

    private static void fill(int[] array, IntSupplier value) {
        fill(array, 0, array.length, value);
    }

    // Fill array[from, to) with successive values from the supplier.
    private static void fill(int[] array, int from, int to, IntSupplier value) {
        for (int i = from; i < to; i++) {
            array[i] = value.getAsInt();
        }
    }

    /**
     * Callback for {@link Cursor#forEach}; returning a value is required by the
     * signature but the visible caller ignores it.
     */
    public interface IntAction<E extends Exception> {
        boolean accept(int value) throws E;
    }

    /**
     * Base for page-wise traversal of a range. After each successful {@link #next()},
     * {@code array}/{@code offset}/{@code limit} expose the current page slice.
     */
    private abstract class BaseCursor {

        public int[] array;
        public int offset;
        public int limit;

        private long from;
        private long to;
        private long size;
        private int fromPage;
        private int toPage;
        private int currentPage;

        BaseCursor init(long fromIndex, long length) {
            array = null;
            from = fromIndex;
            to = fromIndex + length;
            size = length;
            fromPage = pageIndex(fromIndex);
            toPage = pageIndex(to - 1);
            // Start one before the first page so the first next() lands on fromPage.
            currentPage = fromPage - 1;
            return this;
        }

        // Advance to the next page slice; returns false when the range is exhausted.
        public boolean next() {
            currentPage++;
            if (currentPage == fromPage) {
                // First page: may start mid-page and may be shorter than a full page.
                array = pages[currentPage];
                offset = indexInPage(from);
                int length = (int) Math.min(PAGE_SIZE - offset, size);
                limit = offset + length;
                return true;
            }
            if (currentPage < toPage) {
                // Middle pages are always traversed in full.
                array = pages[currentPage];
                offset = 0;
                limit = offset + PAGE_SIZE;
                return true;
            }
            if (currentPage == toPage) {
                // Last page: may end mid-page.
                array = pages[currentPage];
                offset = 0;
                int length = indexInPage(to - 1) + 1;
                limit = offset + length;
                return true;
            }
            array = null;
            return false;
        }
    }

    /**
     * Sequential writer over a range; grows the array on init and appends values
     * page by page.
     */
    public final class BulkAdder extends BaseCursor {
        BulkAdder init(long fromIndex, long length) {
            grow(fromIndex + length);
            super.init(fromIndex, length);
            // Position on the first page immediately so add() can write right away.
            next();
            return this;
        }

        // Write one value; advances to the next page when the current slice is full.
        public boolean add(int v) {
            int offset = this.offset++;
            if (offset < limit) {
                array[offset] = v;
                return true;
            }
            return next() && add(v);
        }
    }

    /**
     * Read-only page-wise cursor over a range.
     */
    public final class Cursor extends BaseCursor {
        Cursor init(long fromIndex, long length) {
            super.init(fromIndex, length);
            return this;
        }

        // Apply the action to every value in the *current* page slice,
        // stopping early if the action returns false.
        public <E extends Exception> void forEach(IntAction<E> action) throws E {
            final int[] array = this.array;
            final int limit = this.limit;
            int offset = this.offset;
            //noinspection StatementWithEmptyBody
            while (offset < limit && action.accept(array[offset++]));
        }
    }

    /**
     * Iterator adapter around a Cursor; caches the pending hasNext() answer in
     * {@code state} so hasNext() is idempotent.
     */
    private final class Iter implements Iterator<Cursor> {
        private final static int UNKNOWN = 0;
        private final static int HAS_NEXT = 1;
        private final static int DONE = 2;

        private int state = UNKNOWN;
        private Cursor cursor;

        private Iter(final Cursor cursor) {
            this.cursor = cursor;
        }

        @Override
        public boolean hasNext() {
            if (state == UNKNOWN) {
                state = cursor.next() ? HAS_NEXT : DONE;
            }
            return state == HAS_NEXT;
        }

        @Override
        public Cursor next() {
            if (!hasNext()) {
                throw new NoSuchElementException("exhausted");
            }
            // The same Cursor instance is returned each time, re-pointed at the next page.
            state = UNKNOWN;
            return cursor;
        }
    }
}
package org.batfish.dataplane.protocols; import static org.batfish.datamodel.BgpRoute.DEFAULT_LOCAL_PREFERENCE; import static org.batfish.datamodel.BgpRoute.DEFAULT_LOCAL_WEIGHT; import static org.batfish.datamodel.OriginMechanism.GENERATED; import static org.batfish.datamodel.OriginMechanism.LEARNED; import static org.batfish.datamodel.Route.UNSET_ROUTE_NEXT_HOP_IP; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import java.util.List; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.ParametersAreNonnullByDefault; import org.batfish.datamodel.AbstractRoute; import org.batfish.datamodel.AbstractRouteDecorator; import org.batfish.datamodel.AsPath; import org.batfish.datamodel.AsSet; import org.batfish.datamodel.BgpPeerConfig; import org.batfish.datamodel.BgpProcess; import org.batfish.datamodel.BgpRoute; import org.batfish.datamodel.BgpSessionProperties; import org.batfish.datamodel.Bgpv4Route; import org.batfish.datamodel.Bgpv4Route.Builder; import org.batfish.datamodel.EvpnRoute; import org.batfish.datamodel.EvpnType5Route; import org.batfish.datamodel.GeneratedRoute; import org.batfish.datamodel.Ip; import org.batfish.datamodel.OriginMechanism; import org.batfish.datamodel.OriginType; import org.batfish.datamodel.RoutingProtocol; import org.batfish.datamodel.bgp.AddressFamily; import org.batfish.datamodel.bgp.AddressFamily.Type; import org.batfish.datamodel.bgp.AllowRemoteAsOutMode; import org.batfish.datamodel.bgp.BgpAggregate; import org.batfish.datamodel.bgp.BgpTopologyUtils.ConfedSessionType; import org.batfish.datamodel.bgp.EvpnAddressFamily; import org.batfish.datamodel.bgp.community.StandardCommunity; import org.batfish.datamodel.route.nh.NextHop; import org.batfish.datamodel.route.nh.NextHopDiscard; import org.batfish.datamodel.route.nh.NextHopIp; import 
org.batfish.datamodel.route.nh.NextHopVtep; import org.batfish.datamodel.routing_policy.Environment.Direction; import org.batfish.datamodel.routing_policy.RoutingPolicy; import org.batfish.datamodel.routing_policy.communities.CommunitySet; @ParametersAreNonnullByDefault public final class BgpProtocolHelper { /** * Perform BGP export transformations on a given route when sending an advertisement from {@code * localNeighbor} to {@code remoteNeighbor} before export policy is applied. * * @param localNeighbor {@link BgpPeerConfig} exporting {@code route} * @param remoteNeighbor {@link BgpPeerConfig} to which to export {@code route} * @param localSessionProperties {@link BgpSessionProperties} representing the <em>outgoing</em> * edge: i.e. the edge from {@code localNeighbor} to {@code remoteNeighbor} * @param afType {@link AddressFamily.Type} the address family for which to look up the settings */ @Nullable public static <R extends BgpRoute<B, R>, B extends BgpRoute.Builder<B, R>> B transformBgpRoutePreExport( BgpPeerConfig localNeighbor, BgpPeerConfig remoteNeighbor, BgpSessionProperties localSessionProperties, BgpProcess localBgpProcess, BgpProcess remoteBgpProcess, BgpRoute<B, R> route, Type afType) { // Make a new builder B builder = route.toBuilder(); // this will be set later during export policy transformation or after it is exported builder.clearNextHop(); RoutingProtocol routeProtocol = route.getProtocol(); RoutingProtocol outgoingProtocol = localSessionProperties.isEbgp() ? 
RoutingProtocol.BGP : RoutingProtocol.IBGP; builder.setProtocol(outgoingProtocol); builder.setSrcProtocol(routeProtocol); // Clear a bunch of non-transitive attributes builder.setWeight(0); if (!(route instanceof EvpnRoute<?, ?>)) { // These attributes are constants for EVPN routes and cannot be set builder.setNonRouting(false); builder.setNonForwarding(false); builder.setAdmin(remoteBgpProcess.getAdminCost(outgoingProtocol)); } builder.setTag(null); // Set originatorIP if (localSessionProperties.isEbgp() || !routeProtocol.equals(RoutingProtocol.IBGP)) { // eBGP session and not iBGP route: override the originator builder.setOriginatorIp(localBgpProcess.getRouterId()); } // note whether new route is received from route reflector client AddressFamily toNeighborAf = remoteNeighbor.getAddressFamily(afType); assert toNeighborAf != null; // invariant of proper queue setup and route exchange for this AF type builder.setReceivedFromRouteReflectorClient( !localSessionProperties.isEbgp() && toNeighborAf.getRouteReflectorClient()); AddressFamily af = localNeighbor.getAddressFamily(afType); assert af != null; // Do not export route if it has NO_ADVERTISE community. 
if (route.getCommunities().getCommunities().contains(StandardCommunity.NO_ADVERTISE)) { return null; } // For eBGP, do not export if AS path contains the peer's AS in a disallowed position if (localSessionProperties.isEbgp() && !allowAsPathOut( route.getAsPath(), localSessionProperties.getRemoteAs(), af.getAddressFamilyCapabilities().getAllowRemoteAsOut())) { return null; } // Also do not export if route has NO_EXPORT community and this is a true ebgp session if (route.getCommunities().getCommunities().contains(StandardCommunity.NO_EXPORT) && localSessionProperties.isEbgp() && localSessionProperties.getConfedSessionType() != ConfedSessionType.WITHIN_CONFED) { return null; } /* * iBGP speaker should not send out routes to iBGP neighbor whose router-id is * same as originator id of advertisement */ if (!localSessionProperties.isEbgp() && remoteBgpProcess.getRouterId().equals(route.getOriginatorIp())) { return null; } builder.setClusterList(ImmutableSet.of()); boolean routeOriginatedLocally = Ip.ZERO.equals(route.getReceivedFromIp()); if (routeProtocol.equals(RoutingProtocol.IBGP) && !localSessionProperties.isEbgp()) { /* * The remote route is iBGP. The session is iBGP. We consider whether to reflect, and * modify the outgoing route as appropriate. * * For route reflection: reflect everything received from * clients to clients and non-clients. reflect everything * received from non-clients to clients. 
Do not reflect to * originator */ boolean remoteRouteReceivedFromRouteReflectorClient = route.getReceivedFromRouteReflectorClient(); boolean sendingToRouteReflectorClient = af.getRouteReflectorClient(); if (!remoteRouteReceivedFromRouteReflectorClient && !sendingToRouteReflectorClient && !routeOriginatedLocally) { /* * Neither reflecting nor originating this iBGP route, so don't send */ return null; } builder.addClusterList(route.getClusterList()); if (!routeOriginatedLocally) { // we are reflecting, so we need to get the clusterid associated with the // remoteRoute Long newClusterId = localNeighbor.getClusterId(); if (newClusterId != null) { builder.addToClusterList(newClusterId); } } Set<Long> localClusterIds = remoteBgpProcess.getClusterIds(); Set<Long> outgoingClusterList = builder.getClusterList(); if (localClusterIds.stream().anyMatch(outgoingClusterList::contains)) { /* * receiver will reject new route if it contains any of its local cluster ids */ return null; } } // Outgoing metric (MED) is preserved only if advertising to IBGP peer, within a confederation, // or for locally originated routes if (!localSessionProperties.advertiseUnchangedMed() && !routeOriginatedLocally) { builder.setMetric(0); } // Local preference: only transitive for iBGP or within a confederation builder.setLocalPreference( localSessionProperties.advertiseUnchangedLocalPref() ? route.getLocalPreference() : DEFAULT_LOCAL_PREFERENCE); return builder; } /** * Return {@code true} if an outgoing eBGP advertisement with given {@code asPath} to {@code * peerAs} should be allowed under the given {@code mode}. 
*/ @VisibleForTesting static boolean allowAsPathOut(AsPath asPath, long peerAs, AllowRemoteAsOutMode mode) { List<AsSet> asSets = asPath.getAsSets(); if (asPath.getAsSets().isEmpty()) { return true; } switch (mode) { case ALWAYS: return true; case NEVER: return asSets.stream().noneMatch(asSet -> asSet.containsAs(peerAs)); case EXCEPT_FIRST: return !asSets.get(0).containsAs(peerAs); default: throw new IllegalArgumentException( String.format("Unsupported AllowsRemoteAsOutMode: %s", mode)); } } /** * Perform BGP import transformations on a given route after receiving an advertisement. * * <p>Return {@code null} if the route should not be imported. */ @Nullable public static <R extends BgpRoute<B, R>, B extends BgpRoute.Builder<B, R>> B transformBgpRouteOnImport( BgpRoute<B, R> route, long localAs, boolean allowLocalAsIn, boolean isEbgp, BgpProcess toProcess, Ip peerIp, @Nullable String peerInterface) { // skip routes containing peer's AS unless explicitly allowed if (!allowLocalAsIn && route.getAsPath().containsAs(localAs)) { return null; } RoutingProtocol targetProtocol = isEbgp ? RoutingProtocol.BGP : RoutingProtocol.IBGP; B importBuilder = route.toBuilder(); if (!(route instanceof EvpnRoute<?, ?>)) { // Only set admin for non-EVPN routes; EVPN routes have a constant admin distance. // Only set next hop for non-EVPN routes; EVPN routes come in with VTEP next hops. importBuilder .setAdmin(toProcess.getAdminCost(targetProtocol)) .setNextHop(NextHop.legacyConverter(peerInterface, route.getNextHopIp())); } return importBuilder .setProtocol(targetProtocol) .setReceivedFromIp(peerIp) .setSrcProtocol(targetProtocol) .setOriginMechanism(LEARNED); } /** * Convert an aggregate/generated route to a BGP route. * * @param generatedRoute a {@link GeneratedRoute} to convert to a {@link Bgpv4Route}. 
 * @param attributePolicy a {@link RoutingPolicy} to use to set BGP route attributes after
   *     conversion
   * @param routerId Router ID to set as the originatorIp for the resulting BGP route.
   * @param nextHopIp Next hop IP for the resulting route; {@code Ip.AUTO} means discard next hop.
   * @param nonRouting Whether to mark the Bgpv4Route as non-routing
   */
  @Nonnull
  public static Bgpv4Route convertGeneratedRouteToBgp(
      GeneratedRoute generatedRoute,
      @Nullable RoutingPolicy attributePolicy,
      Ip routerId,
      Ip nextHopIp,
      boolean nonRouting) {
    // Ip.AUTO is the sentinel for "no concrete next hop": use a discard next hop in that case.
    NextHop nextHop =
        nextHopIp.equals(Ip.AUTO) ? NextHopDiscard.instance() : NextHopIp.of(nextHopIp);
    Builder builder = convertGeneratedRouteToBgp(generatedRoute, routerId, nextHop, nonRouting);
    if (attributePolicy == null) {
      return builder.build();
    }
    // Apply attribute policy with next hop cleared, then restore the computed next hop so the
    // policy cannot override it.
    boolean accepted =
        attributePolicy.process(builder.build(), builder.clearNextHop(), Direction.OUT);
    assert accepted;
    return builder.setNextHop(nextHop).build();
  }

  /**
   * Convert an aggregate/generated route to a BGP route builder.
   *
   * @param generatedRoute a {@link GeneratedRoute} to convert to a {@link Bgpv4Route}.
   * @param routerId Router ID to set as the originatorIp for the resulting BGP route.
   * @param nonRouting Whether to mark the Bgpv4Route as non-routing
   */
  @Nonnull
  @VisibleForTesting
  static Builder convertGeneratedRouteToBgp(
      GeneratedRoute generatedRoute, Ip routerId, NextHop nextHop, boolean nonRouting) {
    return Bgpv4Route.builder()
        .setAdmin(generatedRoute.getAdministrativeCost())
        .setAsPath(generatedRoute.getAsPath())
        .setCommunities(generatedRoute.getCommunities())
        .setMetric(generatedRoute.getMetric())
        .setSrcProtocol(RoutingProtocol.AGGREGATE)
        .setProtocol(RoutingProtocol.AGGREGATE)
        .setNextHop(nextHop)
        .setNetwork(generatedRoute.getNetwork())
        .setLocalPreference(DEFAULT_LOCAL_PREFERENCE)
        /*
         * Note: Origin type and originator IP should get overwritten by export policy,
         * but are needed initially
         */
        .setOriginatorIp(routerId)
        .setOriginMechanism(GENERATED)
        .setOriginType(generatedRoute.getOriginType())
        .setReceivedFromIp(/* Originated locally. */ Ip.ZERO)
        .setNonRouting(nonRouting);
  }

  /** Create a BGP route from an activated aggregate. */
  public static @Nonnull Bgpv4Route toBgpv4Route(
      BgpAggregate aggregate, @Nullable RoutingPolicy attributePolicy, int admin, Ip routerId) {
    Bgpv4Route.Builder builder =
        Bgpv4Route.builder()
            .setAdmin(admin)
            // TODO: support merging as-path from contributors via generationPolicy
            .setAsPath(AsPath.empty())
            // TODO: support merging communities from contributors via generationPolicy
            .setCommunities(CommunitySet.empty())
            .setMetric(0L)
            .setSrcProtocol(RoutingProtocol.AGGREGATE)
            .setProtocol(RoutingProtocol.AGGREGATE)
            .setNextHop(NextHopDiscard.instance())
            .setNetwork(aggregate.getNetwork())
            .setLocalPreference(DEFAULT_LOCAL_PREFERENCE)
            .setOriginatorIp(routerId)
            .setOriginMechanism(GENERATED)
            // TODO: confirm default is IGP for all devices initializing aggregates from BGP RIB
            .setOriginType(OriginType.IGP)
            .setReceivedFromIp(/* Originated locally. */ Ip.ZERO)
            .setWeight(DEFAULT_LOCAL_WEIGHT);
    if (attributePolicy == null) {
      return builder.build();
    }
    boolean accepted = attributePolicy.process(builder.build(), builder, Direction.OUT);
    assert accepted;
    return builder.build();
  }

  /**
   * Convert a route that is neither a {@link BgpRoute} nor a {@link GeneratedRoute} to a {@link
   * Bgpv4Route.Builder}.
   *
   * <p>Intended for converting main RIB routes into their BGP equivalents before passing {@code
   * routeDecorator} to the export policy
   *
   * <p>The builder returned will have default local preference, redistribute origin mechanism,
   * incomplete origin type, and most other fields unset.
 */
  @Nonnull
  public static Bgpv4Route.Builder convertNonBgpRouteToBgpRoute(
      AbstractRouteDecorator routeDecorator,
      Ip routerId,
      Ip nextHopIp,
      int adminDistance,
      RoutingProtocol protocol,
      OriginMechanism originMechanism) {
    // Caller contract: target protocol must be a BGP variant and the input must not already be BGP.
    assert protocol == RoutingProtocol.BGP || protocol == RoutingProtocol.IBGP;
    assert !(routeDecorator.getAbstractRoute() instanceof BgpRoute);
    AbstractRoute route = routeDecorator.getAbstractRoute();
    return Bgpv4Route.builder()
        .setNetwork(route.getNetwork())
        .setAdmin(adminDistance)
        .setOriginatorIp(routerId)
        .setProtocol(protocol)
        .setSrcProtocol(route.getProtocol())
        .setOriginMechanism(originMechanism)
        .setOriginType(OriginType.INCOMPLETE)
        // TODO: support customization of route preference
        .setLocalPreference(DEFAULT_LOCAL_PREFERENCE)
        .setReceivedFromIp(/* Originated locally. */ Ip.ZERO)
        .setNextHopIp(nextHopIp)
        .setMetric(route.getMetric())
        .setTag(routeDecorator.getAbstractRoute().getTag());
    // Let everything else default to unset/empty/etc.
  }

  /**
   * Perform BGP export transformations on a given route <em>after</em> export policy has been
   * applied to the route, route was accepted, but before route is sent "onto the wire".
   *
   * <p>Sets next hop - if not already set by export policy - for non-EVPN-type-5 routes only. EVPN
   * type 5 routes' next hops should be set by {@link #setEvpnType5NhPostExport}.
   *
   * @param routeBuilder Builder for the output (exported) route
   * @param ourSessionProperties properties for the sender's session
   * @param af sender's address family configuration
   * @param originalRouteNhip Next hop IP of the original route
   */
  public static <R extends BgpRoute<B, R>, B extends BgpRoute.Builder<B, R>>
      void transformBgpRoutePostExport(
          B routeBuilder,
          BgpSessionProperties ourSessionProperties,
          AddressFamily af,
          Ip originalRouteNhip) {
    // Thin delegation: unpack session/address-family settings and forward to the testable overload.
    transformBgpRoutePostExport(
        routeBuilder,
        ourSessionProperties.isEbgp(),
        af.getAddressFamilyCapabilities().getSendCommunity(),
        af.getAddressFamilyCapabilities().getSendExtendedCommunity(),
        ourSessionProperties.getConfedSessionType(),
        ourSessionProperties.getLocalAs(),
        ourSessionProperties.getLocalIp(),
        originalRouteNhip);
  }

  /**
   * Perform BGP export transformations on a given route <em>after</em> export policy has been
   * applied to the route, route was accepted, but before route is sent "onto the wire".
   *
   * <p>Sets next hop - if not already set by export policy - for non-EVPN-type-5 routes only. EVPN
   * type 5 routes' next hops should be set by {@link #setEvpnType5NhPostExport}.
 * @param routeBuilder Builder for the output (exported) route
   * @param isEbgp true for ebgp sessions
   * @param sendStandardCommunities whether to send standard communities to the neighbor
   * @param sendExtendedCommunities whether to send extended communities to the neighbor
   * @param confedSessionType type of confederation session, if any
   * @param localAs local AS
   * @param localIp IP of the neighbor which is exporting the route
   * @param originalRouteNhip Next hop IP of the original route
   */
  @VisibleForTesting
  static <R extends BgpRoute<B, R>, B extends BgpRoute.Builder<B, R>>
      void transformBgpRoutePostExport(
          B routeBuilder,
          boolean isEbgp,
          boolean sendStandardCommunities,
          boolean sendExtendedCommunities,
          ConfedSessionType confedSessionType,
          long localAs,
          Ip localIp,
          Ip originalRouteNhip) {
    // if eBGP, prepend as-path sender's as-path number
    if (isEbgp) {
      // Within a confederation, prepend a confederation AS set rather than a regular one.
      AsSet asSetToPrepend =
          confedSessionType == ConfedSessionType.WITHIN_CONFED
              ? AsSet.confed(localAs)
              : AsSet.of(localAs);

      // Remove any confederations if propagating route outside of the confederation border
      AsPath routeAsPath = routeBuilder.getAsPath();
      if (confedSessionType.equals(ConfedSessionType.ACROSS_CONFED_BORDER)) {
        routeAsPath = routeAsPath.removeConfederations();
      }

      routeBuilder.setAsPath(
          AsPath.of(
              ImmutableList.<AsSet>builder()
                  .add(asSetToPrepend)
                  .addAll(routeAsPath.getAsSets())
                  .build()));
    }

    // Tags are non-transitive
    routeBuilder.setTag(null);

    // Only send communities that are supported
    if (!sendStandardCommunities) {
      // No standard: Extended or nothing.
      routeBuilder.setCommunities(
          sendExtendedCommunities
              ? routeBuilder.getCommunities().getExtendedCommunities()
              : ImmutableSet.of());
    } else if (!sendExtendedCommunities) {
      // Standard, not extended.
      routeBuilder.setCommunities(routeBuilder.getCommunities().getStandardCommunities());
    } // else preserve all communities as-is.

    // Skip setting our own next hop if it has already been set by the routing policy
    // TODO: When sending out a BGP route with a NextHopVtep, should that next hop be preserved?
    //  If so, this should step be skipped for such routes.
    // TODO: This next hop is incorrect for EVPN type 3 routes (not critical since type 3 routes'
    //  next hops have no function).
    if (!(routeBuilder instanceof EvpnType5Route.Builder)
        && routeBuilder.getNextHopIp().equals(UNSET_ROUTE_NEXT_HOP_IP)) {
      if (isEbgp) {
        routeBuilder.setNextHopIp(localIp);
      } else { // iBGP session
        /*
        Note: implementation of next-hop-self in the general case is delegated to routing
        policy.
        If original route has next-hop ip, preserve it. If not, set our own.
        */
        routeBuilder.setNextHopIp(
            originalRouteNhip.equals(UNSET_ROUTE_NEXT_HOP_IP) ? localIp : originalRouteNhip);
      }
    }

    /*
    Routes can be aggregate only when generated locally. When transiting across nodes they must
    be BGP or IBGP.

    This is a bit of a hack since we currently overload AGGREGATE to mean aggregate protocol
    (i.e., just like on Juniper) AND to mean "locally generated". :(
    */
    if (routeBuilder.getProtocol() == RoutingProtocol.AGGREGATE) {
      routeBuilder.setProtocol(isEbgp ? RoutingProtocol.BGP : RoutingProtocol.IBGP);
    }
  }

  /**
   * Sets next hop on EVPN type 5 route builder in preparation for export. Uses original route's
   * next hop unless it is {@link NextHopDiscard}, which indicates the original route was
   * originated; in this case the route builder is given a {@link NextHopVtep}.
   *
   * @return {@code true} if the next hop was successfully set. May be {@code false} if the export
   *     candidate was originated on this node and our EVPN address family has no NVE IP.
   */
  public static boolean setEvpnType5NhPostExport(
      EvpnType5Route.Builder routeBuilder,
      EvpnAddressFamily af,
      NextHop originalRouteNh,
      int originalRouteVni) {
    if (!originalRouteNh.equals(NextHopDiscard.instance())) {
      // Original route has a non-discard next hop. Use that.
      routeBuilder.setNextHop(originalRouteNh);
      return true;
    }
    // Original route has NextHopDiscard, so create a NextHopVtep for the exported route.
    if (af.getNveIp() == null) {
      // Can't create a NextHopVtep.
      return false;
    }
    routeBuilder.setNextHop(NextHopVtep.of(originalRouteVni, af.getNveIp()));
    return true;
  }

  // Utility class; prevent instantiation.
  private BgpProtocolHelper() {}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import static org.apache.hadoop.hdfs.server.namenode.TestEditLog.TXNS_PER_FAIL; import static org.apache.hadoop.hdfs.server.namenode.TestEditLog.TXNS_PER_ROLL; import static org.apache.hadoop.hdfs.server.namenode.TestEditLog.setupEdits; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.RandomAccessFile; import java.net.URI; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.PriorityQueue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.namenode.JournalManager.CorruptionException; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; import org.apache.hadoop.hdfs.server.namenode.TestEditLog.AbortSpec; import org.apache.hadoop.io.IOUtils; import org.junit.Before; import org.junit.Test; import com.google.common.base.Joiner; import 
com.google.common.collect.ImmutableList; public class TestFileJournalManager { static final Log LOG = LogFactory.getLog(TestFileJournalManager.class); private Configuration conf; static { // No need to fsync for the purposes of tests. This makes // the tests run much faster. EditLogFileOutputStream.setShouldSkipFsyncForTesting(true); } @Before public void setUp() { conf = new Configuration(); } /** * Find out how many transactions we can read from a * FileJournalManager, starting at a given transaction ID. * * @param jm The journal manager * @param fromTxId Transaction ID to start at * @param inProgressOk Should we consider edit logs that are not finalized? * @return The number of transactions * @throws IOException */ static long getNumberOfTransactions(FileJournalManager jm, long fromTxId, boolean inProgressOk, boolean abortOnGap) throws IOException { long numTransactions = 0, txId = fromTxId; final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64, JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR); jm.selectInputStreams(allStreams, fromTxId, inProgressOk); EditLogInputStream elis = null; try { while ((elis = allStreams.poll()) != null) { try { elis.skipUntil(txId); while (true) { FSEditLogOp op = elis.readOp(); if (op == null) { break; } if (abortOnGap && (op.getTransactionId() != txId)) { LOG.info("getNumberOfTransactions: detected gap at txId " + fromTxId); return numTransactions; } txId = op.getTransactionId() + 1; numTransactions++; } } finally { IOUtils.cleanup(LOG, elis); } } } finally { IOUtils.cleanup(LOG, allStreams.toArray(new EditLogInputStream[0])); } return numTransactions; } /** * Test the normal operation of loading transactions from * file journal manager. 3 edits directories are setup without any * failures. Test that we read in the expected number of transactions. 
 */
  @Test
  public void testNormalOperation() throws IOException {
    File f1 = new File(TestEditLog.TEST_DIR + "/normtest0");
    File f2 = new File(TestEditLog.TEST_DIR + "/normtest1");
    File f3 = new File(TestEditLog.TEST_DIR + "/normtest2");

    List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI(), f3.toURI());
    NNStorage storage = setupEdits(editUris, 5);

    long numJournals = 0;
    for (StorageDirectory sd : storage.dirIterable(NameNodeDirType.EDITS)) {
      FileJournalManager jm = new FileJournalManager(conf, sd, storage);
      // 5 rolls produce 6 segments (5 finalized + the final one), each with TXNS_PER_ROLL txns.
      assertEquals(6*TXNS_PER_ROLL, getNumberOfTransactions(jm, 1, true, false));
      numJournals++;
    }
    assertEquals(3, numJournals);
  }

  /**
   * Test that inprogress files are handled correctly. Set up a single
   * edits directory. Fail after the last roll. Then verify that the
   * logs have the expected number of transactions.
   */
  @Test
  public void testInprogressRecovery() throws IOException {
    File f = new File(TestEditLog.TEST_DIR + "/inprogressrecovery");
    // abort after the 5th roll
    NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()),
                                   5, new AbortSpec(5, 0));
    StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next();

    FileJournalManager jm = new FileJournalManager(conf, sd, storage);
    // 5 full rolls plus the partial transactions written before the abort.
    assertEquals(5*TXNS_PER_ROLL + TXNS_PER_FAIL,
        getNumberOfTransactions(jm, 1, true, false));
  }

  /**
   * Test a mixture of inprogress files and finalised. Set up 3 edits
   * directories and fail the second on the last roll. Verify that reading
   * the transactions, reads from the finalised directories.
   */
  @Test
  public void testInprogressRecoveryMixed() throws IOException {
    File f1 = new File(TestEditLog.TEST_DIR + "/mixtest0");
    File f2 = new File(TestEditLog.TEST_DIR + "/mixtest1");
    File f3 = new File(TestEditLog.TEST_DIR + "/mixtest2");

    List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI(), f3.toURI());

    // abort after the 5th roll
    NNStorage storage = setupEdits(editUris, 5, new AbortSpec(5, 1));
    Iterator<StorageDirectory> dirs = storage.dirIterator(NameNodeDirType.EDITS);
    StorageDirectory sd = dirs.next();
    FileJournalManager jm = new FileJournalManager(conf, sd, storage);
    assertEquals(6*TXNS_PER_ROLL, getNumberOfTransactions(jm, 1, true, false));

    sd = dirs.next();
    jm = new FileJournalManager(conf, sd, storage);
    // The aborted directory only has the partial final segment.
    assertEquals(5*TXNS_PER_ROLL + TXNS_PER_FAIL,
        getNumberOfTransactions(jm, 1, true, false));

    sd = dirs.next();
    jm = new FileJournalManager(conf, sd, storage);
    assertEquals(6*TXNS_PER_ROLL, getNumberOfTransactions(jm, 1, true, false));
  }

  /**
   * Test that FileJournalManager behaves correctly despite inprogress
   * files in all its edit log directories. Set up 3 directories and fail
   * all on the last roll. Verify that the correct number of transactions
   * are then loaded.
*/ @Test public void testInprogressRecoveryAll() throws IOException { File f1 = new File(TestEditLog.TEST_DIR + "/failalltest0"); File f2 = new File(TestEditLog.TEST_DIR + "/failalltest1"); File f3 = new File(TestEditLog.TEST_DIR + "/failalltest2"); List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI(), f3.toURI()); // abort after the 5th roll NNStorage storage = setupEdits(editUris, 5, new AbortSpec(5, 0), new AbortSpec(5, 1), new AbortSpec(5, 2)); Iterator<StorageDirectory> dirs = storage.dirIterator(NameNodeDirType.EDITS); StorageDirectory sd = dirs.next(); FileJournalManager jm = new FileJournalManager(conf, sd, storage); assertEquals(5*TXNS_PER_ROLL + TXNS_PER_FAIL, getNumberOfTransactions(jm, 1, true, false)); sd = dirs.next(); jm = new FileJournalManager(conf, sd, storage); assertEquals(5*TXNS_PER_ROLL + TXNS_PER_FAIL, getNumberOfTransactions(jm, 1, true, false)); sd = dirs.next(); jm = new FileJournalManager(conf, sd, storage); assertEquals(5*TXNS_PER_ROLL + TXNS_PER_FAIL, getNumberOfTransactions(jm, 1, true, false)); } /** * Corrupt an edit log file after the start segment transaction */ private void corruptAfterStartSegment(File f) throws IOException { RandomAccessFile raf = new RandomAccessFile(f, "rw"); raf.seek(0x20); // skip version and first tranaction and a bit of next transaction for (int i = 0; i < 1000; i++) { raf.writeInt(0xdeadbeef); } raf.close(); } @Test(expected=IllegalStateException.class) public void testFinalizeErrorReportedToNNStorage() throws IOException, InterruptedException { File f = new File(TestEditLog.TEST_DIR + "/filejournaltestError"); // abort after 10th roll NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10, new AbortSpec(10, 0)); StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next(); FileJournalManager jm = new FileJournalManager(conf, sd, storage); String sdRootPath = sd.getRoot().getAbsolutePath(); FileUtil.chmod(sdRootPath, "-w", true); try { jm.finalizeLogSegment(0, 1); 
} finally { FileUtil.chmod(sdRootPath, "+w", true); assertTrue(storage.getRemovedStorageDirs().contains(sd)); } } /** * Test that we can read from a stream created by FileJournalManager. * Create a single edits directory, failing it on the final roll. * Then try loading from the point of the 3rd roll. Verify that we read * the correct number of transactions from this point. */ @Test public void testReadFromStream() throws IOException { File f = new File(TestEditLog.TEST_DIR + "/readfromstream"); // abort after 10th roll NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10, new AbortSpec(10, 0)); StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next(); FileJournalManager jm = new FileJournalManager(conf, sd, storage); long expectedTotalTxnCount = TXNS_PER_ROLL*10 + TXNS_PER_FAIL; assertEquals(expectedTotalTxnCount, getNumberOfTransactions(jm, 1, true, false)); long skippedTxns = (3*TXNS_PER_ROLL); // skip first 3 files long startingTxId = skippedTxns + 1; long numLoadable = getNumberOfTransactions(jm, startingTxId, true, false); assertEquals(expectedTotalTxnCount - skippedTxns, numLoadable); } /** * Make requests with starting transaction ids which don't match the beginning * txid of some log segments. * * This should succeed. */ @Test public void testAskForTransactionsMidfile() throws IOException { File f = new File(TestEditLog.TEST_DIR + "/askfortransactionsmidfile"); NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10); StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next(); FileJournalManager jm = new FileJournalManager(conf, sd, storage); // 10 rolls, so 11 rolled files, 110 txids total. final int TOTAL_TXIDS = 10 * 11; for (int txid = 1; txid <= TOTAL_TXIDS; txid++) { assertEquals((TOTAL_TXIDS - txid) + 1, getNumberOfTransactions(jm, txid, true, false)); } } /** * Test that we receive the correct number of transactions when we count * the number of transactions around gaps. 
 * Set up a single edits directory, with no failures. Delete the 4th logfile.
   * Test that getNumberOfTransactions returns the correct number of
   * transactions before this gap and after this gap. Also verify that if you
   * try to count on the gap that an exception is thrown.
   */
  @Test
  public void testManyLogsWithGaps() throws IOException {
    File f = new File(TestEditLog.TEST_DIR + "/manylogswithgaps");
    NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10);

    StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next();

    // Delete the segment covering [startGapTxId, endGapTxId] to create the gap.
    final long startGapTxId = 3*TXNS_PER_ROLL + 1;
    final long endGapTxId = 4*TXNS_PER_ROLL;
    File[] files = new File(f, "current").listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          if (name.startsWith(NNStorage.getFinalizedEditsFileName(startGapTxId,
                                  endGapTxId))) {
            return true;
          }
          return false;
        }
      });
    // NOTE(review): listFiles may return null on I/O error; test would then NPE — acceptable here.
    assertEquals(1, files.length);
    assertTrue(files[0].delete());

    FileJournalManager jm = new FileJournalManager(conf, sd, storage);
    // Counting from the start stops at the gap.
    assertEquals(startGapTxId-1, getNumberOfTransactions(jm, 1, true, true));

    // Counting from inside the gap finds nothing.
    assertEquals(0, getNumberOfTransactions(jm, startGapTxId, true, true));

    // rolled 10 times so there should be 11 files.
    assertEquals(11*TXNS_PER_ROLL - endGapTxId,
        getNumberOfTransactions(jm, endGapTxId + 1, true, true));
  }

  /**
   * Test that we can load an edits directory with a corrupt inprogress file.
   * The corrupt inprogress file should be moved to the side.
*/ @Test public void testManyLogsWithCorruptInprogress() throws IOException { File f = new File(TestEditLog.TEST_DIR + "/manylogswithcorruptinprogress"); NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10, new AbortSpec(10, 0)); StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next(); File[] files = new File(f, "current").listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name.startsWith("edits_inprogress")) { return true; } return false; } }); assertEquals(files.length, 1); corruptAfterStartSegment(files[0]); FileJournalManager jm = new FileJournalManager(conf, sd, storage); assertEquals(10*TXNS_PER_ROLL+1, getNumberOfTransactions(jm, 1, true, false)); } @Test public void testGetRemoteEditLog() throws IOException { StorageDirectory sd = FSImageTestUtil.mockStorageDirectory( NameNodeDirType.EDITS, false, NNStorage.getFinalizedEditsFileName(1, 100), NNStorage.getFinalizedEditsFileName(101, 200), NNStorage.getInProgressEditsFileName(201), NNStorage.getFinalizedEditsFileName(1001, 1100)); // passing null for NNStorage because this unit test will not use it FileJournalManager fjm = new FileJournalManager(conf, sd, null); assertEquals("[1,100],[101,200],[1001,1100]", getLogsAsString(fjm, 1)); assertEquals("[101,200],[1001,1100]", getLogsAsString(fjm, 101)); assertEquals("[101,200],[1001,1100]", getLogsAsString(fjm, 150)); assertEquals("[1001,1100]", getLogsAsString(fjm, 201)); assertEquals("Asking for a newer log than exists should return empty list", "", getLogsAsString(fjm, 9999)); } /** * tests that passing an invalid dir to matchEditLogs throws IOException */ @Test(expected = IOException.class) public void testMatchEditLogInvalidDirThrowsIOException() throws IOException { File badDir = new File("does not exist"); FileJournalManager.matchEditLogs(badDir); } private static EditLogInputStream getJournalInputStream(JournalManager jm, long txId, boolean inProgressOk) throws IOException { 
final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64, JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR); jm.selectInputStreams(allStreams, txId, inProgressOk); EditLogInputStream elis = null, ret; try { while ((elis = allStreams.poll()) != null) { if (elis.getFirstTxId() > txId) { break; } if (elis.getLastTxId() < txId) { elis.close(); continue; } elis.skipUntil(txId); ret = elis; elis = null; return ret; } } finally { IOUtils.cleanup(LOG, allStreams.toArray(new EditLogInputStream[0])); IOUtils.cleanup(LOG, elis); } return null; } /** * Make sure that we starting reading the correct op when we request a stream * with a txid in the middle of an edit log file. */ @Test public void testReadFromMiddleOfEditLog() throws CorruptionException, IOException { File f = new File(TestEditLog.TEST_DIR + "/readfrommiddleofeditlog"); NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10); StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next(); FileJournalManager jm = new FileJournalManager(conf, sd, storage); EditLogInputStream elis = getJournalInputStream(jm, 5, true); try { FSEditLogOp op = elis.readOp(); assertEquals("read unexpected op", op.getTransactionId(), 5); } finally { IOUtils.cleanup(LOG, elis); } } /** * Make sure that in-progress streams aren't counted if we don't ask for * them. */ @Test public void testExcludeInProgressStreams() throws CorruptionException, IOException { File f = new File(TestEditLog.TEST_DIR + "/excludeinprogressstreams"); // Don't close the edit log once the files have been set up. NNStorage storage = setupEdits(Collections.<URI>singletonList(f.toURI()), 10, false); StorageDirectory sd = storage.dirIterator(NameNodeDirType.EDITS).next(); FileJournalManager jm = new FileJournalManager(conf, sd, storage); // If we exclude the in-progess stream, we should only have 100 tx. 
assertEquals(100, getNumberOfTransactions(jm, 1, false, false)); EditLogInputStream elis = getJournalInputStream(jm, 90, false); try { FSEditLogOp lastReadOp = null; while ((lastReadOp = elis.readOp()) != null) { assertTrue(lastReadOp.getTransactionId() <= 100); } } finally { IOUtils.cleanup(LOG, elis); } } private static String getLogsAsString( FileJournalManager fjm, long firstTxId) throws IOException { return Joiner.on(",").join(fjm.getRemoteEditLogs(firstTxId, false)); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.modules.session.internal.filter.attributes; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.geode.DataSerializer; import org.apache.geode.internal.InternalDataSerializer; import org.apache.geode.internal.Version; import org.apache.geode.internal.util.BlobHelper; import org.apache.geode.modules.session.internal.filter.GemfireHttpSession; /** * Abstract implementation for attributes. Should be sub-classed to provide differing * implementations for synchronous or delta propagation. The backing store used is defined by the * session manager. */ public abstract class AbstractSessionAttributes implements SessionAttributes { private static final Logger LOG = LoggerFactory.getLogger(AbstractSessionAttributes.class.getName()); /** * Internal attribute store. 
*/ protected Map<String, Object> attributes = Collections.synchronizedMap(new HashMap<String, Object>()); /** * The session to which these attributes belong */ protected transient GemfireHttpSession session; /** * The last accessed time */ protected long lastAccessedTime; /** * The maximum inactive interval. Default is 1800 seconds. */ protected int maxInactiveInterval = 60 * 30; /** * The JVM Id who last committed these attributes */ protected String jvmOwnerId; protected long creationTime; /** * {@inheritDoc} */ @Override public void setSession(GemfireHttpSession session) { this.session = session; } /** * {@inheritDoc} The actual de-serialization of any domain objects is deferred until the point at * which they are actually retrieved by the application layer. */ @Override public Object getAttribute(String name) { Object value = attributes.get(name); // If the value is a byte[] (meaning it came from the server), // deserialize it and re-add it to attributes map before returning it. if (value instanceof byte[]) { try { value = BlobHelper.deserializeBlob((byte[]) value); attributes.put(name, value); } catch (Exception iox) { LOG.error("Attribute '" + name + " contains a byte[] that cannot be deserialized due " + "to the following exception", iox); } } return value; } /** * {@inheritDoc} */ @Override public Set<String> getAttributeNames() { return attributes.keySet(); } /** * {@inheritDoc} + */ @Override public void setMaxInactiveInterval(int interval) { maxInactiveInterval = interval; } @Override public int getMaxIntactiveInterval() { return maxInactiveInterval; } @Override public long getCreationTime() { return creationTime; } @Override public void setCreationTime(long creationTime) { this.creationTime = creationTime; } @Override public void setLastAccessedTime(long time) { lastAccessedTime = time; } @Override public long getLastAccessedTime() { return lastAccessedTime; } /** * {@inheritDoc} This method calls back into the session to flush the whole session 
including its * attributes. */ @Override public void flush() { session.putInRegion(); } /** * Use DeltaEvents to propagate the actual attribute data - DeltaEvents turn the values into byte * arrays which means that the actual domain classes are not required on the server. */ @Override public void toData(DataOutput out) throws IOException { toDataPre_GEODE_1_3_0_0(out); out.writeLong(creationTime); } public void toDataPre_GEODE_1_3_0_0(DataOutput out) throws IOException { out.writeInt(maxInactiveInterval); out.writeLong(lastAccessedTime); synchronized (attributes) { out.writeInt(attributes.size()); for (Map.Entry<String, Object> entry : attributes.entrySet()) { DeltaEvent delta = new DeltaEvent(true, entry.getKey(), entry.getValue()); DataSerializer.writeObject(delta, out); } } out.writeUTF(jvmOwnerId); } @Override public void fromData(DataInput in) throws IOException, ClassNotFoundException { fromDataPre_GEODE_1_3_0_0(in); creationTime = in.readLong(); } private void fromDataPre_GEODE_1_3_0_0(DataInput in) throws IOException, ClassNotFoundException { maxInactiveInterval = in.readInt(); lastAccessedTime = in.readLong(); int size = in.readInt(); while (size-- > 0) { DeltaEvent event = DataSerializer.readObject(in); attributes.put(event.getName(), event.getValue()); } jvmOwnerId = in.readUTF(); } @Override public void setJvmOwnerId(String jvmId) { this.jvmOwnerId = jvmId; } @Override public String getJvmOwnerId() { return jvmOwnerId; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.converter.crypto; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.util.IOHelper; import org.bouncycastle.bcpg.BCPGOutputStream; import org.bouncycastle.bcpg.CompressionAlgorithmTags; import org.bouncycastle.bcpg.HashAlgorithmTags; import org.bouncycastle.bcpg.SymmetricKeyAlgorithmTags; import org.bouncycastle.bcpg.sig.KeyFlags; import org.bouncycastle.openpgp.PGPCompressedDataGenerator; import org.bouncycastle.openpgp.PGPEncryptedDataGenerator; import org.bouncycastle.openpgp.PGPException; import org.bouncycastle.openpgp.PGPLiteralData; 
import org.bouncycastle.openpgp.PGPLiteralDataGenerator; import org.bouncycastle.openpgp.PGPPrivateKey; import org.bouncycastle.openpgp.PGPPublicKey; import org.bouncycastle.openpgp.PGPPublicKeyRing; import org.bouncycastle.openpgp.PGPPublicKeyRingCollection; import org.bouncycastle.openpgp.PGPSecretKey; import org.bouncycastle.openpgp.PGPSecretKeyRing; import org.bouncycastle.openpgp.PGPSecretKeyRingCollection; import org.bouncycastle.openpgp.PGPSignature; import org.bouncycastle.openpgp.PGPSignatureGenerator; import org.bouncycastle.openpgp.PGPUtil; import org.bouncycastle.openpgp.operator.bc.BcKeyFingerprintCalculator; import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder; import org.bouncycastle.openpgp.operator.jcajce.JcePBEKeyEncryptionMethodGenerator; import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder; import org.bouncycastle.openpgp.operator.jcajce.JcePGPDataEncryptorBuilder; import org.bouncycastle.openpgp.operator.jcajce.JcePublicKeyKeyEncryptionMethodGenerator; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.fail; public class PGPDataFormatTest extends AbstractPGPDataFormatTest { private static final String PUB_KEY_RING_SUBKEYS_FILE_NAME = "org/apache/camel/component/crypto/pubringSubKeys.gpg"; private static final String SEC_KEY_RING_FILE_NAME = "org/apache/camel/component/crypto/secring.gpg"; private static final String PUB_KEY_RING_FILE_NAME = "org/apache/camel/component/crypto/pubring.gpg"; PGPDataFormat encryptor = new PGPDataFormat(); PGPDataFormat decryptor = new PGPDataFormat(); @BeforeEach public void setUpEncryptorAndDecryptor() { // the following keyring contains a primary key with KeyFlag "Certify" and a subkey for signing and a subkey for encryption 
encryptor.setKeyFileName(PUB_KEY_RING_SUBKEYS_FILE_NAME); encryptor.setSignatureKeyFileName("org/apache/camel/component/crypto/secringSubKeys.gpg"); encryptor.setSignaturePassword("Abcd1234"); encryptor.setKeyUserid("keyflag"); encryptor.setSignatureKeyUserid("keyflag"); encryptor.setIntegrity(false); encryptor.setFileName("fileNameABC"); // the following keyring contains a primary key with KeyFlag "Certify" and a subkey for signing and a subkey for encryption decryptor.setKeyFileName("org/apache/camel/component/crypto/secringSubKeys.gpg"); decryptor.setSignatureKeyFileName(PUB_KEY_RING_SUBKEYS_FILE_NAME); decryptor.setPassword("Abcd1234"); decryptor.setSignatureKeyUserid("keyflag"); } protected String getKeyFileName() { return PUB_KEY_RING_FILE_NAME; } protected String getKeyFileNameSec() { return SEC_KEY_RING_FILE_NAME; } protected String getKeyUserId() { return "[email protected]"; } protected List<String> getKeyUserIds() { List<String> userids = new ArrayList<>(2); userids.add("second"); userids.add(getKeyUserId()); return userids; } protected List<String> getSignatureKeyUserIds() { List<String> userids = new ArrayList<>(2); userids.add("second"); userids.add(getKeyUserId()); return userids; } protected String getKeyPassword() { return "sdude"; } protected String getProvider() { return "BC"; } protected int getAlgorithm() { return SymmetricKeyAlgorithmTags.TRIPLE_DES; } protected int getHashAlgorithm() { return HashAlgorithmTags.SHA256; } protected int getCompressionAlgorithm() { return CompressionAlgorithmTags.BZIP2; } @Test void testEncryption() throws Exception { doRoundTripEncryptionTests("direct:inline"); } @Test void testEncryption2() throws Exception { doRoundTripEncryptionTests("direct:inline2"); } @Test void testEncryptionArmor() throws Exception { doRoundTripEncryptionTests("direct:inline-armor"); } @Test void testEncryptionSigned() throws Exception { doRoundTripEncryptionTests("direct:inline-sign"); } @Test void testEncryptionKeyRingByteArray() 
throws Exception { doRoundTripEncryptionTests("direct:key-ring-byte-array"); } @Test void testEncryptionSignedKeyRingByteArray() throws Exception { doRoundTripEncryptionTests("direct:sign-key-ring-byte-array"); } @Test void testSeveralSignerKeys() throws Exception { doRoundTripEncryptionTests("direct:several-signer-keys"); } @Test void testOneUserIdWithServeralKeys() throws Exception { doRoundTripEncryptionTests("direct:one-userid-several-keys"); } @Test void testKeyAccess() throws Exception { doRoundTripEncryptionTests("direct:key_access"); } @Test void testVerifyExceptionNoPublicKeyFoundCorrespondingToSignatureUserIds() throws Exception { setupExpectations(context, 1, "mock:encrypted"); MockEndpoint exception = setupExpectations(context, 1, "mock:exception"); String payload = "Hi Alice, Be careful Eve is listening, signed Bob"; Map<String, Object> headers = getHeaders(); template.sendBodyAndHeaders("direct:verify_exception_sig_userids", payload, headers); assertMockEndpointsSatisfied(); checkThrownException(exception, IllegalArgumentException.class, null, "No public key found for the key ID(s)"); } @Test void testVerifyExceptionNoPassphraseSpecifiedForSignatureKeyUserId() throws Exception { MockEndpoint exception = setupExpectations(context, 1, "mock:exception"); String payload = "Hi Alice, Be careful Eve is listening, signed Bob"; Map<String, Object> headers = new HashMap<>(); // add signature user id which does not have a passphrase headers.put(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID, "userIDWithNoPassphrase"); // the following entry is necessary for the dynamic test headers.put(PGPKeyAccessDataFormat.KEY_USERID, "second"); template.sendBodyAndHeaders("direct:several-signer-keys", payload, headers); assertMockEndpointsSatisfied(); checkThrownException(exception, IllegalArgumentException.class, null, "No passphrase specified for signature key user ID"); } /** * You get three keys with the UserId "keyflag", a primary key and its two * sub-keys. 
The sub-key with KeyFlag {@link KeyFlags#SIGN_DATA} should be * used for signing and the sub-key with KeyFlag * {@link KeyFlags#ENCRYPT_COMMS} or {@link KeyFlags#ENCRYPT_COMMS} or * {@link KeyFlags#ENCRYPT_STORAGE} should be used for decryption. * * @throws Exception */ @Test void testKeyFlagSelectsCorrectKey() throws Exception { MockEndpoint mockKeyFlag = getMockEndpoint("mock:encrypted_keyflag"); mockKeyFlag.setExpectedMessageCount(1); template.sendBody("direct:keyflag", "Test Message"); assertMockEndpointsSatisfied(); List<Exchange> exchanges = mockKeyFlag.getExchanges(); assertEquals(1, exchanges.size()); Exchange exchange = exchanges.get(0); Message inMess = exchange.getIn(); assertNotNull(inMess); // must contain exactly one encryption key and one signature assertEquals(1, inMess.getHeader(PGPKeyAccessDataFormat.NUMBER_OF_ENCRYPTION_KEYS)); assertEquals(1, inMess.getHeader(PGPKeyAccessDataFormat.NUMBER_OF_SIGNING_KEYS)); } /** * You get three keys with the UserId "keyflag", a primary key and its two * sub-keys. The sub-key with KeyFlag {@link KeyFlags#SIGN_DATA} should be * used for signing and the sub-key with KeyFlag * {@link KeyFlags#ENCRYPT_COMMS} or {@link KeyFlags#ENCRYPT_COMMS} or * {@link KeyFlags#ENCRYPT_STORAGE} should be used for decryption. * <p> * Tests also the decryption and verifying part with the subkeys. 
* * @throws Exception */ @Test void testDecryptVerifyWithSubkey() throws Exception { // do not use doRoundTripEncryptionTests("direct:subkey"); because otherwise you get an error in the dynamic test String payload = "Test Message"; MockEndpoint mockSubkey = getMockEndpoint("mock:unencrypted"); mockSubkey.expectedBodiesReceived(payload); template.sendBody("direct:subkey", payload); assertMockEndpointsSatisfied(); } @Test void testEmptyBody() throws Exception { String payload = ""; MockEndpoint mockSubkey = getMockEndpoint("mock:unencrypted"); mockSubkey.expectedBodiesReceived(payload); template.sendBody("direct:subkey", payload); assertMockEndpointsSatisfied(); } @Test void testExceptionDecryptorIncorrectInputFormatNoPGPMessage() throws Exception { String payload = "Not Correct Format"; MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", payload); assertMockEndpointsSatisfied(); checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test void testExceptionDecryptorIncorrectInputFormatPGPSignedData() throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); createSignature(bos); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", bos.toByteArray()); assertMockEndpointsSatisfied(); checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test void testEncryptSignWithoutCompressedDataPacket() throws Exception { doRoundTripEncryptionTests("direct:encrypt-sign-without-compressed-data-packet"); // ByteArrayOutputStream bos = new ByteArrayOutputStream(); // //// createEncryptedNonCompressedData(bos, PUB_KEY_RING_SUBKEYS_FILE_NAME); // // MockEndpoint mock = getMockEndpoint("mock:exception"); // mock.expectedMessageCount(1); // 
template.sendBody("direct:encrypt-sign-without-compressed-data-packet", bos.toByteArray()); // assertMockEndpointsSatisfied(); // // //checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test void testExceptionDecryptorNoKeyFound() throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); createEncryptedNonCompressedData(bos, PUB_KEY_RING_FILE_NAME); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", bos.toByteArray()); assertMockEndpointsSatisfied(); checkThrownException(mock, PGPException.class, null, "PGP message is encrypted with a key which could not be found in the Secret Keyring"); } void createEncryptedNonCompressedData(ByteArrayOutputStream bos, String keyringPath) throws Exception, IOException, PGPException, UnsupportedEncodingException { PGPEncryptedDataGenerator encGen = new PGPEncryptedDataGenerator(new JcePGPDataEncryptorBuilder(SymmetricKeyAlgorithmTags.CAST5) .setSecureRandom(new SecureRandom()).setProvider(getProvider())); encGen.addMethod(new JcePublicKeyKeyEncryptionMethodGenerator(readPublicKey(keyringPath))); OutputStream encOut = encGen.open(bos, new byte[512]); PGPLiteralDataGenerator litData = new PGPLiteralDataGenerator(); OutputStream litOut = litData.open(encOut, PGPLiteralData.BINARY, PGPLiteralData.CONSOLE, new Date(), new byte[512]); try { litOut.write("Test Message Without Compression".getBytes("UTF-8")); litOut.flush(); } finally { IOHelper.close(litOut); IOHelper.close(encOut, bos); } } private void createSignature(OutputStream out) throws Exception { PGPSecretKey pgpSec = readSecretKey(); PGPPrivateKey pgpPrivKey = pgpSec.extractPrivateKey(new JcePBESecretKeyDecryptorBuilder().setProvider(getProvider()).build( "sdude".toCharArray())); PGPSignatureGenerator sGen = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(pgpSec.getPublicKey().getAlgorithm(), 
HashAlgorithmTags.SHA1).setProvider(getProvider())); sGen.init(PGPSignature.BINARY_DOCUMENT, pgpPrivKey); BCPGOutputStream bOut = new BCPGOutputStream(out); InputStream fIn = new ByteArrayInputStream("Test Signature".getBytes("UTF-8")); int ch; while ((ch = fIn.read()) >= 0) { sGen.update((byte) ch); } fIn.close(); sGen.generate().encode(bOut); } static PGPSecretKey readSecretKey() throws Exception { InputStream input = new ByteArrayInputStream(getSecKeyRing()); PGPSecretKeyRingCollection pgpSec = new PGPSecretKeyRingCollection(PGPUtil.getDecoderStream(input), new BcKeyFingerprintCalculator()); @SuppressWarnings("rawtypes") Iterator keyRingIter = pgpSec.getKeyRings(); while (keyRingIter.hasNext()) { PGPSecretKeyRing keyRing = (PGPSecretKeyRing) keyRingIter.next(); @SuppressWarnings("rawtypes") Iterator keyIter = keyRing.getSecretKeys(); while (keyIter.hasNext()) { PGPSecretKey key = (PGPSecretKey) keyIter.next(); if (key.isSigningKey()) { return key; } } } throw new IllegalArgumentException("Can't find signing key in key ring."); } static PGPPublicKey readPublicKey(String keyringPath) throws Exception { InputStream input = new ByteArrayInputStream(getKeyRing(keyringPath)); PGPPublicKeyRingCollection pgpPub = new PGPPublicKeyRingCollection(PGPUtil.getDecoderStream(input), new BcKeyFingerprintCalculator()); @SuppressWarnings("rawtypes") Iterator keyRingIter = pgpPub.getKeyRings(); while (keyRingIter.hasNext()) { PGPPublicKeyRing keyRing = (PGPPublicKeyRing) keyRingIter.next(); @SuppressWarnings("rawtypes") Iterator keyIter = keyRing.getPublicKeys(); while (keyIter.hasNext()) { PGPPublicKey key = (PGPPublicKey) keyIter.next(); if (key.isEncryptionKey()) { return key; } } } throw new IllegalArgumentException("Can't find encryption key in key ring."); } @Test void testExceptionDecryptorIncorrectInputFormatSymmetricEncryptedData() throws Exception { byte[] payload = "Not Correct Format".getBytes("UTF-8"); ByteArrayOutputStream bos = new ByteArrayOutputStream(); 
PGPEncryptedDataGenerator encGen = new PGPEncryptedDataGenerator(new JcePGPDataEncryptorBuilder(SymmetricKeyAlgorithmTags.CAST5) .setSecureRandom(new SecureRandom()).setProvider(getProvider())); encGen.addMethod(new JcePBEKeyEncryptionMethodGenerator("pw".toCharArray())); OutputStream encOut = encGen.open(bos, new byte[1024]); PGPCompressedDataGenerator comData = new PGPCompressedDataGenerator(CompressionAlgorithmTags.ZIP); OutputStream comOut = new BufferedOutputStream(comData.open(encOut)); PGPLiteralDataGenerator litData = new PGPLiteralDataGenerator(); OutputStream litOut = litData.open(comOut, PGPLiteralData.BINARY, PGPLiteralData.CONSOLE, new Date(), new byte[1024]); litOut.write(payload); litOut.flush(); litOut.close(); comOut.close(); encOut.close(); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", bos.toByteArray()); assertMockEndpointsSatisfied(); checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test void testExceptionForSignatureVerificationOptionNoSignatureAllowed() throws Exception { decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_NO_SIGNATURE_ALLOWED); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkey", "Test Message"); assertMockEndpointsSatisfied(); checkThrownException(mock, PGPException.class, null, "PGP message contains a signature although a signature is not expected"); } @Test void testExceptionForSignatureVerificationOptionRequired() throws Exception { encryptor.setSignatureKeyUserid(null); // no signature decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkey", "Test Message"); assertMockEndpointsSatisfied(); 
checkThrownException(mock, PGPException.class, null, "PGP message does not contain any signatures although a signature is expected"); } @Test void testSignatureVerificationOptionIgnore() throws Exception { // encryptor is sending a PGP message with signature! Decryptor is ignoreing the signature decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_IGNORE); decryptor.setSignatureKeyUserids(null); decryptor.setSignatureKeyFileName(null); // no public keyring! --> no signature validation possible String payload = "Test Message"; MockEndpoint mock = getMockEndpoint("mock:unencrypted"); mock.expectedBodiesReceived(payload); template.sendBody("direct:subkey", payload); assertMockEndpointsSatisfied(); } @Override protected RouteBuilder[] createRouteBuilders() { return new RouteBuilder[] {new RouteBuilder() { public void configure() throws Exception { onException(Exception.class).handled(true).to("mock:exception"); // START SNIPPET: pgp-format // Public Key FileName String keyFileName = getKeyFileName(); // Private Key FileName String keyFileNameSec = getKeyFileNameSec(); // Keyring Userid Used to Encrypt String keyUserid = getKeyUserId(); // Private key password String keyPassword = getKeyPassword(); from("direct:inline").marshal().pgp(keyFileName, keyUserid).to("mock:encrypted").unmarshal() .pgp(keyFileNameSec, null, keyPassword).to("mock:unencrypted"); // END SNIPPET: pgp-format // START SNIPPET: pgp-format-header PGPDataFormat pgpEncrypt = new PGPDataFormat(); pgpEncrypt.setKeyFileName(keyFileName); pgpEncrypt.setKeyUserid(keyUserid); pgpEncrypt.setProvider(getProvider()); pgpEncrypt.setAlgorithm(getAlgorithm()); pgpEncrypt.setCompressionAlgorithm(getCompressionAlgorithm()); PGPDataFormat pgpDecrypt = new PGPDataFormat(); pgpDecrypt.setKeyFileName(keyFileNameSec); pgpDecrypt.setPassword(keyPassword); pgpDecrypt.setProvider(getProvider()); 
pgpDecrypt.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_NO_SIGNATURE_ALLOWED); from("direct:inline2").marshal(pgpEncrypt).to("mock:encrypted").unmarshal(pgpDecrypt).to("mock:unencrypted"); from("direct:inline-armor").marshal().pgp(keyFileName, keyUserid, null, true, true).to("mock:encrypted").unmarshal() .pgp(keyFileNameSec, null, keyPassword, true, true).to("mock:unencrypted"); // END SNIPPET: pgp-format-header // START SNIPPET: pgp-format-signature PGPDataFormat pgpSignAndEncrypt = new PGPDataFormat(); pgpSignAndEncrypt.setKeyFileName(keyFileName); pgpSignAndEncrypt.setKeyUserid(keyUserid); pgpSignAndEncrypt.setSignatureKeyFileName(keyFileNameSec); PGPPassphraseAccessor passphraseAccessor = getPassphraseAccessor(); pgpSignAndEncrypt.setSignatureKeyUserid("Super <[email protected]>"); // must be the exact user Id because passphrase is searched in accessor pgpSignAndEncrypt.setPassphraseAccessor(passphraseAccessor); pgpSignAndEncrypt.setProvider(getProvider()); pgpSignAndEncrypt.setAlgorithm(getAlgorithm()); pgpSignAndEncrypt.setHashAlgorithm(getHashAlgorithm()); pgpSignAndEncrypt.setCompressionAlgorithm(getCompressionAlgorithm()); PGPDataFormat pgpVerifyAndDecrypt = new PGPDataFormat(); pgpVerifyAndDecrypt.setKeyFileName(keyFileNameSec); pgpVerifyAndDecrypt.setPassword(keyPassword); pgpVerifyAndDecrypt.setSignatureKeyFileName(keyFileName); pgpVerifyAndDecrypt.setProvider(getProvider()); pgpVerifyAndDecrypt.setSignatureKeyUserid(keyUserid); // restrict verification to public keys with certain User ID from("direct:inline-sign").marshal(pgpSignAndEncrypt).to("mock:encrypted").unmarshal(pgpVerifyAndDecrypt) .to("mock:unencrypted"); // END SNIPPET: pgp-format-signature // test verifying exception, no public key found corresponding to signature key userIds from("direct:verify_exception_sig_userids").marshal(pgpSignAndEncrypt).to("mock:encrypted") .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERIDS).constant(Arrays.asList(new 
String[] {"wrong1", "wrong2" })) .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID).constant("wrongUserID").unmarshal(pgpVerifyAndDecrypt) .to("mock:unencrypted"); /* ---- key ring as byte array -- */ // START SNIPPET: pgp-format-key-ring-byte-array PGPDataFormat pgpEncryptByteArray = new PGPDataFormat(); pgpEncryptByteArray.setEncryptionKeyRing(getPublicKeyRing()); pgpEncryptByteArray.setKeyUserids(getKeyUserIds()); pgpEncryptByteArray.setProvider(getProvider()); pgpEncryptByteArray.setAlgorithm(SymmetricKeyAlgorithmTags.DES); pgpEncryptByteArray.setCompressionAlgorithm(CompressionAlgorithmTags.UNCOMPRESSED); PGPDataFormat pgpDecryptByteArray = new PGPDataFormat(); pgpDecryptByteArray.setEncryptionKeyRing(getSecKeyRing()); pgpDecryptByteArray.setPassphraseAccessor(passphraseAccessor); pgpDecryptByteArray.setProvider(getProvider()); from("direct:key-ring-byte-array").streamCaching().marshal(pgpEncryptByteArray).to("mock:encrypted") .unmarshal(pgpDecryptByteArray).to("mock:unencrypted"); // END SNIPPET: pgp-format-key-ring-byte-array // START SNIPPET: pgp-format-signature-key-ring-byte-array PGPDataFormat pgpSignAndEncryptByteArray = new PGPDataFormat(); pgpSignAndEncryptByteArray.setKeyUserid(keyUserid); pgpSignAndEncryptByteArray.setSignatureKeyRing(getSecKeyRing()); pgpSignAndEncryptByteArray.setSignatureKeyUserid(keyUserid); pgpSignAndEncryptByteArray.setSignaturePassword(keyPassword); pgpSignAndEncryptByteArray.setProvider(getProvider()); pgpSignAndEncryptByteArray.setAlgorithm(SymmetricKeyAlgorithmTags.BLOWFISH); pgpSignAndEncryptByteArray.setHashAlgorithm(HashAlgorithmTags.RIPEMD160); pgpSignAndEncryptByteArray.setCompressionAlgorithm(CompressionAlgorithmTags.ZLIB); PGPDataFormat pgpVerifyAndDecryptByteArray = new PGPDataFormat(); pgpVerifyAndDecryptByteArray.setPassphraseAccessor(passphraseAccessor); pgpVerifyAndDecryptByteArray.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptByteArray.setProvider(getProvider()); // restrict verification to 
public keys with certain User ID pgpVerifyAndDecryptByteArray.setSignatureKeyUserids(getSignatureKeyUserIds()); pgpVerifyAndDecryptByteArray.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED); from("direct:sign-key-ring-byte-array").streamCaching() // encryption key ring can also be set as header .setHeader(PGPDataFormat.ENCRYPTION_KEY_RING).constant(getPublicKeyRing()).marshal(pgpSignAndEncryptByteArray) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.ENCRYPTION_KEY_RING).to("mock:encrypted") // signature key ring can also be set as header .setHeader(PGPDataFormat.SIGNATURE_KEY_RING).constant(getPublicKeyRing()).unmarshal(pgpVerifyAndDecryptByteArray) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.SIGNATURE_KEY_RING).to("mock:unencrypted"); // END SNIPPET: pgp-format-signature-key-ring-byte-array // START SNIPPET: pgp-format-several-signer-keys PGPDataFormat pgpSignAndEncryptSeveralSignerKeys = new PGPDataFormat(); pgpSignAndEncryptSeveralSignerKeys.setKeyUserid(keyUserid); pgpSignAndEncryptSeveralSignerKeys.setEncryptionKeyRing(getPublicKeyRing()); pgpSignAndEncryptSeveralSignerKeys.setSignatureKeyRing(getSecKeyRing()); List<String> signerUserIds = new ArrayList<>(); signerUserIds.add("Third (comment third) <[email protected]>"); signerUserIds.add("Second <[email protected]>"); pgpSignAndEncryptSeveralSignerKeys.setSignatureKeyUserids(signerUserIds); Map<String, String> userId2Passphrase = new HashMap<>(); userId2Passphrase.put("Third (comment third) <[email protected]>", "sdude"); userId2Passphrase.put("Second <[email protected]>", "sdude"); PGPPassphraseAccessor passphraseAccessorSeveralKeys = new DefaultPGPPassphraseAccessor(userId2Passphrase); pgpSignAndEncryptSeveralSignerKeys.setPassphraseAccessor(passphraseAccessorSeveralKeys); PGPDataFormat pgpVerifyAndDecryptSeveralSignerKeys = new 
PGPDataFormat(); pgpVerifyAndDecryptSeveralSignerKeys.setPassphraseAccessor(passphraseAccessor); pgpVerifyAndDecryptSeveralSignerKeys.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptSeveralSignerKeys.setSignatureKeyRing(getPublicKeyRing()); pgpVerifyAndDecryptSeveralSignerKeys.setProvider(getProvider()); // only specify one expected signature List<String> expectedSigUserIds = new ArrayList<>(); expectedSigUserIds.add("Second <[email protected]>"); pgpVerifyAndDecryptSeveralSignerKeys.setSignatureKeyUserids(expectedSigUserIds); from("direct:several-signer-keys").streamCaching().marshal(pgpSignAndEncryptSeveralSignerKeys).to("mock:encrypted") .unmarshal(pgpVerifyAndDecryptSeveralSignerKeys).to("mock:unencrypted"); // END SNIPPET: pgp-format-several-signer-keys // test encryption by several key and signing by serveral keys where the keys are specified by one User ID part PGPDataFormat pgpSignAndEncryptOneUserIdWithServeralKeys = new PGPDataFormat(); pgpSignAndEncryptOneUserIdWithServeralKeys.setEncryptionKeyRing(getPublicKeyRing()); pgpSignAndEncryptOneUserIdWithServeralKeys.setSignatureKeyRing(getSecKeyRing()); // the two private keys have the same password therefore we do not need a passphrase accessor pgpSignAndEncryptOneUserIdWithServeralKeys.setPassword(getKeyPassword()); PGPDataFormat pgpVerifyAndDecryptOneUserIdWithServeralKeys = new PGPDataFormat(); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setPassword(getKeyPassword()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setSignatureKeyRing(getPublicKeyRing()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setProvider(getProvider()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setSignatureKeyUserids(expectedSigUserIds); from("direct:one-userid-several-keys") // there are two keys which have a User ID which contains the string "econd" .setHeader(PGPKeyAccessDataFormat.KEY_USERID) .constant("econd") 
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .constant("econd") .marshal(pgpSignAndEncryptOneUserIdWithServeralKeys) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPKeyAccessDataFormat.KEY_USERID) .removeHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .to("mock:encrypted") // only specify one expected signature key, to check the first signature .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .constant("Second <[email protected]>") .unmarshal(pgpVerifyAndDecryptOneUserIdWithServeralKeys) // do it again but now check the second signature key // there are two keys which have a User ID which contains the string "econd" .setHeader(PGPKeyAccessDataFormat.KEY_USERID).constant("econd").setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .constant("econd").marshal(pgpSignAndEncryptOneUserIdWithServeralKeys) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPKeyAccessDataFormat.KEY_USERID).removeHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) // only specify one expected signature key, to check the second signature .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID).constant("Third (comment third) <[email protected]>") .unmarshal(pgpVerifyAndDecryptOneUserIdWithServeralKeys).to("mock:unencrypted"); } }, new RouteBuilder() { public void configure() { onException(Exception.class).handled(true).to("mock:exception"); from("direct:keyflag").marshal(encryptor).to("mock:encrypted_keyflag"); // test that the correct subkey is selected during decrypt and verify from("direct:subkey").marshal(encryptor).to("mock:encrypted").unmarshal(decryptor).to("mock:unencrypted"); from("direct:subkeyUnmarshal").unmarshal(decryptor).to("mock:unencrypted"); } }, new RouteBuilder() { public void configure() throws Exception { PGPPublicKeyAccessor publicKeyAccessor = new DefaultPGPPublicKeyAccessor(getPublicKeyRing()); //password cannot be set dynamically! 
PGPSecretKeyAccessor secretKeyAccessor = new DefaultPGPSecretKeyAccessor(getSecKeyRing(), "sdude", getProvider()); PGPKeyAccessDataFormat dfEncryptSignKeyAccess = new PGPKeyAccessDataFormat(); dfEncryptSignKeyAccess.setPublicKeyAccessor(publicKeyAccessor); dfEncryptSignKeyAccess.setSecretKeyAccessor(secretKeyAccessor); dfEncryptSignKeyAccess.setKeyUserid(getKeyUserId()); dfEncryptSignKeyAccess.setSignatureKeyUserid(getKeyUserId()); PGPKeyAccessDataFormat dfDecryptVerifyKeyAccess = new PGPKeyAccessDataFormat(); dfDecryptVerifyKeyAccess.setPublicKeyAccessor(publicKeyAccessor); dfDecryptVerifyKeyAccess.setSecretKeyAccessor(secretKeyAccessor); dfDecryptVerifyKeyAccess.setSignatureKeyUserid(getKeyUserId()); from("direct:key_access").marshal(dfEncryptSignKeyAccess).to("mock:encrypted").unmarshal(dfDecryptVerifyKeyAccess) .to("mock:unencrypted"); } }, new RouteBuilder() { public void configure() throws Exception { // START SNIPPET: pgp-encrypt-sign-without-compressed-data-packet PGPDataFormat pgpEncryptSign = new PGPDataFormat(); pgpEncryptSign.setKeyUserid(getKeyUserId()); pgpEncryptSign.setSignatureKeyRing(getSecKeyRing()); pgpEncryptSign.setSignatureKeyUserid(getKeyUserId()); pgpEncryptSign.setSignaturePassword(getKeyPassword()); pgpEncryptSign.setProvider(getProvider()); pgpEncryptSign.setAlgorithm(SymmetricKeyAlgorithmTags.BLOWFISH); pgpEncryptSign.setHashAlgorithm(HashAlgorithmTags.RIPEMD160); // without compressed data packet pgpEncryptSign.setWithCompressedDataPacket(false); PGPDataFormat pgpVerifyAndDecryptByteArray = new PGPDataFormat(); pgpVerifyAndDecryptByteArray.setPassphraseAccessor(getPassphraseAccessor()); pgpVerifyAndDecryptByteArray.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptByteArray.setProvider(getProvider()); // restrict verification to public keys with certain User ID pgpVerifyAndDecryptByteArray.setSignatureKeyUserids(getSignatureKeyUserIds()); 
pgpVerifyAndDecryptByteArray.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED); from("direct:encrypt-sign-without-compressed-data-packet").streamCaching() // encryption key ring can also be set as header .setHeader(PGPDataFormat.ENCRYPTION_KEY_RING).constant(getPublicKeyRing()).marshal(pgpEncryptSign) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.ENCRYPTION_KEY_RING).to("mock:encrypted") // signature key ring can also be set as header .setHeader(PGPDataFormat.SIGNATURE_KEY_RING).constant(getPublicKeyRing()).unmarshal(pgpVerifyAndDecryptByteArray) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.SIGNATURE_KEY_RING).to("mock:unencrypted"); // END SNIPPET: pgp-encrypt-sign-without-compressed-data-packet } }}; } public static byte[] getPublicKeyRing() throws Exception { return getKeyRing(PUB_KEY_RING_FILE_NAME); } public static byte[] getSecKeyRing() throws Exception { return getKeyRing(SEC_KEY_RING_FILE_NAME); } private static byte[] getKeyRing(String fileName) throws IOException { InputStream is = PGPDataFormatTest.class.getClassLoader().getResourceAsStream(fileName); ByteArrayOutputStream output = new ByteArrayOutputStream(); IOHelper.copyAndCloseInput(is, output); output.close(); return output.toByteArray(); } public static PGPPassphraseAccessor getPassphraseAccessor() { Map<String, String> userId2Passphrase = Collections.singletonMap("Super <[email protected]>", "sdude"); PGPPassphraseAccessor passphraseAccessor = new DefaultPGPPassphraseAccessor(userId2Passphrase); return passphraseAccessor; } public static void checkThrownException(MockEndpoint mock, Class<? extends Exception> cl, Class<? 
extends Exception> expectedCauseClass, String expectedMessagePart) throws Exception { Exception e = (Exception) mock.getExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT); assertNotNull(e, "Expected excpetion " + cl.getName() + " missing"); if (e.getClass() != cl) { String stackTrace = getStrackTrace(e); fail("Exception " + cl.getName() + " excpected, but was " + e.getClass().getName() + ": " + stackTrace); } if (expectedMessagePart != null) { if (e.getMessage() == null) { fail("Expected excption does not contain a message. Stack trace: " + getStrackTrace(e)); } else { if (!e.getMessage().contains(expectedMessagePart)) { fail("Expected excption message does not contain a expected message part " + expectedMessagePart + ". Stack trace: " + getStrackTrace(e)); } } } if (expectedCauseClass != null) { Throwable cause = e.getCause(); assertNotNull(cause, "Expected cause exception" + expectedCauseClass.getName() + " missing"); if (expectedCauseClass != cause.getClass()) { fail("Cause exception " + expectedCauseClass.getName() + " expected, but was " + cause.getClass().getName() + ": " + getStrackTrace(e)); } } } public static String getStrackTrace(Exception e) throws UnsupportedEncodingException { ByteArrayOutputStream os = new ByteArrayOutputStream(); PrintWriter w = new PrintWriter(os); e.printStackTrace(w); w.close(); String stackTrace = new String(os.toByteArray(), "UTF-8"); return stackTrace; } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.swagger; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.management.MBeanServer; import javax.management.ObjectName; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import io.swagger.jaxrs.config.BeanConfig; import io.swagger.models.Contact; import io.swagger.models.Info; import io.swagger.models.License; import io.swagger.models.Swagger; import org.apache.camel.Exchange; import org.apache.camel.model.ModelHelper; import org.apache.camel.model.rest.RestDefinition; import org.apache.camel.model.rest.RestsDefinition; import org.apache.camel.spi.ClassResolver; import org.apache.camel.util.CamelVersionHelper; import org.apache.camel.util.EndpointHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A support class for that allows SPI to plugin * and offer Swagger API service listings as part of the Camel component. 
This allows rest-dsl components * such as servlet/jetty/netty4-http to offer Swagger API listings with minimal effort. */ public class RestSwaggerSupport { private static final Logger LOG = LoggerFactory.getLogger(RestSwaggerSupport.class); private RestSwaggerReader reader = new RestSwaggerReader(); private boolean cors; public void initSwagger(BeanConfig swaggerConfig, Map<String, Object> config) { // configure swagger options String s = (String) config.get("swagger.version"); if (s != null) { swaggerConfig.setVersion(s); } s = (String) config.get("base.path"); if (s != null) { swaggerConfig.setBasePath(s); } s = (String) config.get("host"); if (s != null) { swaggerConfig.setHost(s); } s = (String) config.get("cors"); if (s != null) { cors = "true".equalsIgnoreCase(s); } s = (String) config.get("schemes"); if (s == null) { // deprecated due typo s = (String) config.get("schemas"); } if (s != null) { String[] schemes = s.split(","); swaggerConfig.setSchemes(schemes); } else { // assume http by default swaggerConfig.setSchemes(new String[]{"http"}); } String version = (String) config.get("api.version"); String title = (String) config.get("api.title"); String description = (String) config.get("api.description"); String termsOfService = (String) config.get("api.termsOfService"); String licenseName = (String) config.get("api.license.name"); String licenseUrl = (String) config.get("api.license.url"); String contactName = (String) config.get("api.contact.name"); String contactUrl = (String) config.get("api.contact.url"); String contactEmail = (String) config.get("api.contact.email"); Info info = new Info(); info.setVersion(version); info.setTitle(title); info.setDescription(description); info.setTermsOfService(termsOfService); if (licenseName != null || licenseUrl != null) { License license = new License(); license.setName(licenseName); license.setUrl(licenseUrl); info.setLicense(license); } if (contactName != null || contactUrl != null || contactEmail != null) { Contact 
contact = new Contact(); contact.setName(contactName); contact.setUrl(contactUrl); contact.setEmail(contactEmail); info.setContact(contact); } swaggerConfig.setInfo(info); } public List<RestDefinition> getRestDefinitions(String camelId) throws Exception { ObjectName found = null; MBeanServer server = ManagementFactory.getPlatformMBeanServer(); Set<ObjectName> names = server.queryNames(new ObjectName("org.apache.camel:type=context,*"), null); for (ObjectName on : names) { String id = on.getKeyProperty("name"); if (id.startsWith("\"") && id.endsWith("\"")) { id = id.substring(1, id.length() - 1); } if (camelId == null || camelId.equals(id)) { // filter out older Camel versions as this requires Camel 2.15 or better (rest-dsl) String version = (String) server.getAttribute(on, "CamelVersion"); if (CamelVersionHelper.isGE("2.15.0", version)) { found = on; } } } if (found != null) { String xml = (String) server.invoke(found, "dumpRestsAsXml", null, null); if (xml != null) { RestsDefinition rests = ModelHelper.createModelFromXml(null, xml, RestsDefinition.class); if (rests != null) { return rests.getRests(); } } } return null; } public List<String> findCamelContexts() throws Exception { List<String> answer = new ArrayList<>(); MBeanServer server = ManagementFactory.getPlatformMBeanServer(); Set<ObjectName> names = server.queryNames(new ObjectName("*:type=context,*"), null); for (ObjectName on : names) { String id = on.getKeyProperty("name"); if (id.startsWith("\"") && id.endsWith("\"")) { id = id.substring(1, id.length() - 1); } // filter out older Camel versions as this requires Camel 2.15 or better (rest-dsl) String version = (String) server.getAttribute(on, "CamelVersion"); if (CamelVersionHelper.isGE("2.15.0", version)) { answer.add(id); } } return answer; } public void renderResourceListing(RestApiResponseAdapter response, BeanConfig swaggerConfig, String contextId, String route, ClassResolver classResolver) throws Exception { LOG.trace("renderResourceListing"); if 
(cors) { response.setHeader("Access-Control-Allow-Headers", "Origin, Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers"); response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS, CONNECT, PATCH"); response.setHeader("Access-Control-Allow-Origin", "*"); } List<RestDefinition> rests = getRestDefinitions(contextId); if (rests != null) { response.setHeader(Exchange.CONTENT_TYPE, "application/json"); // read the rest-dsl into swagger model Swagger swagger = reader.read(rests, route, swaggerConfig, contextId, classResolver); ObjectMapper mapper = new ObjectMapper(); mapper.enable(SerializationFeature.INDENT_OUTPUT); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); byte[] bytes = mapper.writeValueAsBytes(swagger); int len = bytes.length; response.setHeader(Exchange.CONTENT_LENGTH, "" + len); response.writeBytes(bytes); } else { response.noContent(); } } /** * Renders a list of available CamelContexts in the JVM */ public void renderCamelContexts(RestApiResponseAdapter response, String contextId, String contextIdPattern) throws Exception { LOG.trace("renderCamelContexts"); if (cors) { response.setHeader("Access-Control-Allow-Headers", "Origin, Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers"); response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS, CONNECT, PATCH"); response.setHeader("Access-Control-Allow-Origin", "*"); } response.setHeader(Exchange.CONTENT_TYPE, "application/json"); StringBuffer sb = new StringBuffer(); List<String> contexts = findCamelContexts(); // filter non matched CamelContext's if (contextIdPattern != null) { Iterator<String> it = contexts.iterator(); while (it.hasNext()) { String name = it.next(); boolean match; if ("#name#".equals(contextIdPattern)) { match = name.equals(contextId); } else { match = EndpointHelper.matchPattern(name, 
contextIdPattern); } if (!match) { it.remove(); } } } sb.append("[\n"); for (int i = 0; i < contexts.size(); i++) { String name = contexts.get(i); sb.append("{\"name\": \"").append(name).append("\"}"); if (i < contexts.size() - 1) { sb.append(",\n"); } } sb.append("\n]"); int len = sb.length(); response.setHeader(Exchange.CONTENT_LENGTH, "" + len); response.writeBytes(sb.toString().getBytes()); } }
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.appengine.v1.stub; import static com.google.appengine.v1.DomainMappingsClient.ListDomainMappingsPagedResponse; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.appengine.v1.CreateDomainMappingRequest; import com.google.appengine.v1.DeleteDomainMappingRequest; import com.google.appengine.v1.DomainMapping; import com.google.appengine.v1.GetDomainMappingRequest; import com.google.appengine.v1.ListDomainMappingsRequest; import com.google.appengine.v1.ListDomainMappingsResponse; import com.google.appengine.v1.OperationMetadataV1; import com.google.appengine.v1.UpdateDomainMappingRequest; import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the DomainMappings service API. 
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class GrpcDomainMappingsStub extends DomainMappingsStub {
  // NOTE: auto-generated gapic stub — descriptors, transport settings and callable
  // wiring below follow the generator's fixed pattern; do not hand-edit logic here.

  // --- Static gRPC method descriptors, one per RPC of the DomainMappings service. ---
  private static final MethodDescriptor<ListDomainMappingsRequest, ListDomainMappingsResponse>
      listDomainMappingsMethodDescriptor =
          MethodDescriptor.<ListDomainMappingsRequest, ListDomainMappingsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.appengine.v1.DomainMappings/ListDomainMappings")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListDomainMappingsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListDomainMappingsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetDomainMappingRequest, DomainMapping>
      getDomainMappingMethodDescriptor =
          MethodDescriptor.<GetDomainMappingRequest, DomainMapping>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.appengine.v1.DomainMappings/GetDomainMapping")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetDomainMappingRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(DomainMapping.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<CreateDomainMappingRequest, Operation>
      createDomainMappingMethodDescriptor =
          MethodDescriptor.<CreateDomainMappingRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.appengine.v1.DomainMappings/CreateDomainMapping")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateDomainMappingRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UpdateDomainMappingRequest, Operation>
      updateDomainMappingMethodDescriptor =
          MethodDescriptor.<UpdateDomainMappingRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.appengine.v1.DomainMappings/UpdateDomainMapping")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateDomainMappingRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<DeleteDomainMappingRequest, Operation>
      deleteDomainMappingMethodDescriptor =
          MethodDescriptor.<DeleteDomainMappingRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.appengine.v1.DomainMappings/DeleteDomainMapping")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteDomainMappingRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .build();

  // --- Per-instance callables built from the descriptors in the constructor. ---
  private final UnaryCallable<ListDomainMappingsRequest, ListDomainMappingsResponse>
      listDomainMappingsCallable;
  private final UnaryCallable<ListDomainMappingsRequest, ListDomainMappingsPagedResponse>
      listDomainMappingsPagedCallable;
  private final UnaryCallable<GetDomainMappingRequest, DomainMapping> getDomainMappingCallable;
  private final UnaryCallable<CreateDomainMappingRequest, Operation> createDomainMappingCallable;
  private final OperationCallable<CreateDomainMappingRequest, DomainMapping, OperationMetadataV1>
      createDomainMappingOperationCallable;
  private final UnaryCallable<UpdateDomainMappingRequest, Operation> updateDomainMappingCallable;
  private final OperationCallable<UpdateDomainMappingRequest, DomainMapping, OperationMetadataV1>
      updateDomainMappingOperationCallable;
  private final UnaryCallable<DeleteDomainMappingRequest, Operation> deleteDomainMappingCallable;
  private final OperationCallable<DeleteDomainMappingRequest, Empty, OperationMetadataV1>
      deleteDomainMappingOperationCallable;

  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  /** Creates a stub from the given settings with a fresh client context. */
  public static final GrpcDomainMappingsStub create(DomainMappingsStubSettings settings)
      throws IOException {
    return new GrpcDomainMappingsStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default settings against the given client context. */
  public static final GrpcDomainMappingsStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcDomainMappingsStub(
        DomainMappingsStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings, the given context and callable factory. */
  public static final GrpcDomainMappingsStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcDomainMappingsStub(
        DomainMappingsStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcDomainMappingsStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcDomainMappingsStub(DomainMappingsStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcDomainMappingsCallableFactory());
  }

  /**
   * Constructs an instance of GrpcDomainMappingsStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcDomainMappingsStub(
      DomainMappingsStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Transport settings: each extractor pulls the routing-header param ("parent"/"name")
    // from the request so the backend can route the call.
    GrpcCallSettings<ListDomainMappingsRequest, ListDomainMappingsResponse>
        listDomainMappingsTransportSettings =
            GrpcCallSettings.<ListDomainMappingsRequest, ListDomainMappingsResponse>newBuilder()
                .setMethodDescriptor(listDomainMappingsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<GetDomainMappingRequest, DomainMapping> getDomainMappingTransportSettings =
        GrpcCallSettings.<GetDomainMappingRequest, DomainMapping>newBuilder()
            .setMethodDescriptor(getDomainMappingMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("name", String.valueOf(request.getName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<CreateDomainMappingRequest, Operation>
        createDomainMappingTransportSettings =
            GrpcCallSettings.<CreateDomainMappingRequest, Operation>newBuilder()
                .setMethodDescriptor(createDomainMappingMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<UpdateDomainMappingRequest, Operation>
        updateDomainMappingTransportSettings =
            GrpcCallSettings.<UpdateDomainMappingRequest, Operation>newBuilder()
                .setMethodDescriptor(updateDomainMappingMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("name", String.valueOf(request.getName()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<DeleteDomainMappingRequest, Operation>
        deleteDomainMappingTransportSettings =
            GrpcCallSettings.<DeleteDomainMappingRequest, Operation>newBuilder()
                .setMethodDescriptor(deleteDomainMappingMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("name", String.valueOf(request.getName()));
                      return params.build();
                    })
                .build();

    // Wire every callable (unary, paged, long-running) via the callable factory.
    this.listDomainMappingsCallable =
        callableFactory.createUnaryCallable(
            listDomainMappingsTransportSettings,
            settings.listDomainMappingsSettings(),
            clientContext);
    this.listDomainMappingsPagedCallable =
        callableFactory.createPagedCallable(
            listDomainMappingsTransportSettings,
            settings.listDomainMappingsSettings(),
            clientContext);
    this.getDomainMappingCallable =
        callableFactory.createUnaryCallable(
            getDomainMappingTransportSettings, settings.getDomainMappingSettings(), clientContext);
    this.createDomainMappingCallable =
        callableFactory.createUnaryCallable(
            createDomainMappingTransportSettings,
            settings.createDomainMappingSettings(),
            clientContext);
    this.createDomainMappingOperationCallable =
        callableFactory.createOperationCallable(
            createDomainMappingTransportSettings,
            settings.createDomainMappingOperationSettings(),
            clientContext,
            operationsStub);
    this.updateDomainMappingCallable =
        callableFactory.createUnaryCallable(
            updateDomainMappingTransportSettings,
            settings.updateDomainMappingSettings(),
            clientContext);
    this.updateDomainMappingOperationCallable =
        callableFactory.createOperationCallable(
            updateDomainMappingTransportSettings,
            settings.updateDomainMappingOperationSettings(),
            clientContext,
            operationsStub);
    this.deleteDomainMappingCallable =
        callableFactory.createUnaryCallable(
            deleteDomainMappingTransportSettings,
            settings.deleteDomainMappingSettings(),
            clientContext);
    this.deleteDomainMappingOperationCallable =
        callableFactory.createOperationCallable(
            deleteDomainMappingTransportSettings,
            settings.deleteDomainMappingOperationSettings(),
            clientContext,
            operationsStub);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<ListDomainMappingsRequest, ListDomainMappingsResponse>
      listDomainMappingsCallable() {
    return listDomainMappingsCallable;
  }

  @Override
  public UnaryCallable<ListDomainMappingsRequest, ListDomainMappingsPagedResponse>
      listDomainMappingsPagedCallable() {
    return listDomainMappingsPagedCallable;
  }

  @Override
  public UnaryCallable<GetDomainMappingRequest, DomainMapping> getDomainMappingCallable() {
    return getDomainMappingCallable;
  }

  @Override
  public UnaryCallable<CreateDomainMappingRequest, Operation> createDomainMappingCallable() {
    return createDomainMappingCallable;
  }

  @Override
  public OperationCallable<CreateDomainMappingRequest, DomainMapping, OperationMetadataV1>
      createDomainMappingOperationCallable() {
    return createDomainMappingOperationCallable;
  }

  @Override
  public UnaryCallable<UpdateDomainMappingRequest, Operation> updateDomainMappingCallable() {
    return updateDomainMappingCallable;
  }

  @Override
  public OperationCallable<UpdateDomainMappingRequest, DomainMapping, OperationMetadataV1>
      updateDomainMappingOperationCallable() {
    return updateDomainMappingOperationCallable;
  }

  @Override
  public UnaryCallable<DeleteDomainMappingRequest, Operation> deleteDomainMappingCallable() {
    return deleteDomainMappingCallable;
  }

  @Override
  public OperationCallable<DeleteDomainMappingRequest, Empty, OperationMetadataV1>
      deleteDomainMappingOperationCallable() {
    return deleteDomainMappingOperationCallable;
  }

  // Lifecycle methods simply delegate to the aggregated background resources.
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package mf.org.apache.xerces.impl.xs.traversers; import mf.org.apache.xerces.impl.xs.SchemaGrammar; import mf.org.apache.xerces.impl.xs.SchemaSymbols; import mf.org.apache.xerces.impl.xs.XSAnnotationImpl; import mf.org.apache.xerces.impl.xs.XSConstraints; import mf.org.apache.xerces.impl.xs.XSGroupDecl; import mf.org.apache.xerces.impl.xs.XSModelGroupImpl; import mf.org.apache.xerces.impl.xs.XSParticleDecl; import mf.org.apache.xerces.impl.xs.util.XInt; import mf.org.apache.xerces.impl.xs.util.XSObjectListImpl; import mf.org.apache.xerces.util.DOMUtil; import mf.org.apache.xerces.util.XMLSymbols; import mf.org.apache.xerces.xni.QName; import mf.org.apache.xerces.xs.XSObjectList; import mf.org.w3c.dom.Element; /** * The model group schema component traverser. * * <group * name = NCName> * Content: (annotation?, (all | choice | sequence)) * </group> * * @xerces.internal * * @author Rahul Srivastava, Sun Microsystems Inc. 
 * @author Elena Litani, IBM
 * @author Lisa Martin, IBM
 * @version $Id: XSDGroupTraverser.java 819653 2009-09-28 17:29:56Z knoaman $
 */
class XSDGroupTraverser extends XSDAbstractParticleTraverser {

    XSDGroupTraverser (XSDHandler handler,
            XSAttributeChecker gAttrCheck) {
        super(handler, gAttrCheck);
    }

    /**
     * Traverses a local &lt;group ref="..."/&gt; reference and returns a particle
     * wrapping the referenced global model group, or null for an empty/invalid
     * reference (e.g. missing ref, or minOccurs == maxOccurs == 0).
     */
    XSParticleDecl traverseLocal(Element elmNode,
            XSDocumentInfo schemaDoc,
            SchemaGrammar grammar) {

        // General Attribute Checking for elmNode declared locally
        Object[] attrValues = fAttrChecker.checkAttributes(elmNode, false,
                schemaDoc);
        QName refAttr = (QName) attrValues[XSAttributeChecker.ATTIDX_REF];
        XInt  minAttr = (XInt)  attrValues[XSAttributeChecker.ATTIDX_MINOCCURS];
        XInt  maxAttr = (XInt)  attrValues[XSAttributeChecker.ATTIDX_MAXOCCURS];

        XSGroupDecl group = null;

        // ref should be here.
        if (refAttr == null) {
            reportSchemaError("s4s-att-must-appear", new Object[]{"group (local)", "ref"}, elmNode);
        } else {
            // get global decl
            // index is a particle index.
            group = (XSGroupDecl)fSchemaHandler.getGlobalDecl(schemaDoc, XSDHandler.GROUP_TYPE, refAttr, elmNode);
        }

        XSAnnotationImpl annotation = null;
        // no children other than "annotation?" are allowed
        Element child = DOMUtil.getFirstChildElement(elmNode);
        if (child != null && DOMUtil.getLocalName(child).equals(SchemaSymbols.ELT_ANNOTATION)) {
            annotation = traverseAnnotationDecl(child, attrValues, false, schemaDoc);
            child = DOMUtil.getNextSiblingElement(child);
        }
        else {
            // no explicit <annotation> child: a synthetic annotation may still come
            // from attributes in non-schema namespaces
            String text = DOMUtil.getSyntheticAnnotation(elmNode);
            if (text != null) {
                annotation = traverseSyntheticAnnotation(elmNode, text, attrValues, false, schemaDoc);
            }
        }

        if (child != null) {
            reportSchemaError("s4s-elt-must-match.1", new Object[]{"group (local)", "(annotation?)", DOMUtil.getLocalName(elmNode)}, elmNode);
        }

        int minOccurs = minAttr.intValue();
        int maxOccurs = maxAttr.intValue();

        XSParticleDecl particle = null;

        // not empty group, not empty particle
        if (group != null && group.fModelGroup != null
                && !(minOccurs == 0 && maxOccurs == 0)) {
            // create a particle to contain this model group
            // (reuse a pooled particle when a decl pool is available)
            if (fSchemaHandler.fDeclPool != null) {
                particle = fSchemaHandler.fDeclPool.getParticleDecl();
            } else {
                particle = new XSParticleDecl();
            }
            particle.fType = XSParticleDecl.PARTICLE_MODELGROUP;
            particle.fValue = group.fModelGroup;
            particle.fMinOccurs = minOccurs;
            particle.fMaxOccurs = maxOccurs;
            if (group.fModelGroup.fCompositor == XSModelGroupImpl.MODELGROUP_ALL) {
                // a reference to an <all> group has extra occurrence constraints
                Long defaultVals = (Long)attrValues[XSAttributeChecker.ATTIDX_FROMDEFAULT];
                particle = checkOccurrences(particle, SchemaSymbols.ELT_GROUP,
                        (Element)elmNode.getParentNode(), GROUP_REF_WITH_ALL,
                        defaultVals.longValue());
            }
            if (refAttr != null) {
                XSObjectList annotations;
                if (annotation != null) {
                    annotations = new XSObjectListImpl();
                    ((XSObjectListImpl) annotations).addXSObject(annotation);
                } else {
                    annotations = XSObjectListImpl.EMPTY_LIST;
                }
                particle.fAnnotations = annotations;
            } else {
                particle.fAnnotations = group.fAnnotations;
            }
        }

        fAttrChecker.returnAttrArray(attrValues, schemaDoc);

        return particle;

    } // traverseLocal

    /**
     * Traverses a global &lt;group name="..."&gt; declaration, registers it in the
     * grammar (handling redefinition and tolerated duplicates) and returns the
     * group declaration, or null when the required name attribute is absent.
     */
    XSGroupDecl traverseGlobal(Element elmNode,
            XSDocumentInfo schemaDoc,
            SchemaGrammar grammar) {

        // General Attribute Checking for elmNode declared globally
        Object[] attrValues = fAttrChecker.checkAttributes(elmNode, true,
                schemaDoc);
        String  strNameAttr = (String)  attrValues[XSAttributeChecker.ATTIDX_NAME];

        // must have a name
        if (strNameAttr == null) {
            reportSchemaError("s4s-att-must-appear", new Object[]{"group (global)", "name"}, elmNode);
        }

        // Create the group defi up-front, so it can be passed
        // to the traversal methods
        XSGroupDecl group = new XSGroupDecl();
        XSParticleDecl particle = null;

        // must have at least one child
        Element l_elmChild = DOMUtil.getFirstChildElement(elmNode);
        XSAnnotationImpl annotation = null;
        if (l_elmChild == null) {
            reportSchemaError("s4s-elt-must-match.2",
                    new Object[]{"group (global)", "(annotation?, (all | choice | sequence))"},
                    elmNode);
        } else {
            String childName = l_elmChild.getLocalName();
            if (childName.equals(SchemaSymbols.ELT_ANNOTATION)) {
                annotation = traverseAnnotationDecl(l_elmChild, attrValues, true, schemaDoc);
                l_elmChild = DOMUtil.getNextSiblingElement(l_elmChild);
                if (l_elmChild != null)
                    childName = l_elmChild.getLocalName();
            } else {
                String text = DOMUtil.getSyntheticAnnotation(elmNode);
                if (text != null) {
                    annotation = traverseSyntheticAnnotation(elmNode, text, attrValues, false, schemaDoc);
                }
            }

            // the model-group child must be one of all | choice | sequence
            if (l_elmChild == null) {
                reportSchemaError("s4s-elt-must-match.2",
                        new Object[]{"group (global)", "(annotation?, (all | choice | sequence))"},
                        elmNode);
            } else if (childName.equals(SchemaSymbols.ELT_ALL)) {
                particle = traverseAll(l_elmChild, schemaDoc, grammar, CHILD_OF_GROUP, group);
            } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) {
                particle = traverseChoice(l_elmChild, schemaDoc, grammar, CHILD_OF_GROUP, group);
            } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) {
                particle = traverseSequence(l_elmChild, schemaDoc, grammar, CHILD_OF_GROUP, group);
            } else {
                reportSchemaError("s4s-elt-must-match.1",
                        new Object[]{"group (global)", "(annotation?, (all | choice | sequence))",
                                DOMUtil.getLocalName(l_elmChild)},
                        l_elmChild);
            }

            // nothing may follow the model-group child
            if (l_elmChild != null &&
                    DOMUtil.getNextSiblingElement(l_elmChild) != null) {
                reportSchemaError("s4s-elt-must-match.1",
                        new Object[]{"group (global)", "(annotation?, (all | choice | sequence))",
                                DOMUtil.getLocalName(DOMUtil.getNextSiblingElement(l_elmChild))},
                        DOMUtil.getNextSiblingElement(l_elmChild));
            }
        }

        // add global group declaration to the grammar
        if (strNameAttr != null) {
            group.fName = strNameAttr;
            group.fTargetNamespace = schemaDoc.fTargetNamespace;
            if (particle == null) {
                // fall back to an empty sequence when no valid model group was found
                particle = XSConstraints.getEmptySequence();
            }
            group.fModelGroup = (XSModelGroupImpl)particle.fValue;
            XSObjectList annotations;
            if (annotation != null) {
                annotations = new XSObjectListImpl();
                ((XSObjectListImpl) annotations).addXSObject(annotation);
            } else {
                annotations = XSObjectListImpl.EMPTY_LIST;
            }
            group.fAnnotations = annotations;
            // Add group declaration to grammar
            if (grammar.getGlobalGroupDecl(group.fName) == null) {
                grammar.addGlobalGroupDecl(group);
            }

            // also add it to extended map
            final String loc = fSchemaHandler.schemaDocument2SystemId(schemaDoc);
            final XSGroupDecl group2 = grammar.getGlobalGroupDecl(group.fName, loc);
            if (group2 == null) {
                grammar.addGlobalGroupDecl(group, loc);
            }

            // handle duplicates
            if (fSchemaHandler.fTolerateDuplicates) {
                if (group2 != null) {
                    group = group2;
                }
                fSchemaHandler.addGlobalGroupDecl(group);
            }
        }
        else {
            // name attribute is not there, don't return this group.
            group = null;
        }

        if (group != null) {
            // store groups redefined by restriction in the grammar so
            // that we can get at them at full-schema-checking time.
            Object redefinedGrp = fSchemaHandler.getGrpOrAttrGrpRedefinedByRestriction(XSDHandler.GROUP_TYPE,
                    new QName(XMLSymbols.EMPTY_STRING, strNameAttr, strNameAttr, schemaDoc.fTargetNamespace),
                    schemaDoc, elmNode);
            if (redefinedGrp != null) {
                // store in grammar
                grammar.addRedefinedGroupDecl(group, (XSGroupDecl)redefinedGrp,
                        fSchemaHandler.element2Locator(elmNode));
            }
        }

        fAttrChecker.returnAttrArray(attrValues, schemaDoc);

        return group;

    } // traverseGlobal
}
/*
 * This class implements a single compiler pass. The pass is executed
 * after the promising loop fragments have been marked. The goal for
 * this compiler pass is to extract the set of input variables for
 * all of the marked code fragments.
 *
 * Input variables are variables that were declared outside of the loop
 * body, but were read within the loop. They are thus inputs to the
 * loop code fragment.
 *
 * - Maaz
 */
package casper.visit;

import java.util.ArrayList;
import java.util.List;

import casper.JavaLibModel;
import casper.ast.JavaExt;
import casper.extension.MyWhileExt;
import casper.types.Variable;
import polyglot.ast.ArrayAccess;
import polyglot.ast.Assign;
import polyglot.ast.Binary;
import polyglot.ast.Call;
import polyglot.ast.Cast;
import polyglot.ast.Expr;
import polyglot.ast.Field;
import polyglot.ast.If;
import polyglot.ast.Lit;
import polyglot.ast.Local;
import polyglot.ast.LocalDecl;
import polyglot.ast.Node;
import polyglot.ast.Receiver;
import polyglot.ast.Return;
import polyglot.ast.Switch;
import polyglot.ast.Unary;
import polyglot.ast.While;
import polyglot.ext.jl5.ast.ExtendedFor;
import polyglot.visit.NodeVisitor;

/**
 * Compiler pass that collects, for every loop previously marked
 * "interesting", the set of variables the loop reads but does not
 * declare (its inputs), plus locally declared variables and the
 * expressions used. Results are stored on each loop's {@link MyWhileExt}
 * extension object.
 */
public class ExtractInputVariables extends NodeVisitor {
	// When true, diagnostic output is printed to stderr.
	boolean debug;
	// NOTE(review): never read or written in this class — appears to be dead; confirm before removing.
	boolean ignore;
	// Extensions of the loops we are currently inside of; statements visited
	// while this list is non-empty contribute reads to every listed loop.
	ArrayList<MyWhileExt> extensions;

	@SuppressWarnings("deprecation")
	public ExtractInputVariables(){
		this.debug = false;
		this.extensions = new ArrayList<MyWhileExt>();
	}

	/**
	 * Recursively records every variable read performed by {@code exp}
	 * into all currently active loop extensions. Dispatches on the
	 * concrete AST node type of the expression.
	 */
	private void extractReadsFromExpr(Expr exp){
		if(exp == null){
			return;
		}
		else if(exp instanceof Local){
			// If expression is a local variable
			for(MyWhileExt ext : extensions){
				ext.saveInputVariable(exp.toString(), exp.type().toString(),Variable.VAR);
			}
		}
		else if(exp instanceof Unary){
			// If expression is a unary operation: record its operand if it is a variable
			Expr operand = ((Unary) exp).expr();
			if(operand instanceof Local){
				// If operand is a variable
				for(MyWhileExt ext : extensions){
					ext.saveInputVariable(operand.toString(),operand.type().toString(),Variable.VAR);
				}
			}
			for(MyWhileExt ext : extensions)
				ext.saveExpression(exp.toString(), exp.type().toString());
		}
		else if(exp instanceof Binary){
			// If expression is a binary expression: record each operand that is a variable
			Expr operandLeft = ((Binary) exp).left();
			Expr operandRight = ((Binary) exp).right();
			if(operandLeft instanceof Local){
				// If operand is a variable
				for(MyWhileExt ext : extensions){
					ext.saveInputVariable(operandLeft.toString(),operandLeft.type().toString(),Variable.VAR);
				}
			}
			if(operandRight instanceof Local){
				// If operand is a variable
				for(MyWhileExt ext : extensions){
					ext.saveInputVariable(operandRight.toString(),operandRight.type().toString(),Variable.VAR);
				}
			}
			// For now, not saving unary or binary expressions - but their components instead
			for(MyWhileExt ext : extensions)
				ext.saveExpression(exp.toString(), exp.type().toString());
		}
		else if(exp instanceof Call){
			// If expression is a user defined function call
			// TODO
			// Else if the expression is a library function call:
			// ask the library model which sub-nodes the call reads.
			for(MyWhileExt ext : extensions){
				List<Node> reads = JavaLibModel.extractReads((Call)exp,ext);
				for(Node node : reads){
					// A receiver (e.g. the target object) counts as a field access,
					// unless it is a literal.
					if(node instanceof Receiver && !(node instanceof Lit) ){
						ext.saveInputVariable(node.toString(),((Receiver)node).type().toString(),Variable.FIELD_ACCESS);
					}
					else if(node instanceof Expr){
						extractReadsFromExpr((Expr)node);
					}
				}
			}
		}
		else if(exp instanceof Field){
			// If right hand side is a field load; also record the container's type
			for(MyWhileExt ext : extensions){
				ext.saveInputVariable(exp.toString(), exp.type().toString(), ((Field) exp).target().type().toString(),Variable.FIELD_ACCESS);
			}
		}
		else if(exp instanceof ArrayAccess){
			// If expression is an array access
			for(MyWhileExt ext : extensions){
				Expr index = ((ArrayAccess)exp).index();
				// Very naive. Should scan previous code. Locals may be holding constant values.
				if(index instanceof Local){
					// Save the array
					ext.saveInputVariable(((ArrayAccess)exp).array().toString(), ((ArrayAccess)exp).array().type().toString(),Variable.ARRAY_ACCESS);
					// Save the index (unless it is a constant)
					ext.saveInputVariable(index.toString(), index.type().toString(),Variable.VAR);
				}
				else if(index instanceof Lit){
					// Save the array (constant index: the index itself is not an input)
					ext.saveInputVariable(((ArrayAccess)exp).array().toString(), ((ArrayAccess)exp).array().type().toString(),Variable.CONST_ARRAY_ACCESS);
				}
			}
		}
		else if(exp instanceof Cast){
			// If expression is being casted: look through the cast
			extractReadsFromExpr(((Cast) exp).expr());
		}
		else if(exp instanceof Lit){
			// Ignore literals — they are not variables
		}
		else{
			if(debug){
				// Something weird happened: unrecognized expression kind
				System.err.print("NOT SURE! ");
				System.err.print(exp.getClass().getName());
				System.err.print(" : " );
				System.err.println(exp.toString());
			}
		}
	}

	/**
	 * Called on entry of every AST node. Activates extraction when an
	 * interesting loop is entered, then records reads performed by the
	 * visited statement while any loop is active.
	 */
	public NodeVisitor enter(Node parent, Node n){
		// If the node is a loop
		if(n instanceof While){
			// If the loop was marked as interesting
			if(((MyWhileExt)JavaExt.ext(n)).interesting){
				// begin extraction
				this.extensions.add((MyWhileExt)JavaExt.ext(n));
			}
		}
		if(n instanceof ExtendedFor){
			// If the loop was marked as interesting
			if(((MyWhileExt)JavaExt.ext(n)).interesting){
				// begin extraction; the iterated collection itself is an input
				MyWhileExt ext = (MyWhileExt)JavaExt.ext(n);
				this.extensions.add(ext);
				ext.saveInputVariable(((ExtendedFor) n).expr().toString(), ((ExtendedFor) n).expr().type().toString(), Variable.ARRAY_ACCESS);
			}
		}

		// If we are not extracting, then do nothing
		if(this.extensions.size() == 0)
			return this;

		if(n instanceof Assign){
			// Assignment statement: only the right-hand side is a read
			extractReadsFromExpr((((Assign) n).right()));
		}
		else if(n instanceof If){
			// If statement
			extractReadsFromExpr(((If) n).cond());
		}
		else if(n instanceof While){
			// While statement
			extractReadsFromExpr(((While) n).cond());
		}
		else if(n instanceof Switch){
			// Switch statement
			extractReadsFromExpr(((Switch) n).expr());
		}
		else if(n instanceof LocalDecl){
			// Local declaration statement: initializer is a read
			extractReadsFromExpr(((LocalDecl) n).init());
			// Save local variable in each ext
			for(MyWhileExt ext : extensions)
				ext.saveLocalVariable(((LocalDecl) n).id().toString(), ((LocalDecl) n).type().toString());
		}
		else if(n instanceof Call){
			// If expression is a user defined function call
			// TODO
			// Else if the expression is a library function call
			for(MyWhileExt ext : extensions){
				List<Node> reads = JavaLibModel.extractReads((Call)n,ext);
				for(Node node : reads){
					if(node instanceof Receiver && !(node instanceof Lit) ){
						ext.saveInputVariable(node.toString(),((Receiver)node).type().toString(),Variable.FIELD_ACCESS);
					}
					else if(node instanceof Expr){
						extractReadsFromExpr((Expr)node);
					}
				}
			}
		}
		else if(n instanceof Return){
			// Return statement
			extractReadsFromExpr(((Return) n).expr());
		}

		return this;
	}

	/**
	 * Called when leaving a node. On leaving an interesting loop, finalizes
	 * the collected variables and deactivates extraction for that loop.
	 */
	@Override
	public Node leave(Node old, Node n, NodeVisitor v){
		// If the node is a loop
		if(n instanceof While || n instanceof ExtendedFor){
			// If the loop was marked as interesting
			if(((MyWhileExt)JavaExt.ext(n)).interesting){
				((MyWhileExt)JavaExt.ext(n)).savePendingInputVariables();
				if(debug){
					System.err.println("Input:\n"+((MyWhileExt)JavaExt.ext(n)).inputVars.toString());
					System.err.println("Local:\n"+((MyWhileExt)JavaExt.ext(n)).localVars.toString());
					System.err.println("Expressions:\n"+((MyWhileExt)JavaExt.ext(n)).expUsed.toString());
				}
				this.extensions.remove(((MyWhileExt)JavaExt.ext(n)));
			}
		}

		return n;
	}

	@Override
	public void finish(){
		if(debug)
			System.err.println("\n************* Finished input var extraction complier pass *************");
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.functions.aggfunctions;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.DecimalDataUtils;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.BooleanType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.DoubleType;
import org.apache.flink.table.types.logical.FloatType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.SmallIntType;
import org.apache.flink.table.types.logical.TinyIntType;
import org.apache.flink.table.types.logical.VarCharType;

import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;

import java.util.Arrays;
import java.util.List;

/**
 * Test case for built-in LAST_VALUE aggregate function.
 * This class tests `accumulate` method without order argument.
 */
@RunWith(Enclosed.class)
public final class LastValueAggFunctionWithoutOrderTest {

	// --------------------------------------------------------------------------------------------
	// Test sets for a particular type being aggregated
	//
	// Actual tests are implemented in:
	//  - AggFunctionTestBase
	// --------------------------------------------------------------------------------------------

	/**
	 * Test for {@link TinyIntType}.
	 */
	public static final class ByteLastValueAggFunctionWithoutOrderTest
			extends NumberLastValueAggFunctionWithoutOrderTestBase<Byte> {

		@Override
		protected Byte getValue(String v) {
			return Byte.valueOf(v);
		}

		@Override
		protected AggregateFunction<Byte, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.TINYINT().getLogicalType());
		}
	}

	/**
	 * Test for {@link SmallIntType}.
	 */
	// Fixed: previously linked (and imported) the unrelated
	// org.apache.flink.testutils.serialization.types.ShortType; the SMALLINT
	// logical type, consistent with the sibling tests, is SmallIntType.
	public static final class ShortLastValueAggFunctionWithoutOrderTest
			extends NumberLastValueAggFunctionWithoutOrderTestBase<Short> {

		@Override
		protected Short getValue(String v) {
			return Short.valueOf(v);
		}

		@Override
		protected AggregateFunction<Short, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.SMALLINT().getLogicalType());
		}
	}

	/**
	 * Test for {@link IntType}.
	 */
	public static final class IntLastValueAggFunctionWithoutOrderTest
			extends NumberLastValueAggFunctionWithoutOrderTestBase<Integer> {

		@Override
		protected Integer getValue(String v) {
			return Integer.valueOf(v);
		}

		@Override
		protected AggregateFunction<Integer, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.INT().getLogicalType());
		}
	}

	/**
	 * Test for {@link BigIntType}.
	 */
	public static final class LongLastValueAggFunctionWithoutOrderTest
			extends NumberLastValueAggFunctionWithoutOrderTestBase<Long> {

		@Override
		protected Long getValue(String v) {
			return Long.valueOf(v);
		}

		@Override
		protected AggregateFunction<Long, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.BIGINT().getLogicalType());
		}
	}

	/**
	 * Test for {@link FloatType}.
	 */
	public static final class FloatLastValueAggFunctionWithoutOrderTest
			extends NumberLastValueAggFunctionWithoutOrderTestBase<Float> {

		@Override
		protected Float getValue(String v) {
			return Float.valueOf(v);
		}

		@Override
		protected AggregateFunction<Float, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.FLOAT().getLogicalType());
		}
	}

	/**
	 * Test for {@link DoubleType}.
	 */
	public static final class DoubleLastValueAggFunctionWithoutOrderTest
			extends NumberLastValueAggFunctionWithoutOrderTestBase<Double> {

		@Override
		protected Double getValue(String v) {
			return Double.valueOf(v);
		}

		@Override
		protected AggregateFunction<Double, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.DOUBLE().getLogicalType());
		}
	}

	/**
	 * Test for {@link BooleanType}.
	 */
	public static final class BooleanLastValueAggFunctionWithoutOrderTest
			extends LastValueAggFunctionWithoutOrderTestBase<Boolean> {

		@Override
		protected List<List<Boolean>> getInputValueSets() {
			return Arrays.asList(
					Arrays.asList(
							false,
							false,
							false
					),
					Arrays.asList(
							true,
							true,
							true
					),
					Arrays.asList(
							true,
							false,
							null,
							true,
							false,
							true,
							null
					),
					Arrays.asList(
							null,
							null,
							null
					),
					Arrays.asList(
							null,
							true
					));
		}

		@Override
		protected List<Boolean> getExpectedResults() {
			return Arrays.asList(
					false,
					true,
					true,
					null,
					true
			);
		}

		@Override
		protected AggregateFunction<Boolean, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.BOOLEAN().getLogicalType());
		}
	}

	/**
	 * Test for {@link DecimalType}.
	 */
	public static final class DecimalLastValueAggFunctionWithoutOrderTest
			extends LastValueAggFunctionWithoutOrderTestBase<DecimalData> {

		// Shared precision/scale for every DECIMAL fixture below.
		private final int precision = 20;
		private final int scale = 6;

		@Override
		protected List<List<DecimalData>> getInputValueSets() {
			return Arrays.asList(
					Arrays.asList(
							DecimalDataUtils.castFrom("1", precision, scale),
							DecimalDataUtils.castFrom("1000.000001", precision, scale),
							DecimalDataUtils.castFrom("-1", precision, scale),
							DecimalDataUtils.castFrom("-999.998999", precision, scale),
							null,
							DecimalDataUtils.castFrom("0", precision, scale),
							DecimalDataUtils.castFrom("-999.999", precision, scale),
							null,
							DecimalDataUtils.castFrom("999.999", precision, scale)
					),
					Arrays.asList(
							null,
							null,
							null,
							null,
							null
					),
					Arrays.asList(
							null,
							DecimalDataUtils.castFrom("0", precision, scale)
					)
			);
		}

		@Override
		protected List<DecimalData> getExpectedResults() {
			return Arrays.asList(
					DecimalDataUtils.castFrom("999.999", precision, scale),
					null,
					DecimalDataUtils.castFrom("0", precision, scale)
			);
		}

		@Override
		protected AggregateFunction<DecimalData, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.DECIMAL(precision, scale).getLogicalType());
		}
	}

	/**
	 * Test for {@link VarCharType}.
	 */
	public static final class StringLastValueAggFunctionWithoutOrderTest
			extends LastValueAggFunctionWithoutOrderTestBase<StringData> {

		@Override
		protected List<List<StringData>> getInputValueSets() {
			return Arrays.asList(
					Arrays.asList(
							StringData.fromString("abc"),
							StringData.fromString("def"),
							StringData.fromString("ghi"),
							null,
							StringData.fromString("jkl"),
							null,
							StringData.fromString("zzz")
					),
					Arrays.asList(
							null,
							null
					),
					Arrays.asList(
							null,
							StringData.fromString("a"),
							null
					),
					Arrays.asList(
							StringData.fromString("x"),
							null,
							StringData.fromString("e")
					)
			);
		}

		@Override
		protected List<StringData> getExpectedResults() {
			return Arrays.asList(
					StringData.fromString("zzz"),
					null,
					StringData.fromString("a"),
					StringData.fromString("e")
			);
		}

		@Override
		protected AggregateFunction<StringData, RowData> getAggregator() {
			return new LastValueAggFunction<>(DataTypes.STRING().getLogicalType());
		}
	}

	// --------------------------------------------------------------------------------------------
	// This section contain base classes that provide common inputs and declare the accumulator
	// class type for tests declared above.
	// --------------------------------------------------------------------------------------------

	/**
	 * Test base for {@link LastValueAggFunction} without order.
	 */
	public abstract static class LastValueAggFunctionWithoutOrderTestBase<T>
			extends AggFunctionTestBase<T, RowData> {

		@Override
		protected Class<?> getAccClass() {
			return RowData.class;
		}
	}

	/**
	 * Test base for {@link LastValueAggFunction} with number types.
	 */
	public abstract static class NumberLastValueAggFunctionWithoutOrderTestBase<T>
			extends LastValueAggFunctionWithoutOrderTestBase<T> {

		/** Parses the string fixture into the concrete number type under test. */
		protected abstract T getValue(String v);

		@Override
		protected List<List<T>> getInputValueSets() {
			return Arrays.asList(
					Arrays.asList(
							getValue("1"),
							null,
							getValue("-99"),
							getValue("3"),
							null
					),
					Arrays.asList(
							null,
							null,
							null,
							null
					),
					Arrays.asList(
							null,
							getValue("10"),
							null,
							getValue("3")
					)
			);
		}

		@Override
		protected List<T> getExpectedResults() {
			return Arrays.asList(
					getValue("3"),
					null,
					getValue("3")
			);
		}
	}
}
/* * The MIT License * * Copyright 2012 SBPrime. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.primesoft.mcpainter.configuration;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.primesoft.mcpainter.MCPainterMain;
import org.bukkit.configuration.Configuration;
import org.bukkit.configuration.ConfigurationSection;

/**
 * This class contains configuration
 *
 * @author SBPrime
 */
public class ConfigProvider {

    /** Expected config file version; mismatch marks the config as outdated. */
    private static final int CONFIG_VERSION = 4;

    public static final int BLOCK_SIZE = 16;

    /** Per-command price table parsed from the "price" list. */
    private static final HashMap<String, Double> m_commandPrice = new HashMap<String, Double>();

    private static SizeNode[] m_sizeEntries = new SizeNode[0];

    private static SizeNode m_maxImage = new SizeNode("0x0");

    private static String[] m_texturePacks;

    private static boolean m_checkUpdate = false;

    private static boolean m_isConfigUpdate = false;

    private static int m_queueHardLimit;

    private static int m_queueSoftLimit;

    private static File m_pluginFolder;

    private static File m_imgFolder;

    private static File m_modelFolder;

    private static File m_paletteFolder;

    private static File m_modFolder;

    private static File m_dataFolder;

    private static long m_interval;

    private static int m_blocksCnt;

    private static String m_configVersion;

    private static String m_defaultPalette;

    private static boolean m_checkAccess;

    private static boolean m_logBlocks;

    /**
     * Version string read from the config file ("?" when absent)
     *
     * @return
     */
    public static String getConfigVersion() {
        return m_configVersion;
    }

    /**
     * Plugin root folder
     *
     * @return
     */
    public static File getPluginFolder() {
        return m_pluginFolder;
    }

    /**
     * Queue hard limit
     *
     * @return
     */
    public static int getQueueHardLimit() {
        return m_queueHardLimit;
    }

    /**
     * Queue soft limit
     *
     * @return
     */
    public static int getQueueSoftLimit() {
        return m_queueSoftLimit;
    }

    /**
     * Folder where the palettes are stored
     *
     * @return
     */
    public static File getPaletteFolder() {
        return m_paletteFolder;
    }

    /**
     * Folder where the models are stored
     *
     * @return
     */
    public static File getModelFolder() {
        return m_modelFolder;
    }

    /**
     * Folder where the image maps are stored
     *
     * @return
     */
    public static File getImgFolder() {
        return m_imgFolder;
    }

    /**
     * The plugin data folder
     *
     * @return
     */
    public static File getDataFolder() {
        return m_dataFolder;
    }

    /**
     * The default palette name
     *
     * @return
     */
    public static String getDefaultPalette() {
        return m_defaultPalette;
    }

    /**
     * Per-permission-node image size limits
     *
     * @return
     */
    public static SizeNode[] getSizeNodes() {
        return m_sizeEntries;
    }

    /**
     * Global maximum image size
     *
     * @return
     */
    public static SizeNode getMaxSize() {
        return m_maxImage;
    }

    /**
     * Is update checking enabled
     *
     * @return true if enabled
     */
    public static boolean getCheckUpdate() {
        return m_checkUpdate;
    }

    /**
     * Block drawing interval
     *
     * @return the interval
     */
    public static long getInterval() {
        return m_interval;
    }

    /**
     * Is block login enabled
     *
     * @return
     */
    public static boolean getLogBlocks() {
        return m_logBlocks;
    }

    /**
     * Is block perms checking enabled
     *
     * @return
     */
    public static boolean getCheckAccess() {
        return m_checkAccess;
    }

    /**
     * Get the number of blocks placed
     *
     * @return number of blocks
     */
    public static int getBlockCount() {
        return m_blocksCnt;
    }

    /**
     * Is the configuration up to date
     *
     * @return
     */
    public static boolean isConfigUpdated() {
        return m_isConfigUpdate;
    }

    /**
     * The mod (jar) folder
     */
    public static File getModFolder() {
        return m_modFolder;
    }

    /**
     * All texture packs
     *
     * @return
     */
    public static String[] getTexturePacks() {
        return m_texturePacks;
    }

    /**
     * Is any of the texture packs enabled
     *
     * @return
     */
    public static boolean isTexturePackEnabled() {
        return m_texturePacks != null && m_texturePacks.length > 0;
    }

    /**
     * Load configuration
     *
     * @param plugin parent plugin
     * @return true if config loaded
     */
    public static boolean load(MCPainterMain plugin) {
        if (plugin == null) {
            return false;
        }

        plugin.saveDefaultConfig();

        Configuration config = plugin.getConfig();
        m_pluginFolder = plugin.getDataFolder();
        // Ensure the data sub-folders exist before anything tries to use them.
        m_imgFolder = new File(m_pluginFolder, "img");
        if (!m_imgFolder.exists()) {
            m_imgFolder.mkdir();
        }
        m_modelFolder = new File(m_pluginFolder, "models");
        if (!m_modelFolder.exists()) {
            m_modelFolder.mkdir();
        }
        m_paletteFolder = new File(m_pluginFolder, "palette");
        if (!m_paletteFolder.exists()) {
            m_paletteFolder.mkdir();
        }
        m_dataFolder = new File(m_pluginFolder, "data");
        if (!m_dataFolder.exists()) {
            m_dataFolder.mkdir();
        }

        ConfigurationSection mainSection = config.getConfigurationSection("mcpainter");
        if (mainSection == null) {
            return false;
        }
        m_configVersion = mainSection.getString("version", "?");
        parseRenderSection(mainSection);
        parsePriceSection(mainSection);
        m_checkUpdate = mainSection.getBoolean("checkVersion", true);
        m_isConfigUpdate = mainSection.getInt("version", 1) == CONFIG_VERSION;
        m_maxImage = new SizeNode(mainSection.getString("maxSize", "0x0"));
        m_modFolder = new File(m_pluginFolder, mainSection.getString("modsFolder", "mods"));
        m_defaultPalette = mainSection.getString("palette", "default").toLowerCase();
        m_texturePacks = parseTextures(mainSection.getStringList("texturePacks"));
        if (!m_modFolder.exists()) {
            m_modFolder.mkdir();
        }

        m_sizeEntries = parseSizeNodeSection(mainSection);
        parseBlocksHubSection(mainSection.getConfigurationSection("blocksHub"));

        MCPainterMain.log(m_sizeEntries.length + " size nodes defined in config file.");
        return true;
    }

    /**
     * Initialize blocks hub configuration
     *
     * @param bhSection
     */
    private static void parseBlocksHubSection(ConfigurationSection bhSection) {
        if (bhSection == null) {
            // Section missing: fall back to logging on, access checks off.
            m_logBlocks = true;
            m_checkAccess = false;
        } else {
            m_logBlocks = bhSection.getBoolean("logBlocks", true);
            m_checkAccess = bhSection.getBoolean("checkAccess", false);
        }
    }

    /**
     * Parse the node size section
     *
     * @param mainSection
     * @return
     */
    private static SizeNode[] parseSizeNodeSection(
            ConfigurationSection mainSection) {
        // Fixed: was a raw 'new ArrayList()' (unchecked warning).
        List<SizeNode> sizeEntries = new ArrayList<>();
        for (String string : mainSection.getStringList("maxSizeNodes")) {
            try {
                sizeEntries.add(new SizeNode(string));
            } catch (Exception e) {
                // Malformed entry: report and keep parsing the rest.
                MCPainterMain.log("Error parsing config entry: " + string);
            }
        }
        return sizeEntries.toArray(new SizeNode[0]);
    }

    /**
     * Parse render section
     *
     * @param mainSection
     */
    private static void parseRenderSection(ConfigurationSection mainSection) {
        ConfigurationSection renderSection = mainSection.getConfigurationSection("rendering");
        if (renderSection == null) {
            m_blocksCnt = 1000;
            m_interval = 15;
            m_queueSoftLimit = 100000;
            m_queueHardLimit = 100000;
        } else {
            m_blocksCnt = renderSection.getInt("blocks", 1000);
            m_interval = renderSection.getInt("interval", 15);
            m_queueSoftLimit = renderSection.getInt("queue-limit-soft", 100000);
            m_queueHardLimit = renderSection.getInt("queue-limit-hard", 200000);
        }
    }

    /**
     * Parse the price section entry
     *
     * @param mainSection
     */
    private static void parsePriceSection(ConfigurationSection mainSection) {
        // Each entry has the form "command:price".
        for (String string : mainSection.getStringList("price")) {
            try {
                String[] parts = string.split(":");
                if (parts.length != 2) {
                    MCPainterMain.log("* Error parsing price entry: " + string);
                    continue;
                }

                String command = parts[0];
                double price = Double.parseDouble(parts[1]);

                m_commandPrice.put(command, price);
            } catch (Exception e) {
                MCPainterMain.log("* Error parsing price entry: " + string);
            }
        }
    }

    /**
     * Get price for command
     *
     * @param command The command
     * @return Command price (0 when the command has no configured price)
     */
    public static double getCommandPrice(String command) {
        // Single lookup instead of containsKey + get.
        Double price = m_commandPrice.get(command);
        return price != null ? price : 0;
    }

    /**
     * Parse texture packs entries
     *
     * @param stringList
     * @return
     */
    private static String[] parseTextures(List<String> stringList) {
        if (stringList == null) {
            return new String[0];
        }

        return stringList.toArray(new String[0]);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.Stack; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.util.ReadOnlyList; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; /** * Class that helps in checking file system permission. * The state of this class need not be synchronized as it has data structures that * are read-only. * * Some of the helper methods are gaurded by {@link FSNamesystem#readLock()}. 
 */
class FSPermissionChecker {
  // NOTE(review): deliberately(?) logs under UserGroupInformation's logger,
  // not FSPermissionChecker's — confirm before "fixing".
  static final Log LOG = LogFactory.getLog(UserGroupInformation.class);

  /** @return a string for throwing {@link AccessControlException} */
  private String toAccessControlString(INode inode, int snapshotId,
      FsAction access, FsPermission mode) {
    return toAccessControlString(inode, snapshotId, access, mode, false);
  }

  /**
   * @return a string for throwing {@link AccessControlException};
   *     a trailing '+' marks a denial that came from an ACL entry
   *     rather than the plain permission bits.
   */
  private String toAccessControlString(INode inode, int snapshotId,
      FsAction access, FsPermission mode, boolean deniedFromAcl) {
    StringBuilder sb = new StringBuilder("Permission denied: ")
      .append("user=").append(user).append(", ")
      .append("access=").append(access).append(", ")
      .append("inode=\"").append(inode.getFullPathName()).append("\":")
      .append(inode.getUserName(snapshotId)).append(':')
      .append(inode.getGroupName(snapshotId)).append(':')
      .append(inode.isDirectory() ? 'd' : '-')
      .append(mode);
    if (deniedFromAcl) {
      sb.append("+");
    }
    return sb.toString();
  }

  // Short name of the calling user.
  private final String user;
  /** A set with group names. Not synchronized since it is unmodifiable */
  private final Set<String> groups;
  // True when the caller is the fs owner or belongs to the supergroup.
  private final boolean isSuper;

  FSPermissionChecker(String fsOwner, String supergroup,
      UserGroupInformation callerUgi) {
    HashSet<String> s = new HashSet<String>(Arrays.asList(callerUgi.getGroupNames()));
    groups = Collections.unmodifiableSet(s);
    user = callerUgi.getShortUserName();
    isSuper = user.equals(fsOwner) || groups.contains(supergroup);
  }

  /**
   * Check if the callers group contains the required values.
   * @param group group to check
   */
  public boolean containsGroup(String group) {return groups.contains(group);}

  public String getUser() {
    return user;
  }

  public boolean isSuperUser() {
    return isSuper;
  }

  /**
   * Verify if the caller has the required permission. This will result into
   * an exception if the caller is not allowed to access the resource.
   */
  public void checkSuperuserPrivilege()
      throws AccessControlException {
    if (!isSuper) {
      throw new AccessControlException("Access denied for user "
          + user + ". Superuser privilege is required");
    }
  }

  /**
   * Check whether current user have permissions to access the path.
   * Traverse is always checked.
   *
   * Parent path means the parent directory for the path.
   * Ancestor path means the last (the closest) existing ancestor directory
   * of the path.
   * Note that if the parent path exists,
   * then the parent path and the ancestor path are the same.
   *
   * For example, suppose the path is "/foo/bar/baz".
   * No matter baz is a file or a directory,
   * the parent path is "/foo/bar".
   * If bar exists, then the ancestor path is also "/foo/bar".
   * If bar does not exist and foo exists,
   * then the ancestor path is "/foo".
   * Further, if both foo and bar do not exist,
   * then the ancestor path is "/".
   *
   * @param doCheckOwner Require user to be the owner of the path?
   * @param ancestorAccess The access required by the ancestor of the path.
   * @param parentAccess The access required by the parent of the path.
   * @param access The access required by the path.
   * @param subAccess If path is a directory,
   * it is the access required of the path and all the sub-directories.
   * If path is not a directory, there is no effect.
   * @param ignoreEmptyDir Ignore permission checking for empty directory?
   * @throws AccessControlException
   *
   * Guarded by {@link FSNamesystem#readLock()}
   * Caller of this method must hold that lock.
   */
  void checkPermission(INodesInPath inodesInPath, boolean doCheckOwner,
      FsAction ancestorAccess, FsAction parentAccess, FsAction access,
      FsAction subAccess, boolean ignoreEmptyDir)
      throws AccessControlException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("ACCESS CHECK: " + this
          + ", doCheckOwner=" + doCheckOwner
          + ", ancestorAccess=" + ancestorAccess
          + ", parentAccess=" + parentAccess
          + ", access=" + access
          + ", subAccess=" + subAccess
          + ", ignoreEmptyDir=" + ignoreEmptyDir);
    }
    // check if (parentAccess != null) && file exists, then check sb
    // If resolveLink, the check is performed on the link target.
    final int snapshotId = inodesInPath.getPathSnapshotId();
    final int length = inodesInPath.length();
    final INode last = length > 0 ? inodesInPath.getLastINode() : null;
    final INode parent = length > 1 ? inodesInPath.getINode(-2) : null;

    // EXECUTE on every directory along the path is always required.
    checkTraverse(inodesInPath, snapshotId);

    if (parentAccess != null && parentAccess.implies(FsAction.WRITE)
        && length > 1 && last != null) {
      // Writing into a sticky-bit directory additionally requires ownership.
      checkStickyBit(parent, last, snapshotId);
    }
    if (ancestorAccess != null && length > 1) {
      List<INode> inodes = inodesInPath.getReadOnlyINodes();
      INode ancestor = null;
      // Walk backwards past the non-existent components (null inodes) to the
      // closest existing ancestor; loop body is intentionally empty.
      for (int i = inodes.size() - 2; i >= 0 && (ancestor =
          inodes.get(i)) == null; i--);
      check(ancestor, snapshotId, ancestorAccess);
    }
    if (parentAccess != null && length > 1 && parent != null) {
      check(parent, snapshotId, parentAccess);
    }
    if (access != null) {
      check(last, snapshotId, access);
    }
    if (subAccess != null) {
      checkSubAccess(last, snapshotId, subAccess, ignoreEmptyDir);
    }
    if (doCheckOwner) {
      checkOwner(last, snapshotId);
    }
  }

  /** Guarded by {@link FSNamesystem#readLock()} */
  private void checkOwner(INode inode, int snapshotId
      ) throws AccessControlException {
    if (inode != null && user.equals(inode.getUserName(snapshotId))) {
      return;
    }
    throw new AccessControlException(
            "Permission denied. user=" + user +
            " is not the owner of inode=" + inode);
  }

  /** Guarded by {@link FSNamesystem#readLock()} */
  private void checkTraverse(INodesInPath iip, int snapshotId)
      throws AccessControlException {
    List<INode> inodes = iip.getReadOnlyINodes();
    // Require EXECUTE on each existing path component except the last;
    // stop at the first missing component.
    for (int i = 0; i < inodes.size() - 1; i++) {
      INode inode = inodes.get(i);
      if (inode == null) {
        break;
      }
      check(inode, snapshotId, FsAction.EXECUTE);
    }
  }

  /**
   * Iteratively (via an explicit stack) checks {@code access} on the given
   * directory and every sub-directory beneath it.
   * Guarded by {@link FSNamesystem#readLock()}
   */
  private void checkSubAccess(INode inode, int snapshotId, FsAction access,
      boolean ignoreEmptyDir) throws AccessControlException {
    if (inode == null || !inode.isDirectory()) {
      return;
    }

    Stack<INodeDirectory> directories = new Stack<INodeDirectory>();
    for(directories.push(inode.asDirectory()); !directories.isEmpty(); ) {
      INodeDirectory d = directories.pop();
      ReadOnlyList<INode> cList = d.getChildrenList(snapshotId);
      // Skip the check only when the directory is empty AND the caller
      // asked for empty directories to be ignored.
      if (!(cList.isEmpty() && ignoreEmptyDir)) {
        check(d, snapshotId, access);
      }

      for(INode child : cList) {
        if (child.isDirectory()) {
          directories.push(child.asDirectory());
        }
      }
    }
  }

  /**
   * Checks {@code access} on a single inode, delegating to the ACL path when
   * the inode carries an access ACL, otherwise to the plain permission bits.
   * Guarded by {@link FSNamesystem#readLock()}
   */
  private void check(INode inode, int snapshotId, FsAction access)
      throws AccessControlException {
    if (inode == null) {
      return;
    }
    FsPermission mode = inode.getFsPermission(snapshotId);
    AclFeature aclFeature = inode.getAclFeature(snapshotId);
    if (aclFeature != null) {
      // It's possible that the inode has a default ACL but no access ACL.
      int firstEntry = aclFeature.getEntryAt(0);
      if (AclEntryStatusFormat.getScope(firstEntry) == AclEntryScope.ACCESS) {
        checkAccessAcl(inode, snapshotId, access, mode, aclFeature);
        return;
      }
    }
    checkFsPermission(inode, snapshotId, access, mode);
  }

  /**
   * Classic owner/group/other permission-bit check: exactly one class
   * applies to the caller, and only that class's bits are consulted.
   */
  private void checkFsPermission(INode inode, int snapshotId, FsAction access,
      FsPermission mode) throws AccessControlException {
    if (user.equals(inode.getUserName(snapshotId))) { //user class
      if (mode.getUserAction().implies(access)) { return; }
    }
    else if (groups.contains(inode.getGroupName(snapshotId))) { //group class
      if (mode.getGroupAction().implies(access)) { return; }
    }
    else { //other class
      if (mode.getOtherAction().implies(access)) { return; }
    }
    throw new AccessControlException(
        toAccessControlString(inode, snapshotId, access, mode));
  }

  /**
   * Checks requested access against an Access Control List.  This method relies
   * on finding the ACL data in the relevant portions of {@link FsPermission} and
   * {@link AclFeature} as implemented in the logic of {@link AclStorage}.  This
   * method also relies on receiving the ACL entries in sorted order.  This is
   * assumed to be true, because the ACL modification methods in
   * {@link AclTransformation} sort the resulting entries.
   *
   * More specifically, this method depends on these invariants in an ACL:
   * - The list must be sorted.
   * - Each entry in the list must be unique by scope + type + name.
   * - There is exactly one each of the unnamed user/group/other entries.
   * - The mask entry must not have a name.
   * - The other entry must not have a name.
   * - Default entries may be present, but they are ignored during enforcement.
* * @param inode INode accessed inode * @param snapshotId int snapshot ID * @param access FsAction requested permission * @param mode FsPermission mode from inode * @param aclFeature AclFeature of inode * @throws AccessControlException if the ACL denies permission */ private void checkAccessAcl(INode inode, int snapshotId, FsAction access, FsPermission mode, AclFeature aclFeature) throws AccessControlException { boolean foundMatch = false; // Use owner entry from permission bits if user is owner. if (user.equals(inode.getUserName(snapshotId))) { if (mode.getUserAction().implies(access)) { return; } foundMatch = true; } // Check named user and group entries if user was not denied by owner entry. if (!foundMatch) { for (int pos = 0, entry; pos < aclFeature.getEntriesSize(); pos++) { entry = aclFeature.getEntryAt(pos); if (AclEntryStatusFormat.getScope(entry) == AclEntryScope.DEFAULT) { break; } AclEntryType type = AclEntryStatusFormat.getType(entry); String name = AclEntryStatusFormat.getName(entry); if (type == AclEntryType.USER) { // Use named user entry with mask from permission bits applied if user // matches name. if (user.equals(name)) { FsAction masked = AclEntryStatusFormat.getPermission(entry).and( mode.getGroupAction()); if (masked.implies(access)) { return; } foundMatch = true; break; } } else if (type == AclEntryType.GROUP) { // Use group entry (unnamed or named) with mask from permission bits // applied if user is a member and entry grants access. If user is a // member of multiple groups that have entries that grant access, then // it doesn't matter which is chosen, so exit early after first match. String group = name == null ? inode.getGroupName(snapshotId) : name; if (groups.contains(group)) { FsAction masked = AclEntryStatusFormat.getPermission(entry).and( mode.getGroupAction()); if (masked.implies(access)) { return; } foundMatch = true; } } } } // Use other entry if user was not denied by an earlier match. 
if (!foundMatch && mode.getOtherAction().implies(access)) { return; } throw new AccessControlException( toAccessControlString(inode, snapshotId, access, mode, true)); } /** Guarded by {@link FSNamesystem#readLock()} */ private void checkStickyBit(INode parent, INode inode, int snapshotId ) throws AccessControlException { if(!parent.getFsPermission(snapshotId).getStickyBit()) { return; } // If this user is the directory owner, return if(parent.getUserName(snapshotId).equals(user)) { return; } // if this user is the file owner, return if(inode.getUserName(snapshotId).equals(user)) { return; } throw new AccessControlException("Permission denied by sticky bit setting:" + " user=" + user + ", inode=" + inode); } /** * Whether a cache pool can be accessed by the current context * * @param pool CachePool being accessed * @param access type of action being performed on the cache pool * @throws AccessControlException if pool cannot be accessed */ public void checkPermission(CachePool pool, FsAction access) throws AccessControlException { FsPermission mode = pool.getMode(); if (isSuperUser()) { return; } if (user.equals(pool.getOwnerName()) && mode.getUserAction().implies(access)) { return; } if (groups.contains(pool.getGroupName()) && mode.getGroupAction().implies(access)) { return; } if (mode.getOtherAction().implies(access)) { return; } throw new AccessControlException("Permission denied while accessing pool " + pool.getPoolName() + ": user " + user + " does not have " + access.toString() + " permissions."); } }
package com.orhanobut.hawk;

import android.app.Activity;
import android.content.Context;

import junit.framework.TestCase;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.annotation.Config;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import rx.Observable;
import rx.Observer;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Func1;
import rx.schedulers.Schedulers;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for the Hawk key-value store, run under Robolectric so that a
 * real Android {@link Context} is available without a device.
 *
 * Each test starts from a freshly initialized Hawk instance ({@link #setUp})
 * and the store is wiped afterwards ({@link #tearDown}).
 *
 * @author Orhan Obut
 */
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class, sdk = 21)
public class HawkTest extends TestCase {

  // Key shared by the Rx-based tests below.
  private static final String KEY = "TAG";

  // Robolectric-provided Activity context used to initialize Hawk.
  protected final Context context;

  public HawkTest() {
    context = Robolectric.buildActivity(Activity.class).create().get();
  }

  @Before
  public void setUp() throws Exception {
    init();
  }

  // Separate so subclasses/tests can re-run initialization on demand.
  public void init() {
    Hawk.init(context).build();
  }

  @After
  public void tearDown() throws Exception {
    super.tearDown();
    Hawk.clear();
  }

  // init(null) must be rejected with a descriptive message.
  @Test public void initWithInvalidValues() {
    try {
      Hawk.init(null);
      fail();
    } catch (Exception e) {
      assertThat(e).hasMessage("Context should not be null");
    }
  }

  // Round-trips each supported scalar type plus a custom object and a
  // (static) inner class instance.
  @Test public void testSingleItem() {
    Hawk.put("boolean", true);
    assertThat(Hawk.get("boolean")).isEqualTo(true);

    Hawk.put("string", "string");
    assertThat(Hawk.get("string")).isEqualTo("string");

    Hawk.put("float", 1.5f);
    assertThat(Hawk.get("float")).isEqualTo(1.5f);

    Hawk.put("integer", 10);
    assertThat(Hawk.get("integer")).isEqualTo(10);

    Hawk.put("char", 'A');
    assertThat(Hawk.get("char")).isEqualTo('A');

    Hawk.put("object", new FooBar());
    FooBar fooBar = Hawk.get("object");
    assertThat(fooBar).isNotNull();
    assertThat(fooBar.name).isEqualTo("hawk");

    assertTrue(Hawk.put("innerClass", new FooBar.InnerFoo()));
    FooBar.InnerFoo innerFoo = Hawk.get("innerClass");
    assertThat(innerFoo).isNotNull();
    assertThat(innerFoo.name).isEqualTo("hawk");
  }

  // A missing key must fall back to the supplied default.
  @Test public void testSingleItemDefault() {
    boolean result = Hawk.get("tag", true);
    assertThat(result).isEqualTo(true);
  }

  // Lists round-trip with element order preserved.
  @Test public void testList() {
    List<String> list = new ArrayList<>();
    list.add("foo");
    list.add("bar");

    Hawk.put("tag", list);
    List<String> list1 = Hawk.get("tag");

    assertThat(list1).isNotNull();
    assertThat(list1.get(0)).isEqualTo("foo");
    assertThat(list1.get(1)).isEqualTo("bar");
  }

  @Test public void testEmptyList() {
    List<FooBar> list = new ArrayList<>();
    Hawk.put("tag", list);

    List<FooBar> list1 = Hawk.get("tag");
    assertThat(list1).isNotNull();
  }

  @Test public void testMap() {
    Map<String, String> map = new HashMap<>();
    map.put("key", "value");
    Hawk.put("map", map);

    Map<String, String> map1 = Hawk.get("map");
    assertThat(map).isNotNull();
    assertThat(map1.get("key")).isEqualTo("value");
  }

  @Test public void testEmptyMap() {
    Map<String, FooBar> map = new HashMap<>();
    Hawk.put("tag", map);

    Map<String, FooBar> map1 = Hawk.get("tag");
    assertThat(map1).isNotNull();
  }

  @Test public void testSet() {
    Set<String> set = new HashSet<>();
    set.add("foo");
    Hawk.put("set", set);

    Set<String> set1 = Hawk.get("set");
    assertThat(set1).isNotNull();
    assertThat(set1.contains("foo")).isTrue();
  }

  @Test public void testEmptySet() {
    Set<FooBar> set = new HashSet<>();
    Hawk.put("tag", set);

    Set<FooBar> set1 = Hawk.get("tag");
    assertThat(set1).isNotNull();
  }

  // Null keys are invalid for both put and get.
  @Test public void testNullKeyPut() {
    try {
      Hawk.put(null, "test");
      fail();
    } catch (Exception e) {
      assertThat(e).hasMessage("Key cannot be null");
    }
  }

  @Test public void testNullKeyGet() {
    try {
      Hawk.get(null);
      fail();
    } catch (Exception e) {
      assertThat(e).hasMessage("Key cannot be null");
    }
  }

  // Putting null for an existing key is allowed and clears the stored value.
  @Test public void testNullValuePut() {
    try {
      Hawk.put("tag", "something");
      assertThat(Hawk.get("tag")).isNotNull();

      assertThat(Hawk.put("tag", null)).isTrue();
      assertThat(Hawk.get("tag")).isNull();
    } catch (Exception e) {
      fail();
    }
  }

  @Test public void testCount() {
    Hawk.clear();

    String value = "test";
    Hawk.put("tag", value);
    Hawk.put("tag1", value);
    Hawk.put("tag2", value);
    Hawk.put("tag3", value);
    Hawk.put("tag4", value);

    assertThat(Hawk.count()).isEqualTo(5);
  }

  @Test public void testClear() {
    String value = "test";
    Hawk.put("tag", value);
    Hawk.put("tag1", value);
    Hawk.put("tag2", value);

    Hawk.clear();

    assertThat(Hawk.count()).isEqualTo(0);
  }

  @Test public void testRemove() {
    Hawk.clear();

    String value = "test";
    Hawk.put("tag", value);
    Hawk.put("tag1", value);
    Hawk.put("tag2", value);

    Hawk.remove("tag");

    String result = Hawk.get("tag");

    assertThat(result).isNull();
    assertThat(Hawk.count()).isEqualTo(2);
  }

  // Varargs remove deletes all named keys in one call.
  @Test public void testBulkRemoval() {
    Hawk.clear();

    Hawk.put("tag", "test");
    Hawk.put("tag1", 1);
    Hawk.put("tag2", Boolean.FALSE);

    Hawk.remove("tag", "tag1");

    String result = Hawk.get("tag");

    assertThat(result).isNull();
    assertThat(Hawk.count()).isEqualTo(1);
  }

  @Test public void testContains() {
    String value = "test";
    String key = "tag";
    Hawk.put(key, value);

    assertThat(Hawk.contains(key)).isTrue();

    Hawk.remove(key);

    assertThat(Hawk.contains(key)).isFalse();
  }

  // chain() batches several puts into a single commit.
  @Test public void testChain() {
    Hawk.chain()
        .put("tag", 1)
        .put("tag1", "yes")
        .put("tag2", Boolean.FALSE)
        .commit();

    assertThat(Hawk.get("tag")).isEqualTo(1);
    assertThat(Hawk.get("tag1")).isEqualTo("yes");
    assertThat(Hawk.get("tag2")).isEqualTo(false);
  }

  // Same as testChain but with an explicit initial capacity hint.
  @Test public void testChainWithCapacity() {
    Hawk.chain(10)
        .put("tag", 1)
        .put("tag1", "yes")
        .put("tag2", Boolean.FALSE)
        .commit();

    assertThat(Hawk.get("tag")).isEqualTo(1);
    assertThat(Hawk.get("tag1")).isEqualTo("yes");
    assertThat(Hawk.get("tag2")).isEqualTo(false);
  }

  @Test public void testChainWithLists() {
    List<String> items = new ArrayList<>();
    items.add("fst");
    items.add("snd");
    items.add("trd");

    Hawk.chain()
        .put("tag", 1)
        .put("tag1", "yes")
        .put("tag2", Boolean.FALSE)
        .put("lst", items)
        .commit();

    assertThat(Hawk.get("tag")).isEqualTo(1);
    assertThat(Hawk.get("tag1")).isEqualTo("yes");
    assertThat(Hawk.get("tag2")).isEqualTo(false);

    List<String> stored = Hawk.get("lst");
    assertThat(stored).isNotNull();
    assertThat(stored.isEmpty()).isFalse();

    for (int i = 0, s = stored.size(); i < s; i++) {
      assertThat(stored.get(i)).isEqualTo(items.get(i));
    }
  }

  // Smoke tests: many sequential writes must not throw.
  @Test public void testHugeData() {
    for (int i = 0; i < 100; i++) {
      Hawk.put("" + i, "" + i);
    }
    assertThat(true).isTrue();
  }

  @Test public void testHugeDataWithBulk() {
    Hawk.Chain chain = Hawk.chain();
    for (int i = 0; i < 10000; i++) {
      chain.put("" + i, "" + i);
    }
    chain.commit();
    assertThat(true).isTrue();
  }

  // Re-initializing with a different log level must take effect.
  @Test public void testLogLevel() {
    Hawk.init(context)
        .setLogLevel(LogLevel.NONE)
        .build();

    assertThat(Hawk.getLogLevel()).isEqualTo(LogLevel.NONE);

    Hawk.init(context)
        .setLogLevel(LogLevel.FULL)
        .build();

    assertThat(Hawk.getLogLevel()).isEqualTo(LogLevel.FULL);
  }

  @Test public void resetCrypto() {
    assertThat(Hawk.resetCrypto()).isTrue();
  }

  // RxJava read path: getObservable emits the previously stored value.
  @Test public void getRxString() throws Exception {
    Hawk.put(KEY, "hawk");
    Hawk.<String>getObservable(KEY)
        .observeOn(Schedulers.io())
        .subscribeOn(AndroidSchedulers.mainThread())
        .subscribe(new Subscriber<String>() {
          @Override
          public void onCompleted() {
            assertTrue(true);
          }

          @Override
          public void onError(Throwable e) {
            assertTrue(false);
          }

          @Override
          public void onNext(String s) {
            assertThat(s).isEqualTo("hawk");
          }
        });
  }

  // RxJava read path with a default value for a missing key.
  @Test public void getRxStringDefaultValue() throws Exception {
    Hawk.<String>getObservable(KEY, "test")
        .observeOn(Schedulers.io())
        .subscribeOn(AndroidSchedulers.mainThread())
        .subscribe(new Subscriber<String>() {
          @Override
          public void onCompleted() {
            assertTrue(true);
          }

          @Override
          public void onError(Throwable e) {
            fail();
          }

          @Override
          public void onNext(String s) {
            assertThat(s).isEqualTo("test");
          }
        });
  }

  // Full Rx pipeline: async init -> putObservable -> getObservable.
  @Test public void testBuildRx() {
    Hawk.init(context)
        .buildRx()
        .concatMap(new Func1<Boolean, Observable<Boolean>>() {
          @Override
          public Observable<Boolean> call(Boolean aBoolean) {
            return Hawk.putObservable(KEY, "hawk");
          }
        })
        .concatMap(new Func1<Boolean, Observable<String>>() {
          @Override
          public Observable<String> call(Boolean aBoolean) {
            return Hawk.getObservable(KEY);
          }
        })
        .subscribe(new Observer<String>() {
          @Override
          public void onCompleted() {
            assertTrue(true);
          }

          @Override
          public void onError(Throwable throwable) {
            assertTrue(false);
          }

          @Override
          public void onNext(String storedValue) {
            assertEquals(storedValue, "hawk");
          }
        });
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.client;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;

import java.util.concurrent.TimeUnit;

/**
 * Client configuration properties.
 *
 * Key names and defaults for the HDFS client, grouped into nested interfaces
 * by feature area (Retry, Failover, Write, BlockWrite, Read, ShortCircuit,
 * Mmap, HedgedRead, StripedRead, HttpClient).  Names and values form a public
 * compatibility surface and must not be changed.
 */
@InterfaceAudience.Private
public interface HdfsClientConfigKeys {
  // Time-unit helpers (milliseconds) used by the *_DEFAULT values below.
  long SECOND = 1000L;
  long MINUTE = 60 * SECOND;

  String DFS_BLOCK_SIZE_KEY = "dfs.blocksize";
  long DFS_BLOCK_SIZE_DEFAULT = 128*1024*1024;
  String DFS_REPLICATION_KEY = "dfs.replication";
  short DFS_REPLICATION_DEFAULT = 3;

  // ---- WebHDFS ----
  String DFS_WEBHDFS_USER_PATTERN_KEY =
      "dfs.webhdfs.user.provider.user.pattern";
  String DFS_WEBHDFS_USER_PATTERN_DEFAULT = "^[A-Za-z_][A-Za-z0-9._-]*[$]?$";
  String DFS_WEBHDFS_ACL_PERMISSION_PATTERN_KEY =
      "dfs.webhdfs.acl.provider.permission.pattern";
  String DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT =
      "^(default:)?(user|group|mask|other):[[A-Za-z_][A-Za-z0-9._-]]*:([rwx-]{3})?(,(default:)?(user|group|mask|other):[[A-Za-z_][A-Za-z0-9._-]]*:([rwx-]{3})?)*$";
  String DFS_WEBHDFS_SOCKET_CONNECT_TIMEOUT_KEY =
      "dfs.webhdfs.socket.connect-timeout";
  String DFS_WEBHDFS_SOCKET_READ_TIMEOUT_KEY =
      "dfs.webhdfs.socket.read-timeout";
  String DFS_WEBHDFS_OAUTH_ENABLED_KEY = "dfs.webhdfs.oauth2.enabled";
  boolean DFS_WEBHDFS_OAUTH_ENABLED_DEFAULT = false;

  String DFS_WEBHDFS_REST_CSRF_ENABLED_KEY = "dfs.webhdfs.rest-csrf.enabled";
  boolean DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT = false;
  String DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY =
      "dfs.webhdfs.rest-csrf.custom-header";
  String DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT = "X-XSRF-HEADER";
  String DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY =
      "dfs.webhdfs.rest-csrf.methods-to-ignore";
  String DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT =
      "GET,OPTIONS,HEAD,TRACE";
  String DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY =
      "dfs.webhdfs.rest-csrf.browser-useragents-regex";

  String OAUTH_CLIENT_ID_KEY = "dfs.webhdfs.oauth2.client.id";
  String OAUTH_REFRESH_URL_KEY = "dfs.webhdfs.oauth2.refresh.url";
  String ACCESS_TOKEN_PROVIDER_KEY =
      "dfs.webhdfs.oauth2.access.token.provider";

  // Common prefix for the nested per-feature interfaces below.
  String PREFIX = "dfs.client.";

  // ---- NameNode addressing ----
  String DFS_NAMESERVICES = "dfs.nameservices";
  String DFS_NAMENODE_RPC_ADDRESS_KEY = "dfs.namenode.rpc-address";
  String DFS_NAMENODE_RPC_ADDRESS_AUXILIARY_SUFFIX = "auxiliary-ports";
  String DFS_NAMENODE_RPC_ADDRESS_AUXILIARY_KEY =
      DFS_NAMENODE_RPC_ADDRESS_KEY + "."
          + DFS_NAMENODE_RPC_ADDRESS_AUXILIARY_SUFFIX;
  int DFS_NAMENODE_HTTP_PORT_DEFAULT = 9870;
  String DFS_NAMENODE_HTTP_ADDRESS_KEY = "dfs.namenode.http-address";
  int DFS_NAMENODE_HTTPS_PORT_DEFAULT = 9871;
  String DFS_NAMENODE_HTTPS_ADDRESS_KEY = "dfs.namenode.https-address";
  String DFS_HA_NAMENODES_KEY_PREFIX = "dfs.ha.namenodes";
  int DFS_NAMENODE_RPC_PORT_DEFAULT = 8020;
  String DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY =
      "dfs.namenode.kerberos.principal";

  // ---- Client socket / cache tuning ----
  String DFS_CLIENT_WRITE_PACKET_SIZE_KEY = "dfs.client-write-packet-size";
  int DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT = 64*1024;
  String DFS_CLIENT_SOCKET_TIMEOUT_KEY = "dfs.client.socket-timeout";
  String DFS_CLIENT_SOCKET_SEND_BUFFER_SIZE_KEY =
      "dfs.client.socket.send.buffer.size";
  int DFS_CLIENT_SOCKET_SEND_BUFFER_SIZE_DEFAULT =
      HdfsConstants.DEFAULT_DATA_SOCKET_SIZE;
  String DFS_CLIENT_SOCKET_CACHE_CAPACITY_KEY =
      "dfs.client.socketcache.capacity";
  int DFS_CLIENT_SOCKET_CACHE_CAPACITY_DEFAULT = 16;
  String DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_KEY =
      "dfs.client.socketcache.expiryMsec";
  long DFS_CLIENT_SOCKET_CACHE_EXPIRY_MSEC_DEFAULT = 3000;
  String DFS_CLIENT_USE_DN_HOSTNAME = "dfs.client.use.datanode.hostname";
  boolean DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT = false;
  String DFS_CLIENT_CACHE_DROP_BEHIND_WRITES =
      "dfs.client.cache.drop.behind.writes";
  String DFS_CLIENT_CACHE_DROP_BEHIND_READS =
      "dfs.client.cache.drop.behind.reads";
  String DFS_CLIENT_CACHE_READAHEAD = "dfs.client.cache.readahead";
  String DFS_CLIENT_CACHED_CONN_RETRY_KEY = "dfs.client.cached.conn.retry";
  int DFS_CLIENT_CACHED_CONN_RETRY_DEFAULT = 3;
  String DFS_CLIENT_CONTEXT = "dfs.client.context";
  String DFS_CLIENT_CONTEXT_DEFAULT = "default";
  String DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL =
      "dfs.client.use.legacy.blockreader.local";
  boolean DFS_CLIENT_USE_LEGACY_BLOCKREADERLOCAL_DEFAULT = false;
  String DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY =
      "dfs.client.datanode-restart.timeout";
  long DFS_CLIENT_DATANODE_RESTART_TIMEOUT_DEFAULT = 30;
  // Much code in hdfs is not yet updated to use these keys.
  // the initial delay (unit is ms) for locateFollowingBlock, the delay time
  // will increase exponentially(double) for each retry.
  String DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_KEY =
      "dfs.client.max.block.acquire.failures";
  int DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT = 3;
  String DFS_CLIENT_SERVER_DEFAULTS_VALIDITY_PERIOD_MS_KEY =
      "dfs.client.server-defaults.validity.period.ms";
  long DFS_CLIENT_SERVER_DEFAULTS_VALIDITY_PERIOD_MS_DEFAULT =
      TimeUnit.HOURS.toMillis(1);

  // ---- Checksums ----
  String DFS_CHECKSUM_TYPE_KEY = "dfs.checksum.type";
  String DFS_CHECKSUM_TYPE_DEFAULT = "CRC32C";
  String DFS_BYTES_PER_CHECKSUM_KEY = "dfs.bytes-per-checksum";
  int DFS_BYTES_PER_CHECKSUM_DEFAULT = 512;
  String DFS_CHECKSUM_COMBINE_MODE_KEY = "dfs.checksum.combine.mode";
  String DFS_CHECKSUM_COMBINE_MODE_DEFAULT = "MD5MD5CRC";
  String DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY =
      "dfs.datanode.socket.write.timeout";

  // ---- Short-circuit / domain-socket reads ----
  String DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC =
      "dfs.client.domain.socket.data.traffic";
  boolean DFS_CLIENT_DOMAIN_SOCKET_DATA_TRAFFIC_DEFAULT = false;
  String DFS_DOMAIN_SOCKET_PATH_KEY = "dfs.domain.socket.path";
  String DFS_DOMAIN_SOCKET_PATH_DEFAULT = "";
  String DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_KEY =
      "dfs.domain.socket.disable.interval.seconds";
  long DFS_DOMAIN_SOCKET_DISABLE_INTERVAL_SECOND_DEFAULT = 600;
  String DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS =
      "dfs.short.circuit.shared.memory.watcher.interrupt.check.ms";
  int DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT =
      60000;
  String DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_KEY =
      "dfs.client.slow.io.warning.threshold.ms";
  long DFS_CLIENT_SLOW_IO_WARNING_THRESHOLD_DEFAULT = 30000;
  String DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_MS =
      "dfs.client.key.provider.cache.expiry";
  long DFS_CLIENT_KEY_PROVIDER_CACHE_EXPIRY_DEFAULT =
      TimeUnit.DAYS.toMillis(10); // 10 days
  String DFS_CLIENT_BLOCK_READER_REMOTE_BUFFER_SIZE_KEY =
      "dfs.client.block.reader.remote.buffer.size";
  int DFS_CLIENT_BLOCK_READER_REMOTE_BUFFER_SIZE_DEFAULT = 8192;

  // ---- Dead-node detection ----
  String DFS_CLIENT_DEAD_NODE_DETECTION_ENABLED_KEY =
      "dfs.client.deadnode.detection.enabled";
  boolean DFS_CLIENT_DEAD_NODE_DETECTION_ENABLED_DEFAULT = false;

  String DFS_CLIENT_DEAD_NODE_DETECTION_DEAD_NODE_QUEUE_MAX_KEY =
      "dfs.client.deadnode.detection.deadnode.queue.max";
  int DFS_CLIENT_DEAD_NODE_DETECTION_DEAD_NODE_QUEUE_MAX_DEFAULT = 100;

  String DFS_CLIENT_DEAD_NODE_DETECTION_SUSPECT_NODE_QUEUE_MAX_KEY =
      "dfs.client.deadnode.detection.suspectnode.queue.max";
  int DFS_CLIENT_DEAD_NODE_DETECTION_SUSPECT_NODE_QUEUE_MAX_DEFAULT = 1000;

  String DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_CONNECTION_TIMEOUT_MS_KEY =
      "dfs.client.deadnode.detection.probe.connection.timeout.ms";
  long DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_CONNECTION_TIMEOUT_MS_DEFAULT =
      20000;

  String DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_DEAD_NODE_THREADS_KEY =
      "dfs.client.deadnode.detection.probe.deadnode.threads";
  int DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_DEAD_NODE_THREADS_DEFAULT = 10;

  String DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_SUSPECT_NODE_THREADS_KEY =
      "dfs.client.deadnode.detection.probe.suspectnode.threads";
  int DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_SUSPECT_NODE_THREADS_DEFAULT = 10;

  String DFS_CLIENT_DEAD_NODE_DETECTION_RPC_THREADS_KEY =
      "dfs.client.deadnode.detection.rpc.threads";
  int DFS_CLIENT_DEAD_NODE_DETECTION_RPC_THREADS_DEFAULT = 20;

  String DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_DEAD_NODE_INTERVAL_MS_KEY =
      "dfs.client.deadnode.detection.probe.deadnode.interval.ms";
  long DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_DEAD_NODE_INTERVAL_MS_DEFAULT =
      60 * 1000; // 60s

  String DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_SUSPECT_NODE_INTERVAL_MS_KEY =
      "dfs.client.deadnode.detection.probe.suspectnode.interval.ms";
  long DFS_CLIENT_DEAD_NODE_DETECTION_PROBE_SUSPECT_NODE_INTERVAL_MS_DEFAULT =
      300; // 300ms

  // refreshing LocatedBlocks period. A value of 0 disables the feature.
  String DFS_CLIENT_REFRESH_READ_BLOCK_LOCATIONS_MS_KEY =
      "dfs.client.refresh.read-block-locations.ms";
  long DFS_CLIENT_REFRESH_READ_BLOCK_LOCATIONS_MS_DEFAULT = 0L;

  // ---- Data transfer / security ----
  String DFS_DATANODE_KERBEROS_PRINCIPAL_KEY =
      "dfs.datanode.kerberos.principal";
  String DFS_DATANODE_READAHEAD_BYTES_KEY = "dfs.datanode.readahead.bytes";
  long DFS_DATANODE_READAHEAD_BYTES_DEFAULT = 4 * 1024 * 1024; // 4MB
  String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY =
      "dfs.encrypt.data.transfer.cipher.suites";
  String DFS_ENCRYPT_DATA_OVERWRITE_DOWNSTREAM_NEW_QOP_KEY =
      "dfs.encrypt.data.overwrite.downstream.new.qop";
  String DFS_DATA_TRANSFER_PROTECTION_KEY = "dfs.data.transfer.protection";
  String DFS_DATA_TRANSFER_PROTECTION_DEFAULT = "";
  String DFS_DATA_TRANSFER_SASL_PROPS_RESOLVER_CLASS_KEY =
      "dfs.data.transfer.saslproperties.resolver.class";
  String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
      "dfs.encrypt.data.transfer.cipher.key.bitlength";
  int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;
  String DFS_TRUSTEDCHANNEL_RESOLVER_CLASS =
      "dfs.trustedchannel.resolver.class";

  String REPLICA_ACCESSOR_BUILDER_CLASSES_KEY =
      PREFIX + "replica.accessor.builder.classes";

  // The number of NN response dropped by client proactively in each RPC call.
  // For testing NN retry cache, we can set this property with positive value.
  String DFS_CLIENT_TEST_DROP_NAMENODE_RESPONSE_NUM_KEY =
      "dfs.client.test.drop.namenode.response.number";
  int DFS_CLIENT_TEST_DROP_NAMENODE_RESPONSE_NUM_DEFAULT = 0;
  String DFS_CLIENT_LOCAL_INTERFACES = "dfs.client.local.interfaces";
  String DFS_USER_HOME_DIR_PREFIX_KEY = "dfs.user.home.dir.prefix";
  String DFS_USER_HOME_DIR_PREFIX_DEFAULT = "/user";

  String DFS_DATA_TRANSFER_CLIENT_TCPNODELAY_KEY =
      "dfs.data.transfer.client.tcpnodelay";
  boolean DFS_DATA_TRANSFER_CLIENT_TCPNODELAY_DEFAULT = true;

  String DFS_NAMENODE_SNAPSHOT_CAPTURE_OPENFILES =
      "dfs.namenode.snapshot.capture.openfiles";
  boolean DFS_NAMENODE_SNAPSHOT_CAPTURE_OPENFILES_DEFAULT = false;

  String DFS_PROVIDED_ALIASMAP_INMEMORY_RPC_ADDRESS =
      "dfs.provided.aliasmap.inmemory.dnrpc-address";

  /**
   * These are deprecated config keys to client code.
   */
  interface DeprecatedKeys {
    String DFS_NAMENODE_BACKUP_ADDRESS_KEY = "dfs.namenode.backup.address";
    String DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY =
        "dfs.namenode.backup.http-address";
    String DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY =
        "dfs.datanode.balance.bandwidthPerSec";
    //Following keys have no defaults
    String DFS_DATANODE_DATA_DIR_KEY = "dfs.datanode.data.dir";
    String DFS_NAMENODE_MAX_OBJECTS_KEY = "dfs.namenode.max.objects";
    String DFS_NAMENODE_NAME_DIR_KEY = "dfs.namenode.name.dir";
    String DFS_NAMENODE_NAME_DIR_RESTORE_KEY =
        "dfs.namenode.name.dir.restore";
    String DFS_NAMENODE_EDITS_DIR_KEY = "dfs.namenode.edits.dir";
    String DFS_NAMENODE_SAFEMODE_EXTENSION_KEY =
        "dfs.namenode.safemode.extension";
    String DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY =
        "dfs.namenode.safemode.threshold-pct";
    String DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY =
        "dfs.namenode.secondary.http-address";
    String DFS_NAMENODE_CHECKPOINT_DIR_KEY = "dfs.namenode.checkpoint.dir";
    String DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY =
        "dfs.namenode.checkpoint.edits.dir";
    String DFS_NAMENODE_CHECKPOINT_PERIOD_KEY =
        "dfs.namenode.checkpoint.period";
    String DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY =
        "dfs.namenode.heartbeat.recheck-interval";
    String DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY =
        "dfs.client.https.keystore.resource";
    String DFS_CLIENT_HTTPS_NEED_AUTH_KEY = "dfs.client.https.need-auth";
    String DFS_DATANODE_HOST_NAME_KEY = "dfs.datanode.hostname";
    String DFS_METRICS_SESSION_ID_KEY = "dfs.metrics.session-id";
    String DFS_NAMENODE_ACCESSTIME_PRECISION_KEY =
        "dfs.namenode.accesstime.precision";
    String DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_KEY =
        "dfs.namenode.redundancy.considerLoad";
    String DFS_NAMENODE_REDUNDANCY_CONSIDERLOAD_FACTOR =
        "dfs.namenode.redundancy.considerLoad.factor";
    String DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY =
        "dfs.namenode.redundancy.interval.seconds";
    String DFS_NAMENODE_REPLICATION_MIN_KEY = "dfs.namenode.replication.min";
    String DFS_NAMENODE_RECONSTRUCTION_PENDING_TIMEOUT_SEC_KEY =
        "dfs.namenode.reconstruction.pending.timeout-sec";
    String DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY =
        "dfs.namenode.replication.max-streams";
    String DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
    String DFS_PERMISSIONS_SUPERUSERGROUP_KEY =
        "dfs.permissions.superusergroup";
    String DFS_DATANODE_MAX_RECEIVER_THREADS_KEY =
        "dfs.datanode.max.transfer.threads";
    String DFS_NAMESERVICE_ID = "dfs.nameservice.id";
  }

  /** dfs.client.retry configuration properties */
  interface Retry {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "retry.";

    String POLICY_ENABLED_KEY = PREFIX + "policy.enabled";
    boolean POLICY_ENABLED_DEFAULT = false;
    String POLICY_SPEC_KEY = PREFIX + "policy.spec";
    String POLICY_SPEC_DEFAULT = "10000,6,60000,10"; //t1,n1,t2,n2,...

    String TIMES_GET_LAST_BLOCK_LENGTH_KEY =
        PREFIX + "times.get-last-block-length";
    int TIMES_GET_LAST_BLOCK_LENGTH_DEFAULT = 3;
    String INTERVAL_GET_LAST_BLOCK_LENGTH_KEY =
        PREFIX + "interval-ms.get-last-block-length";
    int INTERVAL_GET_LAST_BLOCK_LENGTH_DEFAULT = 4000;

    String MAX_ATTEMPTS_KEY = PREFIX + "max.attempts";
    int MAX_ATTEMPTS_DEFAULT = 10;

    String WINDOW_BASE_KEY = PREFIX + "window.base";
    int WINDOW_BASE_DEFAULT = 3000;
  }

  /** dfs.client.failover configuration properties */
  interface Failover {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "failover.";

    String PROXY_PROVIDER_KEY_PREFIX = PREFIX + "proxy.provider";
    String MAX_ATTEMPTS_KEY = PREFIX + "max.attempts";
    int MAX_ATTEMPTS_DEFAULT = 15;
    String SLEEPTIME_BASE_KEY = PREFIX + "sleep.base.millis";
    int SLEEPTIME_BASE_DEFAULT = 500;
    String SLEEPTIME_MAX_KEY = PREFIX + "sleep.max.millis";
    int SLEEPTIME_MAX_DEFAULT = 15000;
    String CONNECTION_RETRIES_KEY = PREFIX + "connection.retries";
    int CONNECTION_RETRIES_DEFAULT = 0;
    String CONNECTION_RETRIES_ON_SOCKET_TIMEOUTS_KEY =
        PREFIX + "connection.retries.on.timeouts";
    int CONNECTION_RETRIES_ON_SOCKET_TIMEOUTS_DEFAULT = 0;
    String RANDOM_ORDER = PREFIX + "random.order";
    boolean RANDOM_ORDER_DEFAULT = false;
    String RESOLVE_ADDRESS_NEEDED_KEY = PREFIX + "resolve-needed";
    boolean RESOLVE_ADDRESS_NEEDED_DEFAULT = false;
    String RESOLVE_SERVICE_KEY = PREFIX + "resolver.impl";
    String RESOLVE_ADDRESS_TO_FQDN = PREFIX + "resolver.useFQDN";
    boolean RESOLVE_ADDRESS_TO_FQDN_DEFAULT = true;
  }

  /** dfs.client.write configuration properties */
  interface Write {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "write.";

    String MAX_PACKETS_IN_FLIGHT_KEY = PREFIX + "max-packets-in-flight";
    int MAX_PACKETS_IN_FLIGHT_DEFAULT = 80;
    String EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_KEY =
        PREFIX + "exclude.nodes.cache.expiry.interval.millis";
    long EXCLUDE_NODES_CACHE_EXPIRY_INTERVAL_DEFAULT = 10*MINUTE;

    interface ByteArrayManager {
      String PREFIX = Write.PREFIX + "byte-array-manager.";

      String ENABLED_KEY = PREFIX + "enabled";
      boolean ENABLED_DEFAULT = false;
      String COUNT_THRESHOLD_KEY = PREFIX + "count-threshold";
      int COUNT_THRESHOLD_DEFAULT = 128;
      String COUNT_LIMIT_KEY = PREFIX + "count-limit";
      int COUNT_LIMIT_DEFAULT = 2048;
      String COUNT_RESET_TIME_PERIOD_MS_KEY =
          PREFIX + "count-reset-time-period-ms";
      long COUNT_RESET_TIME_PERIOD_MS_DEFAULT = 10*SECOND;
    }
  }

  /** dfs.client.block.write configuration properties */
  interface BlockWrite {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "block.write.";

    String RETRIES_KEY = PREFIX + "retries";
    int RETRIES_DEFAULT = 3;
    String LOCATEFOLLOWINGBLOCK_RETRIES_KEY =
        PREFIX + "locateFollowingBlock.retries";
    int LOCATEFOLLOWINGBLOCK_RETRIES_DEFAULT = 5;
    String LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_KEY =
        PREFIX + "locateFollowingBlock.initial.delay.ms";
    int LOCATEFOLLOWINGBLOCK_INITIAL_DELAY_MS_DEFAULT = 400;
    String LOCATEFOLLOWINGBLOCK_MAX_DELAY_MS_KEY =
        PREFIX + "locateFollowingBlock.max.delay.ms";
    int LOCATEFOLLOWINGBLOCK_MAX_DELAY_MS_DEFAULT = 60000;

    interface ReplaceDatanodeOnFailure {
      String PREFIX = BlockWrite.PREFIX + "replace-datanode-on-failure.";

      String ENABLE_KEY = PREFIX + "enable";
      boolean ENABLE_DEFAULT = true;
      String POLICY_KEY = PREFIX + "policy";
      String POLICY_DEFAULT = "DEFAULT";
      String BEST_EFFORT_KEY = PREFIX + "best-effort";
      boolean BEST_EFFORT_DEFAULT = false;
      String MIN_REPLICATION = PREFIX + "min-replication";
      short MIN_REPLICATION_DEFAULT = 0;
    }
  }

  /** dfs.client.read configuration properties */
  interface Read {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "read.";

    String PREFETCH_SIZE_KEY = PREFIX + "prefetch.size";

    interface ShortCircuit {
      String PREFIX = Read.PREFIX + "shortcircuit.";

      // Feature toggle key is the prefix itself minus the trailing dot.
      String KEY = PREFIX.substring(0, PREFIX.length()-1);
      boolean DEFAULT = false;
      String SKIP_CHECKSUM_KEY = PREFIX + "skip.checksum";
      boolean SKIP_CHECKSUM_DEFAULT = false;
      String BUFFER_SIZE_KEY = PREFIX + "buffer.size";
      int BUFFER_SIZE_DEFAULT = 1024 * 1024;

      String STREAMS_CACHE_SIZE_KEY = PREFIX + "streams.cache.size";
      int STREAMS_CACHE_SIZE_DEFAULT = 256;
      String STREAMS_CACHE_EXPIRY_MS_KEY = PREFIX + "streams.cache.expiry.ms";
      long STREAMS_CACHE_EXPIRY_MS_DEFAULT = 5*MINUTE;

      String METRICS_SAMPLING_PERCENTAGE_KEY =
          PREFIX + "metrics.sampling.percentage";
      int METRICS_SAMPLING_PERCENTAGE_DEFAULT = 0;
    }
  }

  /** dfs.client.short.circuit configuration properties */
  interface ShortCircuit {
    String PREFIX = Read.PREFIX + "short.circuit.";

    String REPLICA_STALE_THRESHOLD_MS_KEY =
        PREFIX + "replica.stale.threshold.ms";
    long REPLICA_STALE_THRESHOLD_MS_DEFAULT = 30*MINUTE;
  }

  /** dfs.client.mmap configuration properties */
  interface Mmap {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "mmap.";

    String ENABLED_KEY = PREFIX + "enabled";
    boolean ENABLED_DEFAULT = true;
    String CACHE_SIZE_KEY = PREFIX + "cache.size";
    int CACHE_SIZE_DEFAULT = 256;
    String CACHE_TIMEOUT_MS_KEY = PREFIX + "cache.timeout.ms";
    long CACHE_TIMEOUT_MS_DEFAULT = 60*MINUTE;
    String RETRY_TIMEOUT_MS_KEY = PREFIX + "retry.timeout.ms";
    long RETRY_TIMEOUT_MS_DEFAULT = 5*MINUTE;
  }

  /** dfs.client.hedged.read configuration properties */
  interface HedgedRead {
    String PREFIX = HdfsClientConfigKeys.PREFIX + "hedged.read.";

    String THRESHOLD_MILLIS_KEY = PREFIX + "threshold.millis";
    long THRESHOLD_MILLIS_DEFAULT = 500;
    String THREADPOOL_SIZE_KEY = PREFIX + "threadpool.size";
    int THREADPOOL_SIZE_DEFAULT = 0;
  }

  /** dfs.client.read.striped configuration properties */
  interface StripedRead {
    String PREFIX = Read.PREFIX + "striped.";

    String THREADPOOL_SIZE_KEY = PREFIX + "threadpool.size";
    /**
     * With default RS-6-3-1024k erasure coding policy, each normal read could
     * span 6 DNs, so this default value accommodates 3 read streams
     */
    int THREADPOOL_SIZE_DEFAULT = 18;
  }

  /** dfs.http.client configuration properties */
  interface HttpClient {
    String PREFIX = "dfs.http.client.";

    // retry
    String RETRY_POLICY_ENABLED_KEY = PREFIX + "retry.policy.enabled";
    boolean RETRY_POLICY_ENABLED_DEFAULT = false;
    String RETRY_POLICY_SPEC_KEY = PREFIX + "retry.policy.spec";
    String RETRY_POLICY_SPEC_DEFAULT = "10000,6,60000,10"; //t1,n1,t2,n2,...
    String RETRY_MAX_ATTEMPTS_KEY = PREFIX + "retry.max.attempts";
    int RETRY_MAX_ATTEMPTS_DEFAULT = 10;

    // failover
    String FAILOVER_MAX_ATTEMPTS_KEY = PREFIX + "failover.max.attempts";
    int FAILOVER_MAX_ATTEMPTS_DEFAULT = 15;
    String FAILOVER_SLEEPTIME_BASE_KEY =
        PREFIX + "failover.sleep.base.millis";
    int FAILOVER_SLEEPTIME_BASE_DEFAULT = 500;
    String FAILOVER_SLEEPTIME_MAX_KEY = PREFIX + "failover.sleep.max.millis";
    int FAILOVER_SLEEPTIME_MAX_DEFAULT = 15000;
  }
}
/*
    Android Asynchronous Http Client
    Copyright (c) 2011 James Smith <[email protected]>
    http://loopj.com

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/

package com.loopj.android.http;

import android.util.Log;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.message.BasicHeader;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.zip.GZIPOutputStream;

/**
 * HTTP entity to upload JSON data using streams. This has very low memory footprint; suitable for
 * uploading large files using base64 encoding.
 *
 * <p>NOTE: a single instance is NOT safe for concurrent {@link #writeTo(OutputStream)} calls
 * because it reuses a per-instance read buffer. Use one entity per request.
 */
public class JsonStreamerEntity implements HttpEntity {

    private static final String LOG_TAG = "JsonStreamerEntity";

    private static final UnsupportedOperationException ERR_UNSUPPORTED =
        new UnsupportedOperationException("Unsupported operation in this implementation.");

    // Size of the byte-array buffer used in I/O streams.
    private static final int BUFFER_SIZE = 4096;

    // Buffer used for reading from input streams. Shared across calls on the
    // same instance, which is why writeTo() is not reentrant.
    private final byte[] buffer = new byte[BUFFER_SIZE];

    // Reusable StringBuilder used by the escape() method.
    // Its size is just initial; if more space is needed, the system will
    // automatically enlarge the buffer.
    // BUG FIX: escape() is static and was mutating this shared builder without
    // synchronization; escape() is now synchronized so concurrent uploads
    // cannot interleave and corrupt each other's output.
    private static final StringBuilder BUILDER = new StringBuilder(128);

    // All JSON output is encoded as UTF-8 explicitly. BUG FIX: the previous
    // code used the platform default charset, which produced invalid JSON on
    // non-UTF-8 platforms (characters above U+20FF are written un-escaped).
    private static final byte[] JSON_TRUE = "true".getBytes(StandardCharsets.UTF_8);
    private static final byte[] JSON_FALSE = "false".getBytes(StandardCharsets.UTF_8);
    private static final byte[] JSON_NULL = "null".getBytes(StandardCharsets.UTF_8);
    private static final byte[] STREAM_NAME = escape("name");
    private static final byte[] STREAM_TYPE = escape("type");
    private static final byte[] STREAM_CONTENTS = escape("contents");
    private static final byte[] STREAM_ELAPSED = escape("_elapsed");

    private static final Header HEADER_JSON_CONTENT =
        new BasicHeader(
            AsyncHttpClient.HEADER_CONTENT_TYPE,
            RequestParams.APPLICATION_JSON);

    private static final Header HEADER_GZIP_ENCODING =
        new BasicHeader(
            AsyncHttpClient.HEADER_CONTENT_ENCODING,
            AsyncHttpClient.ENCODING_GZIP);

    // JSON data and associated meta-data to be uploaded.
    private final Map<String, Object> jsonParams = new HashMap<String, Object>();

    // Content-Encoding header to send (gzip) or null for identity encoding.
    private final Header contentEncoding;

    // Receives upload-progress callbacks while streaming file contents.
    private final ResponseHandlerInterface progressHandler;

    /**
     * Creates the entity.
     *
     * @param progressHandler    receiver of progress notifications during upload
     * @param useGZipCompression whether to gzip-compress the request body
     */
    public JsonStreamerEntity(ResponseHandlerInterface progressHandler, boolean useGZipCompression) {
        this.progressHandler = progressHandler;
        this.contentEncoding = useGZipCompression ? HEADER_GZIP_ENCODING : null;
    }

    /**
     * Add content parameter, identified by the given key, to the request.
     *
     * @param key   entity's name
     * @param value entity's value (Scalar, FileWrapper, StreamWrapper)
     */
    public void addPart(String key, Object value) {
        jsonParams.put(key, value);
    }

    @Override
    public boolean isRepeatable() {
        return false;
    }

    @Override
    public boolean isChunked() {
        return false;
    }

    @Override
    public boolean isStreaming() {
        return false;
    }

    @Override
    public long getContentLength() {
        // Length is unknown up front (streamed/base64/gzip content).
        return -1;
    }

    @Override
    public Header getContentEncoding() {
        return contentEncoding;
    }

    @Override
    public Header getContentType() {
        return HEADER_JSON_CONTENT;
    }

    @Override
    public void consumeContent() throws IOException, UnsupportedOperationException {
    }

    @Override
    public InputStream getContent() throws IOException, UnsupportedOperationException {
        throw ERR_UNSUPPORTED;
    }

    /**
     * Serializes all added parts as a single JSON object directly into the
     * output stream, base64-encoding file/stream parts on the fly.
     *
     * @param out destination stream (the raw socket stream)
     * @throws IOException on any write or read failure
     */
    @Override
    public void writeTo(final OutputStream out) throws IOException {
        if (out == null) {
            throw new IllegalStateException("Output stream cannot be null.");
        }

        // Record the time when uploading started.
        long now = System.currentTimeMillis();

        // Use GZIP compression when sending streams, otherwise just use
        // a buffered output stream to speed things up a bit.
        OutputStream os = null != contentEncoding
            ? new GZIPOutputStream(out, BUFFER_SIZE)
            : out;

        // Always send a JSON object.
        os.write('{');

        boolean isFileWrapper;

        // Go over all entries and handle each value.
        for (Map.Entry<String, Object> entry : jsonParams.entrySet()) {
            // Evaluate the value (which cannot be null).
            Object value = entry.getValue();

            // Bail out prematurely if value's null.
            if (value == null) {
                continue;
            }

            // Write the JSON object's key.
            os.write(escape(entry.getKey()));
            os.write(':');

            // Check if this is a FileWrapper.
            isFileWrapper = value instanceof RequestParams.FileWrapper;

            // If a file should be uploaded.
            if (isFileWrapper || value instanceof RequestParams.StreamWrapper) {
                // All uploads are sent as an object containing the file's details.
                os.write('{');

                // Determine how to handle this entry.
                if (isFileWrapper) {
                    writeToFromFile(os, (RequestParams.FileWrapper) value);
                } else {
                    writeToFromStream(os, (RequestParams.StreamWrapper) value);
                }

                // End the file's object and prepare for next one.
                os.write('}');
            } else if (value instanceof JsonValueInterface) {
                os.write(((JsonValueInterface) value).getEscapedJsonValue());
            } else if (value instanceof org.json.JSONObject) {
                os.write(((org.json.JSONObject) value).toString().getBytes(StandardCharsets.UTF_8));
            } else if (value instanceof org.json.JSONArray) {
                os.write(((org.json.JSONArray) value).toString().getBytes(StandardCharsets.UTF_8));
            } else if (value instanceof Boolean) {
                os.write((Boolean) value ? JSON_TRUE : JSON_FALSE);
            } else if (value instanceof Long) {
                os.write((((Number) value).longValue() + "").getBytes(StandardCharsets.UTF_8));
            } else if (value instanceof Double) {
                os.write((((Number) value).doubleValue() + "").getBytes(StandardCharsets.UTF_8));
            } else if (value instanceof Float) {
                os.write((((Number) value).floatValue() + "").getBytes(StandardCharsets.UTF_8));
            } else if (value instanceof Integer) {
                os.write((((Number) value).intValue() + "").getBytes(StandardCharsets.UTF_8));
            } else {
                os.write(escape(value.toString()));
            }

            os.write(',');
        }

        // Include the elapsed time taken to upload everything.
        // This might be useful for somebody, but it serves us well since
        // there will almost always be a ',' as the last sent character.
        os.write(STREAM_ELAPSED);
        os.write(':');
        long elapsedTime = System.currentTimeMillis() - now;
        os.write((elapsedTime + "}").getBytes(StandardCharsets.UTF_8));

        // Integer division already truncates; the previous Math.floor() call
        // was a no-op that merely reformatted the value as a double.
        Log.i(LOG_TAG, "Uploaded JSON in " + (elapsedTime / 1000) + " seconds");

        // Flush the contents up the stream.
        os.flush();
        AsyncHttpClient.silentCloseOutputStream(os);
    }

    /**
     * Writes one StreamWrapper part: meta-data object plus base64-encoded
     * stream contents. Closes the input stream (when autoClose is set) even
     * if the copy fails part-way.
     */
    private void writeToFromStream(OutputStream os, RequestParams.StreamWrapper entry)
        throws IOException {

        // Send the meta data.
        writeMetaData(os, entry.name, entry.contentType);

        int bytesRead;

        // Upload the file's contents in Base64.
        Base64OutputStream bos =
            new Base64OutputStream(os, Base64.NO_CLOSE | Base64.NO_WRAP);

        try {
            // Read from input stream until no more data's left to read.
            while ((bytesRead = entry.inputStream.read(buffer)) != -1) {
                bos.write(buffer, 0, bytesRead);
            }
        } finally {
            // BUG FIX: close in a finally block so neither stream leaks when
            // the copy above throws.
            AsyncHttpClient.silentCloseOutputStream(bos);
            if (entry.autoClose) {
                // Safely close the input stream.
                AsyncHttpClient.silentCloseInputStream(entry.inputStream);
            }
        }

        // End the meta data.
        endMetaData(os);
    }

    /**
     * Writes one FileWrapper part: meta-data object plus base64-encoded file
     * contents, reporting progress to the handler. The file stream is closed
     * even if the copy fails part-way.
     */
    private void writeToFromFile(OutputStream os, RequestParams.FileWrapper wrapper)
        throws IOException {

        // Send the meta data.
        writeMetaData(os, wrapper.file.getName(), wrapper.contentType);

        int bytesRead, bytesWritten = 0, totalSize = (int) wrapper.file.length();

        // Open the file for reading.
        FileInputStream in = new FileInputStream(wrapper.file);

        // Upload the file's contents in Base64.
        Base64OutputStream bos =
            new Base64OutputStream(os, Base64.NO_CLOSE | Base64.NO_WRAP);

        try {
            // Read from file until no more data's left to read.
            while ((bytesRead = in.read(buffer)) != -1) {
                bos.write(buffer, 0, bytesRead);
                bytesWritten += bytesRead;
                progressHandler.sendProgressMessage(bytesWritten, totalSize);
            }
        } finally {
            // BUG FIX: close in a finally block so the file handle is not
            // leaked when the copy above throws.
            AsyncHttpClient.silentCloseOutputStream(bos);
            AsyncHttpClient.silentCloseInputStream(in);
        }

        // End the meta data.
        endMetaData(os);
    }

    /**
     * Opens a file/stream part's JSON object: writes "name", "type" and the
     * opening quote of "contents". Must be balanced by {@link #endMetaData}.
     */
    private void writeMetaData(OutputStream os, String name, String contentType) throws IOException {
        // Send the stream's name.
        os.write(STREAM_NAME);
        os.write(':');
        os.write(escape(name));
        os.write(',');

        // Send the stream's content type.
        os.write(STREAM_TYPE);
        os.write(':');
        os.write(escape(contentType));
        os.write(',');

        // Prepare the file content's key.
        os.write(STREAM_CONTENTS);
        os.write(':');
        os.write('"');
    }

    /** Closes the quoted "contents" value opened by {@link #writeMetaData}. */
    private void endMetaData(OutputStream os) throws IOException {
        os.write('"');
    }

    // Courtesy of Simple-JSON: http://goo.gl/XoW8RF
    // Changed a bit to suit our needs in this class.

    /**
     * JSON-escapes the given string and returns it, surrounded by double
     * quotes, as UTF-8 bytes. Returns the literal {@code null} token for a
     * null input.
     *
     * <p>Synchronized because it reuses the shared static {@link #BUILDER}.
     */
    static synchronized byte[] escape(String string) {
        // If it's null, just return prematurely.
        if (string == null) {
            return JSON_NULL;
        }

        // Surround with quotations.
        BUILDER.append('"');

        int length = string.length(), pos = -1;
        while (++pos < length) {
            char ch = string.charAt(pos);
            switch (ch) {
                case '"':
                    BUILDER.append("\\\"");
                    break;
                case '\\':
                    BUILDER.append("\\\\");
                    break;
                case '\b':
                    BUILDER.append("\\b");
                    break;
                case '\f':
                    BUILDER.append("\\f");
                    break;
                case '\n':
                    BUILDER.append("\\n");
                    break;
                case '\r':
                    BUILDER.append("\\r");
                    break;
                case '\t':
                    BUILDER.append("\\t");
                    break;
                default:
                    // Reference: http://www.unicode.org/versions/Unicode5.1.0/
                    if ((ch >= '\u0000' && ch <= '\u001F')
                            || (ch >= '\u007F' && ch <= '\u009F')
                            || (ch >= '\u2000' && ch <= '\u20FF')) {
                        String intString = Integer.toHexString(ch);
                        BUILDER.append("\\u");
                        int intLength = 4 - intString.length();
                        for (int zero = 0; zero < intLength; zero++) {
                            BUILDER.append('0');
                        }
                        BUILDER.append(intString.toUpperCase(Locale.US));
                    } else {
                        BUILDER.append(ch);
                    }
                    break;
            }
        }

        // Surround with quotations.
        BUILDER.append('"');

        try {
            return BUILDER.toString().getBytes(StandardCharsets.UTF_8);
        } finally {
            // Empty the String buffer.
            // This is 20-30% faster than instantiating a new object.
            BUILDER.setLength(0);
        }
    }
}
/*
 * Copyright 2015 West Coast Informatics, LLC
 */
/*
 *
 */
package org.ihtsdo.otf.ts.jpa.services;

import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.log4j.Logger;
import org.ihtsdo.otf.ts.Project;
import org.ihtsdo.otf.ts.helpers.ConfigUtility;
import org.ihtsdo.otf.ts.helpers.KeyValuesMap;
import org.ihtsdo.otf.ts.helpers.LocalException;
import org.ihtsdo.otf.ts.helpers.RelationshipList;
import org.ihtsdo.otf.ts.helpers.RelationshipListJpa;
import org.ihtsdo.otf.ts.rf2.Relationship;
import org.ihtsdo.otf.ts.services.ActionService;
import org.ihtsdo.otf.ts.services.handlers.Classifier;
import org.ihtsdo.otf.ts.services.handlers.WorkflowListener;

/**
 * JPA enabled implementation of {@link ActionService}.
 */
public class ActionServiceJpa extends ContentServiceJpa implements
    ActionService {

  /**
   * The token -> configuration map. Shared by all instances and mutated both
   * by request threads and by the expiration timer thread, hence a
   * {@link ConcurrentHashMap} (the previous plain HashMap risked
   * ConcurrentModificationException and lost updates).
   */
  static Map<String, ActionServiceConfig> tokenConfigMap =
      new ConcurrentHashMap<>();

  /** The default session timeout (2 hours, in milliseconds). */
  private final static long defaultTimeout = 7200000;

  /** The effective timeout; -1 until resolved from configuration. */
  private static long actualTimeout = -1;

  /** Period between token-expiration sweeps (5 minutes, in milliseconds). */
  private static final long EXPIRATION_CHECK_PERIOD = 1000 * 60 * 5;

  /**
   * Single shared daemon timer used to expire stale tokens. Lazily started by
   * the first constructed instance. BUG FIX: previously every instance
   * created its own (non-daemon) Timer and scheduled a one-shot task, leaking
   * a thread per instance and checking expiration only once.
   */
  private static Timer expirationTimer = null;

  /** Whether workflow listeners receive callbacks. */
  private boolean listenersEnabled = true;

  /** The workflow listeners, loaded once from configuration. */
  private static List<WorkflowListener> listeners = null;
  static {
    listeners = new ArrayList<>();
    Properties config;
    try {
      config = ConfigUtility.getConfigProperties();
      String key = "workflow.listener.handler";
      for (String handlerName : config.getProperty(key).split(",")) {
        if (handlerName.isEmpty())
          continue;
        // Add handlers to map
        WorkflowListener handlerService =
            ConfigUtility.newStandardHandlerInstanceWithConfiguration(key,
                handlerName, WorkflowListener.class);
        listeners.add(handlerService);
      }
    } catch (Exception e) {
      e.printStackTrace();
      // Null signals a fatal initialization failure; the constructor refuses
      // to proceed in that case.
      listeners = null;
    }
  }

  /**
   * Instantiates an empty {@link ActionServiceJpa}.
   *
   * @throws Exception the exception
   */
  public ActionServiceJpa() throws Exception {
    super();
    if (listeners == null) {
      throw new Exception(
          "Listeners did not properly initialize, serious error.");
    }

    // Resolve the session timeout once.
    // BUG FIX: previously a missing "action.service.timeout" property left
    // actualTimeout at -1, and constructing a second instance clobbered an
    // already-configured value with the default.
    if (actualTimeout == -1) {
      Properties config = ConfigUtility.getConfigProperties();
      String prop = config.getProperty("action.service.timeout");
      if (prop != null) {
        try {
          actualTimeout = Long.valueOf(prop);
        } catch (Exception e) {
          // Unparseable value - fall back to the default.
          actualTimeout = defaultTimeout;
        }
      } else {
        actualTimeout = defaultTimeout;
      }
    }

    // Start (once) a shared daemon timer that expires tokens every 5 minutes.
    synchronized (ActionServiceJpa.class) {
      if (expirationTimer == null) {
        expirationTimer = new Timer(true);
        expirationTimer.scheduleAtFixedRate(new TimerTask() {
          @Override
          public void run() {
            Logger.getLogger(ActionServiceJpa.class).info(
                "Action service - timeout check");
            Date now = new Date();
            // Iterator-based removal; safe on a ConcurrentHashMap and avoids
            // the ConcurrentModificationException the old keySet() loop risked.
            for (Iterator<Map.Entry<String, ActionServiceConfig>> it =
                tokenConfigMap.entrySet().iterator(); it.hasNext();) {
              Map.Entry<String, ActionServiceConfig> entry = it.next();
              if (entry.getValue().getTimeout().before(now)) {
                Logger.getLogger(ActionServiceJpa.class).info(
                    "  timeout - " + entry.getKey());
                it.remove();
              }
            }
          }
        }, EXPIRATION_CHECK_PERIOD, EXPIRATION_CHECK_PERIOD);
      }
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see org.ihtsdo.otf.ts.services.ContentService#enableListeners()
   */
  @Override
  public void enableListeners() {
    listenersEnabled = true;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.ihtsdo.otf.ts.services.ContentService#disableListeners()
   */
  @Override
  public void disableListeners() {
    listenersEnabled = false;
  }

  /**
   * Creates a new session token bound to the given project and registers it
   * with an initial timeout of now + actualTimeout.
   *
   * @param project the project to configure the session for
   * @return the new session token
   * @throws Exception the exception
   */
  @Override
  public String configureActionService(Project project) throws Exception {
    Logger.getLogger(getClass())
        .info("Action Service - configure - " + project);
    String token = UUID.randomUUID().toString();
    ActionServiceConfig config = new ActionServiceConfig(project);
    config.setTimeout(new Date(new Date().getTime() + actualTimeout));
    tokenConfigMap.put(token, config);
    return token;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.ihtsdo.otf.ts.services.ActionService#clear(java.lang.String)
   */
  @Override
  public void clear(String sessionToken) {
    tokenConfigMap.remove(sessionToken);
    // NOTE(review): the previous explicit System.gc() call was removed - it
    // is only a hint to the JVM and an established anti-pattern; dropping the
    // map entry is sufficient to make the classifier state collectable.
  }

  /*
   * (non-Javadoc)
   *
   * @see org.ihtsdo.otf.ts.services.ActionService#getProgress(java.lang.String)
   */
  @Override
  public float getProgress(String sessionToken) throws Exception {
    tokenCheck(sessionToken);
    if (tokenConfigMap.containsKey(sessionToken)) {
      return tokenConfigMap.get(sessionToken).getProgress();
    } else {
      return 0;
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see org.ihtsdo.otf.ts.services.ActionService#cancel(java.lang.String)
   */
  @Override
  public void cancel(String sessionToken) throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - cancel - " + sessionToken);
    tokenCheck(sessionToken);
    // Cooperative cancellation: long-running operations poll this flag.
    tokenConfigMap.get(sessionToken).setRequestCancel(true);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.ihtsdo.otf.ts.services.ActionService#prepareToClassify(java.lang.String
   * )
   */
  @Override
  public void prepareToClassify(String sessionToken) throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - prepare to classify - " + sessionToken);
    tokenCheck(sessionToken);

    // Instantiate classifier from the "classifier.handler" config property.
    Classifier classifier = null;
    try {
      Properties config;
      config = ConfigUtility.getConfigProperties();
      String key = "classifier.handler";
      String handlerName = config.getProperty(key);
      if (handlerName == null || handlerName.isEmpty()) {
        throw new Exception("Undefined classifier handler");
      }
      // Set handler up
      Classifier handler =
          ConfigUtility.newStandardHandlerInstanceWithConfiguration(key,
              handlerName, Classifier.class);
      classifier = handler;
    } catch (Exception e) {
      e.printStackTrace();
      classifier = null;
    }

    // check that it worked
    if (classifier == null) {
      throw new Exception("Unable to instantiate classifier");
    }

    // Inform listeners
    if (listenersEnabled) {
      for (WorkflowListener listener : listeners) {
        listener.preClassificationStarted();
      }
    }

    // Verify that action service is configured
    ActionServiceConfig config = tokenConfigMap.get(sessionToken);
    if (config == null) {
      throw new Exception(
          "Cannot pre-classify until configure has been called.");
    }

    // configure classifier
    classifier.setProject(config.getProject());

    // Set the root id (SNOMED CT root concept)
    String SNOMED_ROOT_CONCEPT = "138875005";
    classifier.setRootId(Integer.valueOf(getSingleConcept(SNOMED_ROOT_CONCEPT,
        config.getProject().getTerminology(), config.getProject()
            .getTerminologyVersion()).getId().toString()));

    // Set the isa id ("Is a" relationship type)
    String ISA_SCTID = "116680003";
    classifier.setIsaRelId(Integer.valueOf(getSingleConcept(ISA_SCTID, config
        .getProject().getTerminology(), config.getProject()
        .getTerminologyVersion()).getId().toString()));

    // Set attribute root ("Concept model attribute")
    String ATTRIBUTE_ROOT_CONCEPT = "410662002";
    classifier.setRoleRootId(Integer.valueOf(getSingleConcept(
        ATTRIBUTE_ROOT_CONCEPT, config.getProject().getTerminology(), config
            .getProject().getTerminologyVersion()).getId().toString()));

    // Load concepts and relationships
    classifier.loadConcepts();

    // Handle cancel
    if (tokenConfigMap.get(sessionToken).isRequestCancel()) {
      if (listenersEnabled) {
        for (WorkflowListener listener : listeners) {
          listener.cancel();
        }
      }
      return;
    }

    // Load roles
    classifier.loadRoles();

    // Handle cancel
    if (tokenConfigMap.get(sessionToken).isRequestCancel()) {
      if (listenersEnabled) {
        for (WorkflowListener listener : listeners) {
          listener.cancel();
        }
      }
      return;
    }

    tokenConfigMap.get(sessionToken).setClassifier(classifier);

    if (listenersEnabled) {
      for (WorkflowListener listener : listeners) {
        listener.preClassificationFinished();
      }
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see org.ihtsdo.otf.ts.services.ActionService#classify(java.lang.String)
   */
  @Override
  public void classify(String sessionToken) throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - classify - " + sessionToken);
    tokenCheck(sessionToken);

    if (listenersEnabled) {
      for (WorkflowListener listener : listeners) {
        listener.classificationStarted();
      }
    }

    Classifier runner = tokenConfigMap.get(sessionToken).getClassifier();
    if (runner != null) {
      runner.compute();
    }

    // Handle cancel
    if (tokenConfigMap.get(sessionToken).isRequestCancel()) {
      if (listenersEnabled) {
        for (WorkflowListener listener : listeners) {
          listener.cancel();
        }
      }
      return;
    }

    if (listenersEnabled) {
      for (WorkflowListener listener : listeners) {
        listener.classificationFinished();
      }
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.ihtsdo.otf.ts.services.ActionService#incrementalClassify(java.lang.
   * String)
   */
  @Override
  public void incrementalClassify(String sessionToken) throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - incremental classify - " + sessionToken);
    tokenCheck(sessionToken);

    if (listenersEnabled) {
      for (WorkflowListener listener : listeners) {
        listener.classificationStarted();
      }
    }

    // Handle cancel
    if (tokenConfigMap.get(sessionToken).isRequestCancel()) {
      if (listenersEnabled) {
        for (WorkflowListener listener : listeners) {
          listener.cancel();
        }
      }
      return;
    }

    if (listenersEnabled) {
      for (WorkflowListener listener : listeners) {
        listener.classificationFinished();
      }
    }

    // Not implemented yet; kept as the terminal statement so listener
    // notifications above still fire (matching the original control flow).
    throw new UnsupportedOperationException();
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.ihtsdo.otf.ts.services.ActionService#getClassificationEquivalents(java
   * .lang.String)
   */
  @Override
  public KeyValuesMap getClassificationEquivalents(String sessionToken)
    throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - get classification equivalents - " + sessionToken);
    tokenCheck(sessionToken);
    Classifier runner = tokenConfigMap.get(sessionToken).getClassifier();
    if (runner != null) {
      KeyValuesMap map = runner.getEquivalentClasses();
      return map;
    }
    // No classifier has been prepared for this session yet.
    return null;
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.ihtsdo.otf.ts.services.ActionService#getOldInferredRelationships(java
   * .lang.String)
   */
  @Override
  public RelationshipList getOldInferredRelationships(String sessionToken)
    throws Exception {
    tokenCheck(sessionToken);
    Logger.getLogger(getClass()).info(
        "Action Service - get old inferred relationships - " + sessionToken);
    List<Relationship> rels =
        tokenConfigMap.get(sessionToken).getClassifier()
            .getOldInferredRelationships();
    RelationshipList list = new RelationshipListJpa();
    list.setObjects(rels);
    return list;
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.ihtsdo.otf.ts.services.ActionService#getNewInferredRelationships(java
   * .lang.String)
   */
  @Override
  public RelationshipList getNewInferredRelationships(String sessionToken)
    throws Exception {
    tokenCheck(sessionToken);
    Logger.getLogger(getClass()).info(
        "Action Service - get new inferred relationships - " + sessionToken);
    List<Relationship> rels =
        tokenConfigMap.get(sessionToken).getClassifier()
            .getNewInferredRelationships();
    RelationshipList list = new RelationshipListJpa();
    list.setObjects(rels);
    return list;
  }

  /**
   * Token check. Validates the token, then slides its expiration window
   * forward by actualTimeout.
   *
   * @param token the token
   * @throws Exception if the token is unknown or has timed out.
   */
  @SuppressWarnings("static-method")
  private void tokenCheck(String token) throws Exception {
    if (!tokenConfigMap.containsKey(token)) {
      throw new LocalException("Session token is invalid");
    }
    if (tokenConfigMap.get(token).getTimeout().before(new Date())) {
      tokenConfigMap.remove(token);
      throw new LocalException("Session token has expired");
    }
    tokenConfigMap.get(token).setTimeout(
        new Date(new Date().getTime() + actualTimeout));
  }

  @Override
  public void addNewInferredRelationships(String sessionToken) throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - add new inferred relationships - " + sessionToken);
    tokenCheck(sessionToken);
    // Get the new inferred rels and add them
  }

  @Override
  public void retireOldInferredRelationships(String sessionToken)
    throws Exception {
    Logger.getLogger(getClass()).info(
        "Action Service - retire old inferred relationships - " + sessionToken);
    tokenCheck(sessionToken);
    // get the old inferred rels and retire them
  }

  /**
   * Local configuration object. NEVER expose this outside this class.
   *
   * <p>Declared static (BUG FIX): instances live in the static
   * tokenConfigMap, and a non-static inner class would pin its enclosing
   * ActionServiceJpa instance in memory for the lifetime of the token.
   */
  private static class ActionServiceConfig {

    /** The project this session operates on. */
    private Project project;

    /** The progress (percentage, -1 until reported). */
    private int progress = -1;

    /** The absolute expiration time of the session token. */
    private Date timeout;

    /** The classifier prepared for this session, if any. */
    private Classifier classifier;

    /** Cooperative-cancellation flag polled by long-running operations. */
    private boolean requestCancel;

    /**
     * Instantiates a {@link ActionServiceConfig} from the specified parameters.
     *
     * @param project the project
     */
    ActionServiceConfig(Project project) {
      this.project = project;
    }

    /**
     * Returns the project.
     *
     * @return the project
     */
    public Project getProject() {
      return project;
    }

    /**
     * Returns the progress.
     *
     * @return the progress
     */
    public int getProgress() {
      return progress;
    }

    /**
     * Sets the progress.
     *
     * @param progress the progress
     */
    @SuppressWarnings("unused")
    public void setProgress(int progress) {
      this.progress = progress;
    }

    /**
     * Returns the timeout.
     *
     * @return the timeout
     */
    public Date getTimeout() {
      return timeout;
    }

    /**
     * Sets the timeout.
     *
     * @param timeout the timeout
     */
    public void setTimeout(Date timeout) {
      this.timeout = timeout;
    }

    /**
     * Returns the classifier.
     *
     * @return the classifier
     */
    public Classifier getClassifier() {
      return classifier;
    }

    /**
     * Sets the classifier.
     *
     * @param classifier the classifier
     */
    public void setClassifier(Classifier classifier) {
      this.classifier = classifier;
    }

    /**
     * Indicates whether or not request cancel is the case.
     *
     * @return <code>true</code> if so, <code>false</code> otherwise
     */
    public boolean isRequestCancel() {
      return requestCancel;
    }

    /**
     * Sets the request cancel.
     *
     * @param requestCancel the request cancel
     */
    public void setRequestCancel(boolean requestCancel) {
      this.requestCancel = requestCancel;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.io.merge;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.HiveStatsUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcFileStripeMergeInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.Explain.Level;
import org.apache.hadoop.mapred.InputFormat;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Map work describing a file-merge (concatenate) operation: merges the small
 * files under a set of input paths into fewer files in an output directory,
 * using a block/stripe-level merge input format for RCFile/ORC sources.
 */
@Explain(displayName = "Merge File Operator", explainLevels = { Level.USER,
    Level.DEFAULT,
    Level.EXTENDED })
public class MergeFileWork extends MapWork {

  private static final Logger LOG = LoggerFactory.getLogger(MergeFileWork.class);

  /** Directories/files to merge. */
  private List<Path> inputPaths;
  /** Destination directory for the merged files. */
  private Path outputDir;
  /** Whether the merge targets dynamic partitions. */
  private boolean hasDynamicPartitions;
  /** Whether this is "alter table ... concatenate" on a skewed (list-bucketed) table. */
  private boolean isListBucketingAlterTableConcatenate;
  private ListBucketingCtx listBucketingCtx;

  // source table input format
  private String srcTblInputFormat;

  // internal input format used by CombineHiveInputFormat
  private Class<? extends InputFormat> internalInputFormat;

  public MergeFileWork(List<Path> inputPaths, Path outputDir,
      String srcTblInputFormat, TableDesc tbl) {
    this(inputPaths, outputDir, false, srcTblInputFormat, tbl);
  }

  /**
   * Full constructor.
   *
   * @param inputPaths           directories/files to merge
   * @param outputDir            destination directory
   * @param hasDynamicPartitions whether dynamic partitions are involved
   * @param srcTblInputFormat    class name of the source table's input format
   * @param tbl                  source table descriptor
   */
  public MergeFileWork(List<Path> inputPaths, Path outputDir,
      boolean hasDynamicPartitions, String srcTblInputFormat, TableDesc tbl) {
    this.inputPaths = inputPaths;
    this.outputDir = outputDir;
    this.hasDynamicPartitions = hasDynamicPartitions;
    this.srcTblInputFormat = srcTblInputFormat;
    PartitionDesc partDesc = new PartitionDesc();
    // Pick the merge-level input format matching the source format.
    // NOTE(review): for any other source format, internalInputFormat stays
    // null and is set as-is on the partition descriptor - presumably callers
    // only construct this for ORC/RCFile tables; confirm before widening use.
    if (srcTblInputFormat.equals(OrcInputFormat.class.getName())) {
      this.internalInputFormat = OrcFileStripeMergeInputFormat.class;
    } else if (srcTblInputFormat.equals(RCFileInputFormat.class.getName())) {
      this.internalInputFormat = RCFileBlockMergeInputFormat.class;
    }
    partDesc.setInputFileFormatClass(internalInputFormat);
    partDesc.setTableDesc(tbl);
    for (Path path : this.inputPaths) {
      this.addPathToPartitionInfo(path, partDesc);
    }
    this.isListBucketingAlterTableConcatenate = false;
  }

  public List<Path> getInputPaths() {
    return inputPaths;
  }

  public void setInputPaths(List<Path> inputPaths) {
    this.inputPaths = inputPaths;
  }

  public Path getOutputDir() {
    return outputDir;
  }

  public void setOutputDir(Path outputDir) {
    this.outputDir = outputDir;
  }

  @Override
  public Long getMinSplitSize() {
    // No minimum split size constraint for merge work.
    return null;
  }

  @Override
  public String getInputformat() {
    return getInputformatClass().getName();
  }

  /** The outer input format is always CombineHiveInputFormat for merges. */
  public Class<? extends InputFormat> getInputformatClass() {
    return CombineHiveInputFormat.class;
  }

  @Override
  public boolean isGatheringStats() {
    // Merging does not change data contents, so no stats gathering.
    return false;
  }

  public boolean hasDynamicPartitions() {
    return this.hasDynamicPartitions;
  }

  public void setHasDynamicPartitions(boolean hasDynamicPartitions) {
    this.hasDynamicPartitions = hasDynamicPartitions;
  }

  @Override
  public void resolveDynamicPartitionStoredAsSubDirsMerge(HiveConf conf,
      Path path, TableDesc tblDesc, List<String> aliases,
      PartitionDesc partDesc) {
    super.resolveDynamicPartitionStoredAsSubDirsMerge(conf, path, tblDesc,
        aliases, partDesc);
    // set internal input format for all partition descriptors
    partDesc.setInputFileFormatClass(internalInputFormat);
    // Add the DP path to the list of input paths
    inputPaths.add(path);
  }

  /**
   * alter table ... concatenate
   * <br>
   * If it is skewed table, use subdirectories in inputpaths.
   */
  public void resolveConcatenateMerge(HiveConf conf) {
    isListBucketingAlterTableConcatenate =
        ((listBucketingCtx == null) ? false : listBucketingCtx
            .isSkewedStoredAsDir());
    LOG.info("isListBucketingAlterTableConcatenate : "
        + isListBucketingAlterTableConcatenate);
    if (isListBucketingAlterTableConcatenate) {
      // use sub-dir as inputpath.
      assert ((this.inputPaths != null) && (this.inputPaths.size() == 1)) :
          "alter table ... concatenate should only have one"
              + " directory inside inputpaths";
      Path dirPath = inputPaths.get(0);
      try {
        FileSystem inpFs = dirPath.getFileSystem(conf);
        List<FileStatus> status =
            HiveStatsUtils.getFileStatusRecurse(dirPath, listBucketingCtx
                .getSkewedColNames().size(), inpFs);
        List<Path> newInputPath = new ArrayList<Path>();
        boolean succeed = true;
        for (FileStatus s : status) {
          // Use isDirectory(); isDir() is deprecated with identical behavior.
          if (s.isDirectory()) {
            // Add the lb path to the list of input paths
            newInputPath.add(s.getPath());
          } else {
            // found a file instead of a dir: don't change inputpath
            succeed = false;
          }
        }
        assert (succeed || ((!succeed) && newInputPath.isEmpty())) :
            "This partition has "
                + " inconsistent file structure: "
                + "it is stored-as-subdir and expected all files in the same depth"
                + " of subdirectories.";
        if (succeed) {
          inputPaths.clear();
          inputPaths.addAll(newInputPath);
        }
      } catch (IOException e) {
        // Message covers both filesystem lookup and recursive listing (the
        // old text mentioned only the filesystem and was misleading).
        String msg = "Failed to list files under directory : " + dirPath.toUri();
        throw new RuntimeException(msg, e);
      }
    }
  }

  /**
   * @return the listBucketingCtx
   */
  public ListBucketingCtx getListBucketingCtx() {
    return listBucketingCtx;
  }

  /**
   * @param listBucketingCtx the listBucketingCtx to set
   */
  public void setListBucketingCtx(ListBucketingCtx listBucketingCtx) {
    this.listBucketingCtx = listBucketingCtx;
  }

  /**
   * @return the isListBucketingAlterTableConcatenate
   */
  public boolean isListBucketingAlterTableConcatenate() {
    return isListBucketingAlterTableConcatenate;
  }

  @Explain(displayName = "input format")
  public String getSourceTableInputFormat() {
    return srcTblInputFormat;
  }

  public void setSourceTableInputFormat(String srcTblInputFormat) {
    this.srcTblInputFormat = srcTblInputFormat;
  }

  /** Merge granularity for EXPLAIN output: "stripe" (ORC), "block" (RCFile), or null. */
  @Explain(displayName = "merge level")
  public String getMergeLevel() {
    if (srcTblInputFormat != null) {
      if (srcTblInputFormat.equals(OrcInputFormat.class.getName())) {
        return "stripe";
      } else if (srcTblInputFormat.equals(RCFileInputFormat.class.getName())) {
        return "block";
      }
    }
    return null;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.formats.json.debezium; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.serialization.DeserializationSchema; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.formats.json.JsonRowDataDeserializationSchema; import org.apache.flink.formats.json.TimestampFormat; import org.apache.flink.formats.json.debezium.DebeziumJsonDecodingFormat.ReadableMetadata; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.data.GenericRowData; import org.apache.flink.table.data.RowData; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.utils.DataTypeUtils; import org.apache.flink.types.RowKind; import org.apache.flink.util.Collector; import java.io.IOException; import java.io.Serializable; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; import static java.lang.String.format; /** * Deserialization schema from Debezium JSON to Flink Table/SQL internal data structure {@link * RowData}. 
The deserialization schema knows Debezium's schema definition and can extract the * database data and convert into {@link RowData} with {@link RowKind}. * * <p>Deserializes a <code>byte[]</code> message as a JSON object and reads the specified fields. * * <p>Failures during deserialization are forwarded as wrapped IOExceptions. * * @see <a href="https://debezium.io/">Debezium</a> */ @Internal public final class DebeziumJsonDeserializationSchema implements DeserializationSchema<RowData> { private static final long serialVersionUID = 1L; private static final String OP_READ = "r"; // snapshot read private static final String OP_CREATE = "c"; // insert private static final String OP_UPDATE = "u"; // update private static final String OP_DELETE = "d"; // delete private static final String REPLICA_IDENTITY_EXCEPTION = "The \"before\" field of %s message is null, " + "if you are using Debezium Postgres Connector, " + "please check the Postgres table has been set REPLICA IDENTITY to FULL level."; /** The deserializer to deserialize Debezium JSON data. */ private final JsonRowDataDeserializationSchema jsonDeserializer; /** Flag that indicates that an additional projection is required for metadata. */ private final boolean hasMetadata; /** Metadata to be extracted for every record. */ private final MetadataConverter[] metadataConverters; /** {@link TypeInformation} of the produced {@link RowData} (physical + meta data). */ private final TypeInformation<RowData> producedTypeInfo; /** * Flag indicating whether the Debezium JSON data contains schema part or not. When Debezium * Kafka Connect enables "value.converter.schemas.enable", the JSON will contain "schema" * information, but we just ignore "schema" and extract data from "payload". */ private final boolean schemaInclude; /** Flag indicating whether to ignore invalid fields/rows (default: throw an exception). 
*/ private final boolean ignoreParseErrors; public DebeziumJsonDeserializationSchema( DataType physicalDataType, List<ReadableMetadata> requestedMetadata, TypeInformation<RowData> producedTypeInfo, boolean schemaInclude, boolean ignoreParseErrors, TimestampFormat timestampFormat) { final RowType jsonRowType = createJsonRowType(physicalDataType, requestedMetadata, schemaInclude); this.jsonDeserializer = new JsonRowDataDeserializationSchema( jsonRowType, // the result type is never used, so it's fine to pass in the produced type // info producedTypeInfo, false, // ignoreParseErrors already contains the functionality of // failOnMissingField ignoreParseErrors, timestampFormat); this.hasMetadata = requestedMetadata.size() > 0; this.metadataConverters = createMetadataConverters(jsonRowType, requestedMetadata, schemaInclude); this.producedTypeInfo = producedTypeInfo; this.schemaInclude = schemaInclude; this.ignoreParseErrors = ignoreParseErrors; } @Override public RowData deserialize(byte[] message) { throw new RuntimeException( "Please invoke DeserializationSchema#deserialize(byte[], Collector<RowData>) instead."); } @Override public void deserialize(byte[] message, Collector<RowData> out) throws IOException { if (message == null || message.length == 0) { // skip tombstone messages return; } try { GenericRowData row = (GenericRowData) jsonDeserializer.deserialize(message); GenericRowData payload; if (schemaInclude) { payload = (GenericRowData) row.getField(0); } else { payload = row; } GenericRowData before = (GenericRowData) payload.getField(0); GenericRowData after = (GenericRowData) payload.getField(1); String op = payload.getField(2).toString(); if (OP_CREATE.equals(op) || OP_READ.equals(op)) { after.setRowKind(RowKind.INSERT); emitRow(row, after, out); } else if (OP_UPDATE.equals(op)) { if (before == null) { throw new IllegalStateException( String.format(REPLICA_IDENTITY_EXCEPTION, "UPDATE")); } before.setRowKind(RowKind.UPDATE_BEFORE); 
after.setRowKind(RowKind.UPDATE_AFTER); emitRow(row, before, out); emitRow(row, after, out); } else if (OP_DELETE.equals(op)) { if (before == null) { throw new IllegalStateException( String.format(REPLICA_IDENTITY_EXCEPTION, "DELETE")); } before.setRowKind(RowKind.DELETE); emitRow(row, before, out); } else { if (!ignoreParseErrors) { throw new IOException( format( "Unknown \"op\" value \"%s\". The Debezium JSON message is '%s'", op, new String(message))); } } } catch (Throwable t) { // a big try catch to protect the processing. if (!ignoreParseErrors) { throw new IOException( format("Corrupt Debezium JSON message '%s'.", new String(message)), t); } } } private void emitRow( GenericRowData rootRow, GenericRowData physicalRow, Collector<RowData> out) { // shortcut in case no output projection is required if (!hasMetadata) { out.collect(physicalRow); return; } final int physicalArity = physicalRow.getArity(); final int metadataArity = metadataConverters.length; final GenericRowData producedRow = new GenericRowData(physicalRow.getRowKind(), physicalArity + metadataArity); for (int physicalPos = 0; physicalPos < physicalArity; physicalPos++) { producedRow.setField(physicalPos, physicalRow.getField(physicalPos)); } for (int metadataPos = 0; metadataPos < metadataArity; metadataPos++) { producedRow.setField( physicalArity + metadataPos, metadataConverters[metadataPos].convert(rootRow)); } out.collect(producedRow); } @Override public boolean isEndOfStream(RowData nextElement) { return false; } @Override public TypeInformation<RowData> getProducedType() { return producedTypeInfo; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DebeziumJsonDeserializationSchema that = (DebeziumJsonDeserializationSchema) o; return Objects.equals(jsonDeserializer, that.jsonDeserializer) && hasMetadata == that.hasMetadata && Objects.equals(producedTypeInfo, that.producedTypeInfo) && schemaInclude == 
that.schemaInclude && ignoreParseErrors == that.ignoreParseErrors; } @Override public int hashCode() { return Objects.hash( jsonDeserializer, hasMetadata, producedTypeInfo, schemaInclude, ignoreParseErrors); } // -------------------------------------------------------------------------------------------- private static RowType createJsonRowType( DataType physicalDataType, List<ReadableMetadata> readableMetadata, boolean schemaInclude) { DataType payload = DataTypes.ROW( DataTypes.FIELD("before", physicalDataType), DataTypes.FIELD("after", physicalDataType), DataTypes.FIELD("op", DataTypes.STRING())); // append fields that are required for reading metadata in the payload final List<DataTypes.Field> payloadMetadataFields = readableMetadata.stream() .filter(m -> m.isJsonPayload) .map(m -> m.requiredJsonField) .distinct() .collect(Collectors.toList()); payload = DataTypeUtils.appendRowFields(payload, payloadMetadataFields); DataType root = payload; if (schemaInclude) { // when Debezium Kafka connect enables "value.converter.schemas.enable", // the JSON will contain "schema" information and we need to extract data from // "payload". 
root = DataTypes.ROW(DataTypes.FIELD("payload", payload)); } // append fields that are required for reading metadata in the root final List<DataTypes.Field> rootMetadataFields = readableMetadata.stream() .filter(m -> !m.isJsonPayload) .map(m -> m.requiredJsonField) .distinct() .collect(Collectors.toList()); root = DataTypeUtils.appendRowFields(root, rootMetadataFields); return (RowType) root.getLogicalType(); } private static MetadataConverter[] createMetadataConverters( RowType jsonRowType, List<ReadableMetadata> requestedMetadata, boolean schemaInclude) { return requestedMetadata.stream() .map( m -> { if (m.isJsonPayload) { return convertInPayload(jsonRowType, m, schemaInclude); } else { return convertInRoot(jsonRowType, m); } }) .toArray(MetadataConverter[]::new); } private static MetadataConverter convertInRoot(RowType jsonRowType, ReadableMetadata metadata) { final int pos = findFieldPos(metadata, jsonRowType); return new MetadataConverter() { private static final long serialVersionUID = 1L; @Override public Object convert(GenericRowData root, int unused) { return metadata.converter.convert(root, pos); } }; } private static MetadataConverter convertInPayload( RowType jsonRowType, ReadableMetadata metadata, boolean schemaInclude) { if (schemaInclude) { final int pos = findFieldPos(metadata, (RowType) jsonRowType.getChildren().get(0)); return new MetadataConverter() { private static final long serialVersionUID = 1L; @Override public Object convert(GenericRowData root, int unused) { final GenericRowData payload = (GenericRowData) root.getField(0); return metadata.converter.convert(payload, pos); } }; } return convertInRoot(jsonRowType, metadata); } private static int findFieldPos(ReadableMetadata metadata, RowType jsonRowType) { return jsonRowType.getFieldNames().indexOf(metadata.requiredJsonField.getName()); } // -------------------------------------------------------------------------------------------- /** * Converter that extracts a metadata field from the 
row (root or payload) that comes out of the * JSON schema and converts it to the desired data type. */ interface MetadataConverter extends Serializable { // Method for top-level access. default Object convert(GenericRowData row) { return convert(row, -1); } Object convert(GenericRowData row, int pos); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.core.logic.notification; import java.util.Date; import java.util.Properties; import javax.mail.internet.MimeMessage; import org.apache.commons.lang3.StringUtils; import org.apache.syncope.common.lib.types.AuditElements; import org.apache.syncope.common.lib.types.AuditElements.Result; import org.apache.syncope.common.lib.types.TaskType; import org.apache.syncope.common.lib.types.TraceLevel; import org.apache.syncope.core.persistence.api.dao.TaskDAO; import org.apache.syncope.core.persistence.api.entity.EntityFactory; import org.apache.syncope.core.persistence.api.entity.task.NotificationTask; import org.apache.syncope.core.persistence.api.entity.task.TaskExec; import org.apache.syncope.core.misc.AuditManager; import org.apache.syncope.core.misc.ExceptionUtil; import org.apache.syncope.core.provisioning.api.notification.NotificationManager; import org.quartz.DisallowConcurrentExecution; import org.quartz.Job; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.mail.javamail.JavaMailSender; import 
org.springframework.mail.javamail.JavaMailSenderImpl;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.stereotype.Component;

/**
 * Periodically checks for notification to send.
 *
 * @see NotificationTask
 */
@Component
@DisallowConcurrentExecution
public class NotificationJob implements Job {

    public enum Status {

        SENT,
        NOT_SENT

    }

    public static final String DEFAULT_CRON_EXP = "0 0/5 * * * ?";

    /**
     * Logger.
     */
    private static final Logger LOG = LoggerFactory.getLogger(NotificationJob.class);

    @Autowired
    private AuditManager auditManager;

    @Autowired
    private NotificationManager notificationManager;

    @Autowired
    private JavaMailSender mailSender;

    @Autowired
    private EntityFactory entityFactory;

    /**
     * Task DAO.
     */
    @Autowired
    private TaskDAO taskDAO;

    /** Maximum number of retries for a failed notification, refreshed on every run. */
    private long maxRetries;

    /**
     * Refreshes the retry limit and, if a mail username is configured, forces SMTP authentication
     * on the underlying {@link JavaMailSenderImpl}.
     */
    private void init() {
        maxRetries = notificationManager.getMaxRetries();

        if (mailSender instanceof JavaMailSenderImpl
                && StringUtils.isNotBlank(((JavaMailSenderImpl) mailSender).getUsername())) {

            // hoist the repeated cast (was performed three times)
            JavaMailSenderImpl javaMailSender = (JavaMailSenderImpl) mailSender;
            Properties javaMailProperties = javaMailSender.getJavaMailProperties();
            javaMailProperties.setProperty("mail.smtp.auth", "true");
            javaMailSender.setJavaMailProperties(javaMailProperties);
        }
    }

    /**
     * Sends the e-mails for a single notification task and records the execution.
     *
     * @param task notification task to process
     * @return the (possibly stored) task execution describing the outcome
     */
    public TaskExec executeSingle(final NotificationTask task) {
        init();

        TaskExec execution = entityFactory.newEntity(TaskExec.class);
        execution.setTask(task);
        execution.setStartDate(new Date());

        boolean retryPossible = true;

        if (StringUtils.isBlank(task.getSubject()) || task.getRecipients().isEmpty()
                || StringUtils.isBlank(task.getHtmlBody()) || StringUtils.isBlank(task.getTextBody())) {

            String message = "Could not fetch all required information for sending e-mails:\n"
                    + task.getRecipients() + "\n"
                    + task.getSender() + "\n"
                    + task.getSubject() + "\n"
                    + task.getHtmlBody() + "\n"
                    + task.getTextBody();
            LOG.error(message);

            execution.setStatus(Status.NOT_SENT.name());
            retryPossible = false;

            if (task.getTraceLevel().ordinal() >= TraceLevel.FAILURES.ordinal()) {
                execution.setMessage(message);
            }
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug("About to send e-mails:\n"
                        + task.getRecipients() + "\n"
                        + task.getSender() + "\n"
                        + task.getSubject() + "\n"
                        + task.getHtmlBody() + "\n"
                        + task.getTextBody() + "\n");
            }

            for (String to : task.getRecipients()) {
                try {
                    MimeMessage message = mailSender.createMimeMessage();
                    MimeMessageHelper helper = new MimeMessageHelper(message, true);

                    helper.setTo(to);
                    helper.setFrom(task.getSender());
                    helper.setSubject(task.getSubject());
                    helper.setText(task.getTextBody(), task.getHtmlBody());

                    mailSender.send(message);

                    execution.setStatus(Status.SENT.name());

                    StringBuilder report = new StringBuilder();
                    switch (task.getTraceLevel()) {
                        case ALL:
                            report.append("FROM: ").append(task.getSender()).append('\n').
                                    append("TO: ").append(to).append('\n').
                                    append("SUBJECT: ").append(task.getSubject()).append('\n').append('\n').
                                    append(task.getTextBody()).append('\n').append('\n').
                                    append(task.getHtmlBody()).append('\n');
                            break;

                        case SUMMARY:
                            report.append("E-mail sent to ").append(to).append('\n');
                            break;

                        case FAILURES:
                        case NONE:
                        default:
                    }
                    if (report.length() > 0) {
                        execution.setMessage(report.toString());
                    }

                    auditManager.audit(
                            AuditElements.EventCategoryType.TASK,
                            "notification",
                            null,
                            "send",
                            Result.SUCCESS,
                            null,
                            null,
                            task,
                            "Successfully sent notification to " + to);
                } catch (Exception e) {
                    LOG.error("Could not send e-mail", e);

                    execution.setStatus(Status.NOT_SENT.name());
                    if (task.getTraceLevel().ordinal() >= TraceLevel.FAILURES.ordinal()) {
                        execution.setMessage(ExceptionUtil.getFullStackTrace(e));
                    }

                    auditManager.audit(
                            AuditElements.EventCategoryType.TASK,
                            "notification",
                            null,
                            "send",
                            Result.FAILURE,
                            null,
                            null,
                            task,
                            "Could not send notification to " + to, e);
                }
            }
        }

        // FIX: set the end date exactly once, after all processing. Previously it was set only
        // inside the per-recipient loop, so executions failing the required-information check
        // were stored without any end date.
        execution.setEndDate(new Date());

        if (hasToBeRegistered(execution)) {
            execution = notificationManager.storeExec(execution);
            if (retryPossible && (Status.valueOf(execution.getStatus()) == Status.NOT_SENT)) {
                handleRetries(execution);
            }
        } else {
            notificationManager.setTaskExecuted(execution.getTask().getKey(), true);
        }

        return execution;
    }

    @Override
    public void execute(final JobExecutionContext context) throws JobExecutionException {
        LOG.debug("Waking up...");

        for (NotificationTask task : taskDAO.<NotificationTask>findToExec(TaskType.NOTIFICATION)) {
            LOG.debug("Found notification task {} to be executed: starting...", task);
            executeSingle(task);
            LOG.debug("Notification task {} executed", task);
        }

        LOG.debug("Sleeping again...");
    }

    /**
     * An execution is stored when it failed and failures are traced, or when everything is traced.
     */
    private boolean hasToBeRegistered(final TaskExec execution) {
        NotificationTask task = (NotificationTask) execution.getTask();

        // True if either failed and failures have to be registered, or if ALL
        // has to be registered.
        return (Status.valueOf(execution.getStatus()) == Status.NOT_SENT
                && task.getTraceLevel().ordinal() >= TraceLevel.FAILURES.ordinal())
                || task.getTraceLevel() == TraceLevel.ALL;
    }

    /**
     * Re-schedules the task for execution while the failure count stays within {@code maxRetries};
     * otherwise gives up and audits the failure.
     */
    private void handleRetries(final TaskExec execution) {
        if (maxRetries <= 0) {
            return;
        }

        long failedExecutionsCount = notificationManager.countExecutionsWithStatus(
                execution.getTask().getKey(), Status.NOT_SENT.name());

        if (failedExecutionsCount <= maxRetries) {
            LOG.debug("Execution of notification task {} will be retried [{}/{}]",
                    execution.getTask(), failedExecutionsCount, maxRetries);
            notificationManager.setTaskExecuted(execution.getTask().getKey(), false);

            auditManager.audit(
                    AuditElements.EventCategoryType.TASK,
                    "notification",
                    null,
                    "retry",
                    Result.SUCCESS,
                    null,
                    null,
                    execution,
                    "Notification task " + execution.getTask().getKey() + " will be retried");
        } else {
            LOG.error("Maximum number of retries reached for task {} - giving up", execution.getTask());

            auditManager.audit(
                    AuditElements.EventCategoryType.TASK,
                    "notification",
                    null,
                    "retry",
                    Result.FAILURE,
                    null,
                    null,
                    execution,
                    "Giving up retries on notification task " + execution.getTask().getKey());
        }
    }
}
/******************************************************************************* * Copyright (c) 2009 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * Zend Technologies *******************************************************************************/ package org.eclipse.php.internal.ui.preferences.includepath; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.dltk.core.IBuildpathEntry; import org.eclipse.dltk.internal.ui.wizards.NewWizardMessages; import org.eclipse.dltk.internal.ui.wizards.buildpath.BPListElement; import org.eclipse.dltk.internal.ui.wizards.buildpath.BPListElementAttribute; import org.eclipse.dltk.internal.ui.wizards.buildpath.BPListElementSorter; import org.eclipse.dltk.internal.ui.wizards.dialogfields.DialogField; import org.eclipse.dltk.internal.ui.wizards.dialogfields.ListDialogField; import org.eclipse.dltk.internal.ui.wizards.dialogfields.TreeListDialogField; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.jface.window.Window; import org.eclipse.php.internal.core.buildpath.BuildPathUtils; import org.eclipse.php.internal.core.includepath.IncludePath; import org.eclipse.php.internal.core.includepath.IncludePathManager; import org.eclipse.php.internal.ui.PHPUIMessages; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.ui.internal.IChangeListener; /** * Source page for the include path preference page * * @author Eden K., 2008 * */ public 
class PHPIncludePathSourcePage extends PHPSourceContainerWorkbookPage { // redefine the indexes of the buttons protected int IDX_ADD = 0; protected int IDX_REMOVE = 1; protected int IDX_ADD_LINK = 2; protected int IDX_EDIT = 3; private List<BPListElement> fAddedElements = new ArrayList<BPListElement>(1); private boolean addToBuildPath = false; public boolean shouldAddToBuildPath() { return addToBuildPath; } public List<BPListElement> getAddedElements() { return fAddedElements; } public PHPIncludePathSourcePage(ListDialogField buildpathList) { super(buildpathList); } /** * This is actually the content provider for the folders list. override the * hasChildren method according to the filter * * @author Eden K., 2008 * */ protected class PHPSourceContainerAdapter extends SourceContainerAdapter { @Override public boolean hasChildren(TreeListDialogField field, Object element) { return false; } } /** * Define which elements in the tree should be displayed * * @param element * @return */ private boolean shouldDisplayElement(Object element) { if (element instanceof BPListElementAttribute) { BPListElementAttribute attribute = (BPListElementAttribute) element; String key = attribute.getKey(); // do not display include and exclude nodes if (key.equals(BPListElement.INCLUSION) || key.equals(BPListElement.EXCLUSION)) { return false; } } return true; } @Override protected boolean canRemove(List selElements) { if (selElements.size() == 0) { return false; } for (int i = 0; i < selElements.size(); i++) { Object elem = selElements.get(i); if (elem instanceof BPListElement) { BPListElement curr = (BPListElement) elem; if (BuildPathUtils.isInBuildpath(curr.getPath(), fCurrJProject)) { return false; } } } return super.canRemove(selElements); } /* * (non-Javadoc) * * @see org.eclipse.dltk.internal.ui.wizards.buildpath. 
* SourceContainerWorkbookPage #initContainerElements() */ @Override protected void initContainerElements() { SourceContainerAdapter adapter = new PHPSourceContainerAdapter(); String[] buttonLabels; buttonLabels = new String[] { NewWizardMessages.SourceContainerWorkbookPage_folders_add_button, NewWizardMessages.SourceContainerWorkbookPage_folders_remove_button }; fFoldersList = new TreeListDialogField(adapter, buttonLabels, new PHPIPListLabelProvider()); fFoldersList.setDialogFieldListener(adapter); fFoldersList.setLabelText(PHPUIMessages.IncludePathSourcePage_Folders_Label); fFoldersList.setViewerSorter(new BPListElementSorter()); } @Override protected void updateFoldersList() { ArrayList folders = new ArrayList(); IncludePath[] includePath = IncludePathManager.getInstance().getIncludePaths(fCurrJProject.getProject()); // the include path is made of resources and/or buildpath entries // extract the resource out of the entries and create "build path list" // elements // for display purposes List<IncludePath> includePathEntries = Arrays.asList(includePath); for (IncludePath entry : includePathEntries) { Object includePathEntry = entry.getEntry(); IResource resource = null; if (!(includePathEntry instanceof IBuildpathEntry)) { resource = (IResource) includePathEntry; folders.add(new BPListElement(fCurrJProject, IBuildpathEntry.BPE_SOURCE, resource.getFullPath(), resource, false)); } } fFoldersList.setElements(folders); } /** * Get the original functionality and add a filter */ @Override public Control getControl(Composite parent) { Control control = super.getControl(parent); addFilter(); return control; } private void addFilter() { fFoldersList.getTreeViewer().addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { return shouldDisplayElement(element); } }); } @Override protected int getIDX_ADD() { return IDX_ADD; } @Override protected int getIDX_ADD_LINK() { return IDX_ADD_LINK; } @Override protected int 
getIDX_EDIT() { return IDX_EDIT; } @Override protected int getIDX_REMOVE() { return IDX_REMOVE; } @Override protected void sourcePageCustomButtonPressed(DialogField field, int index) { if (field == fFoldersList) { if (index == IDX_ADD) { IProject project = fCurrJProject.getProject(); if (project.exists() && hasFolders(project)) { List existingElements = fFoldersList.getElements(); BPListElement[] existing = (BPListElement[]) existingElements .toArray(new BPListElement[existingElements.size()]); CreateMultipleSourceFoldersDialog dialog = new CreateMultipleSourceFoldersDialog(fCurrJProject, existing, getShell()); if (dialog.open() == Window.OK) { refresh(dialog.getInsertedElements(), dialog.getRemovedElements(), dialog.getModifiedElements()); } } else { BPListElement newElement = new BPListElement(fCurrJProject, IBuildpathEntry.BPE_SOURCE, false); AddSourceFolderWizard wizard = newSourceFolderWizard(newElement, fFoldersList.getElements(), true); OpenBuildPathWizardAction action = new OpenBuildPathWizardAction(wizard); action.run(); } } else { super.sourcePageCustomButtonPressed(field, index); } } } @Override protected void refresh(List insertedElements, List removedElements, List modifiedElements) { fAddedElements.clear(); fFoldersList.addElements(insertedElements); // for each added source entry, check if it is already a part of the // buildpath // in case it is not, add the entry to the added elements list // and ask the user if he would like to add it to the build path as well for (Iterator iterator = insertedElements.iterator(); iterator.hasNext();) { BPListElement element = (BPListElement) iterator.next(); if (!BuildPathUtils.isContainedInBuildpath(element.getPath(), fCurrJProject)) { fAddedElements.add(element); } } if (fAddedElements.size() > 0) { addToBuildPath = IncludePathUtils.openConfirmationDialog(getShell(), PHPUIMessages.IncludePath_AddEntryTitle, PHPUIMessages.IncludePath_AddEntryToBuildPathMessage); // for (IChangeListener listener : 
addedElementListeners) { listener.update(true); } } for (Iterator iter = insertedElements.iterator(); iter.hasNext();) { BPListElement element = (BPListElement) iter.next(); fFoldersList.expandElement(element, 3); } fFoldersList.removeElements(removedElements); for (Iterator iter = modifiedElements.iterator(); iter.hasNext();) { BPListElement element = (BPListElement) iter.next(); fFoldersList.refresh(element); fFoldersList.expandElement(element, 3); } fFoldersList.refresh(); // does enforce the order of the entries. if (!insertedElements.isEmpty()) { fFoldersList.postSetSelection(new StructuredSelection(insertedElements)); } } }
package net.sf.colorer.impl;

import java.util.Vector;

import net.sf.colorer.FileType;
import net.sf.colorer.HRCParser;
import net.sf.colorer.LineSource;
import net.sf.colorer.ParserFactory;
import net.sf.colorer.Region;
import net.sf.colorer.RegionHandler;
import net.sf.colorer.editor.BaseEditor;
import net.sf.colorer.editor.EditorListener;
import net.sf.colorer.editor.PairMatch;
import net.sf.colorer.handlers.LineRegion;
import net.sf.colorer.handlers.RegionDefine;
import net.sf.colorer.handlers.RegionMapper;

/**
 * JNI-backed {@link BaseEditor} implementation. Every public operation delegates to a native
 * peer addressed by {@code iptr}; most methods first call {@link #checkActive()} so a disposed
 * instance fails fast instead of touching freed native memory.
 *
 * <p>NOTE(review): the semantics of the native methods live in the companion C++ library and are
 * not visible here — the per-method comments below only describe what the Java side does.
 */
public class BaseEditorNative implements BaseEditor {

    /** internal native object */
    private long iptr;

    // set once by dispose(); guards every subsequent call via checkActive()
    boolean disposed = false;

    // visible window (first line and size) as last reported by visibleTextEvent()
    int wStart, wSize;
    // total line count as last reported by lineCountEvent()
    int lineCount;

    // regions used to recognize paired constructs (brackets etc.) in getPairMatch/searchPair
    Region defPairStart = null;
    Region defPairEnd = null;

    Vector editorListeners = new Vector();
    ParserFactory fParserFactory;
    // kept only to pin the Java object while the native side references it
    RegionMapper regionMapper;

    //native Region getRegion(final long iptr, final String qname);

    /**
     * Creates the native peer for the given parser factory and line source and resolves the
     * def:PairStart / def:PairEnd regions used for pair matching.
     */
    public BaseEditorNative(ParserFactory pf, LineSource lineSource) {
        iptr = init(pf, lineSource);
        fParserFactory = pf;
        HRCParser hrcParser = pf.getHRCParser();
        defPairStart = hrcParser.getRegion("def:PairStart");
        defPairEnd = hrcParser.getRegion("def:PairEnd");
        setBackParse(2000);// TODO!!!
        if (Logger.TRACE) {
            Logger.trace("BaseEditor", "init");
        }
    };

    /** Throws if this editor has already been disposed. */
    void checkActive() {
        if (disposed) {
            throw new RuntimeException("checkActive");
        }
    }

    public boolean isDisposed() {
        return disposed;
    }

    /** Releases the native peer; any later call on this instance will throw. */
    public void dispose() {
        checkActive();
        disposed = true;
        this.regionMapper = null;
        finalize(iptr);
        if (Logger.TRACE) {
            Logger.trace("BaseEditor", "dispose");
        }
    }

    /**
     * Safety net: frees the native peer if the owner forgot to call {@link #dispose()}.
     */
    protected void finalize() throws Throwable {
        if (!disposed){
            if (Logger.TRACE) {
                Logger.trace("BaseEditor", "Colorer:BaseEditorNative: Invalid global state: dispose() should be called explicitly!");
            }
            // Memory leak instead???
            // Is this safe???
            dispose();
        }
    }

    public void setRegionCompact(boolean compact) {
        checkActive();
        setRegionCompact(iptr, compact);
    }

    // setting the file type invalidates all coloring, hence modifyEvent(0)
    public void setFileType(FileType typename) {
        checkActive();
        setFileType(iptr, typename);
        modifyEvent(iptr, 0);
    }

    public FileType chooseFileType(String fname) {
        checkActive();
        modifyEvent(iptr, 0);
        return chooseFileType(iptr, fname);
    }

    public FileType getFileType() {
        checkActive();
        return getFileType(iptr);
    }

    public void setRegionMapper(RegionMapper regionMapper) {
        checkActive();
        setRegionMapper(iptr, regionMapper);
        // Save object reference
        this.regionMapper = regionMapper;
        modifyEvent(iptr, 0);
    }

    public void setRegionMapper(String hrdClass, String hrdName) {
        checkActive();
        setRegionMapper(iptr, hrdClass, hrdName);
        modifyEvent(iptr, 0);
    }

    public void addRegionHandler(RegionHandler rh, Region filter) {
        checkActive();
        addRegionHandler(iptr, rh, filter);
    }

    public void removeRegionHandler(RegionHandler rh) {
        checkActive();
        removeRegionHandler(iptr, rh);
    }

    // listeners are kept Java-side only; see modifyEvent(int)
    public void addEditorListener(EditorListener el) {
        checkActive();
        editorListeners.add(el);
    }

    public void removeEditorListener(EditorListener el) {
        checkActive();
        editorListeners.remove(el);
    }

    public RegionDefine getBackground() {
        checkActive();
        return getBackground(iptr);
    }

    public RegionDefine getVertCross() {
        checkActive();
        return getVertCross(iptr);
    };

    public RegionDefine getHorzCross() {
        checkActive();
        return getHorzCross(iptr);
    }

    public void setBackParse(int backParse) {
        checkActive();
        setBackParse(iptr, backParse);
    }

    /**
     * Returns a {@link PairMatch} for the pair-start/pair-end region covering the given position
     * on the given line, or null when none covers it. The LAST matching region on the line wins.
     * For a pair start, the returned match has balance 1 and topPosition set; the matching end is
     * found later by {@link #searchPair}.
     */
    public PairMatch getPairMatch(int lineNo, int linePos) {
        checkActive();
        LineRegion[] lrArray = getLineRegions(lineNo);
        if (lrArray.length == 0) return null;
        LineRegion pair = null;
        for (int idx = 0; idx < lrArray.length; idx++) {
            LineRegion l1 = lrArray[idx];
            if (l1.region == null) continue;
            if ((l1.region.hasParent(defPairStart) || l1.region
                    .hasParent(defPairEnd))
                    && linePos >= l1.start
                    && linePos <= l1.end)
                pair = l1;
        }
        ;
        if (pair != null) {
            PairMatch pm = new PairMatch(pair, null, lineNo, -1, -1, false);
            if (pair.region.hasParent(defPairStart)) {
                pm.pairBalance = 1;
                pm.topPosition = true;
            }
            ;
            return pm;
        }
        return null;
    }

    // last line index inside the visible window (clamped to the document end)
    int getLastVisibleLine() {
        checkActive();
        int r1 = (wStart + wSize);
        int r2 = lineCount;
        return ((r1 > r2) ? r2 : r1) - 1;
    }

    /**
     * Walks line regions forward (balance &gt; 0) or backward (otherwise) from pm.start, counting
     * pair-start regions as +1 and pair-end regions as -1, until the balance reaches zero or the
     * [start_line, end_line] range is exhausted. On success fills pm.end / pm.eline.
     */
    void searchPair(PairMatch pm, int start_line, int end_line) {
        checkActive();
        LineRegion pair = pm.start;
        int lno = pm.sline;
        LineRegion[] slr = getLineRegions(lno);
        // locate the start region inside its own line's region array
        int li = 0;
        while (li < slr.length && slr[li] != pair)
            li++;
        if (li == slr.length)
            return;
        while (true) {
            if (pm.pairBalance > 0) {
                // forward scan: advance to the next region, moving to following lines as needed
                li++;
                while (li == slr.length) {
                    lno++;
                    if (lno > end_line)
                        break;
                    slr = getLineRegions(lno);
                    li = 0;
                };
                if (lno > end_line)
                    break;
                pair = slr[li];
            } else {
                // backward scan: step to the previous region, moving to preceding lines as needed
                while (li == 0) {
                    lno--;
                    if (lno < start_line)
                        break;
                    slr = getLineRegions(lno);
                    li = slr.length;
                };
                if (lno < start_line)
                    break;
                li--;
                pair = slr[li];
            }
            ;
            if (pair.region == null)
                continue;
            if (pair.region.hasParent(defPairStart))
                pm.pairBalance++;
            if (pair.region.hasParent(defPairEnd))
                pm.pairBalance--;
            if (pm.pairBalance == 0)
                break;
        }
        ;
        if (pm.pairBalance == 0) {
            pm.eline = lno;
            pm.end = pair;
        }
        ;
    };

    /** Pair search restricted to the currently visible window. */
    public void searchLocalPair(PairMatch pm) {
        checkActive();
        int end_line = getLastVisibleLine();
        searchPair(pm, wStart, end_line);
    }

    /** Pair search over the whole document. */
    public void searchGlobalPair(PairMatch pm) {
        checkActive();
        searchPair(pm, 0, lineCount - 1);
    }

    public LineRegion[] getLineRegions(int lno) {
        checkActive();
        if (Logger.TRACE) {
            Logger.trace("BaseEditor", "getLineRegions:"+lno);
        }
        return getLineRegions(iptr, lno);
    }

    public void validate(int lno) {
        checkActive();
        validate(iptr, lno);
    }

    public void idleJob(int time) {
        checkActive();
        idleJob(iptr, time);
    }

    /**
     * Notifies Java-side listeners (in reverse registration order) and then the native peer
     * about a modification starting at topLine.
     */
    public void modifyEvent(int topLine) {
        checkActive();
        for (int idx = editorListeners.size()-1; idx >= 0; idx--) {
            ((EditorListener)editorListeners.elementAt(idx)).modifyEvent(topLine);
        }
        if (Logger.TRACE) {
            Logger.trace("BaseEditor", "modifyEvent:"+topLine);
        }
        modifyEvent(iptr, topLine);
    }

    public void modifyLineEvent(int line) {
        checkActive();
        if (Logger.TRACE) {
            Logger.trace("BaseEditor", "modifyLIneEvent:"+line);
        }
        modifyLineEvent(iptr, line);
    }

    /** Records the visible window and forwards it to the native peer. */
    public void visibleTextEvent(int wStart, int wSize) {
        checkActive();
        if (Logger.TRACE) {
            Logger.trace("BaseEditor", "visibleTextEvent:"+wStart+":"+wSize);
        }
        visibleTextEvent(iptr, wStart, wSize);
        this.wStart = wStart;
        this.wSize = wSize;
    }

    /** Records the new line count and forwards it to the native peer. */
    public void lineCountEvent(int newLineCount) {
        checkActive();
        lineCountEvent(iptr, newLineCount);
        lineCount = newLineCount;
    }

    public ParserFactory getParserFactory() {
        return fParserFactory;
    }

    // ---- native peer interface; each takes the native object pointer as first argument ----

    native long init(ParserFactory pf, LineSource lineSource);

    // frees the native peer (overload of Object.finalize by parameter, not an override)
    native void finalize(long iptr);

    native void setRegionCompact(long iptr, boolean compact);

    native void setRegionMapper(long iptr, RegionMapper regionMapper);

    native void setRegionMapper(long iptr, String hrdClass, String hrdName);

    native void addRegionHandler(long iptr, RegionHandler rh, Region filter);

    native void removeRegionHandler(long iptr, RegionHandler rh);

    native void setFileType(long iptr, FileType typename);

    native FileType getFileType(long iptr);

    native FileType chooseFileType(long iptr, String fname);

    native void setBackParse(long iptr, int backParse);

    native RegionDefine getBackground(long iptr);

    native RegionDefine getVertCross(long iptr);

    native RegionDefine getHorzCross(long iptr);

    native LineRegion[] getLineRegions(long iptr, int lno);

    native void validate(long iptr, int lno);

    native void idleJob(long iptr, int time);

    native void modifyEvent(long iptr, int topLine);

    native void modifyLineEvent(long iptr, int line);

    native void visibleTextEvent(long iptr, int wStart, int wSize);

    native void lineCountEvent(long iptr, int newLineCount);
};
/*******************************************************************************
 * ***** BEGIN LICENSE BLOCK Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with the
 * License.
You may obtain a copy of the License at http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for * the specific language governing rights and limitations under the License. * * The Original Code is the Colorer Library. * * The Initial Developer of the Original Code is Cail Lomecb <[email protected]>. * Portions created by the Initial Developer are Copyright (C) 1999-2003 the * Initial Developer. All Rights Reserved. * * Contributor(s): * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or the * GNU Lesser General Public License Version 2.1 or later (the "LGPL"), in which * case the provisions of the GPL or the LGPL are applicable instead of those * above. If you wish to allow use of your version of this file only under the * terms of either the GPL or the LGPL, and not to allow others to use your * version of this file under the terms of the MPL, indicate your decision by * deleting the provisions above and replace them with the notice and other * provisions required by the GPL or the LGPL. If you do not delete the * provisions above, a recipient may use your version of this file under the * terms of any one of the MPL, the GPL or the LGPL. * * ***** END LICENSE BLOCK ***** */
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gflogger.formatter;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.util.*;

import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Tests for {@code BufferFormatter}: appending strings, booleans and numeric
 * values to direct {@link ByteBuffer}s and {@link CharBuffer}s, comparing the
 * formatted output against the JDK's own {@code toString}/{@code String.format}
 * rendering.
 *
 * @author Vladimir Dolzhenko, [email protected]
 */
public class BufferFormatterTest {

	@Test
	public void testAppendByteBufferString() throws Exception {
		// null must render as the literal "null", matching String.valueOf(null).
		final String[] values = new String[]{"true", null, "value"};
		final ByteBuffer buffer = ByteBuffer.allocateDirect(50);
		for (int i = 0; i < values.length; i++) {
			BufferFormatter.append(buffer, values[i]);
			final String s = toString(buffer);
			assertEquals(String.valueOf(values[i]), s);
			buffer.clear();
		}
	}

	@Test
	public void testAppendCharBufferBoolean() throws Exception {
		final boolean[] booleans = new boolean[]{true, false};
		final CharBuffer buffer = ByteBuffer.allocateDirect(50).asCharBuffer();
		for (int i = 0; i < booleans.length; i++) {
			BufferFormatter.append(buffer, booleans[i]);
			assertEquals(Boolean.toString(booleans[i]), toString(buffer));
			buffer.clear();
		}
	}

	@Test
	public void testAppendByteBufferBoolean() throws Exception {
		final boolean[] booleans = new boolean[]{true, false};
		final ByteBuffer buffer = ByteBuffer.allocateDirect(50);
		for (int i = 0; i < booleans.length; i++) {
			BufferFormatter.append(buffer, booleans[i]);
			assertEquals(Boolean.toString(booleans[i]), toString(buffer));
			buffer.clear();
		}
	}

	@Test
	public void testAppendByte() throws Exception {
		final CharBuffer buffer = ByteBuffer.allocateDirect(50).asCharBuffer();
		// Iterate with an int so Byte.MAX_VALUE itself is covered: a byte loop
		// variable with "b <= Byte.MAX_VALUE" would wrap around and never
		// terminate, and the original "b < Byte.MAX_VALUE" silently skipped
		// the maximum value.
		for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; i++) {
			final byte b = (byte) i;
			BufferFormatter.append(buffer, b);
			assertEquals(Byte.toString(b), toString(buffer));
			buffer.clear();
		}
	}

	@Test
	public void testAppendShort() throws Exception {
		final CharBuffer buffer = ByteBuffer.allocateDirect(50).asCharBuffer();
		// Same pattern as testAppendByte: int loop variable so Short.MAX_VALUE
		// is included without the short counter overflowing.
		for (int i = Short.MIN_VALUE; i <= Short.MAX_VALUE; i++) {
			final short s = (short) i;
			BufferFormatter.append(buffer, s);
			assertEquals(Short.toString(s), toString(buffer));
			buffer.clear();
		}
	}

	@Test
	public void testAppendInt() throws Exception {
		final CharBuffer buffer = ByteBuffer.allocateDirect(30).asCharBuffer();
		// Exhaustive sweep of a window straddling the short range.
		for (int i = Short.MIN_VALUE - 100; i < Short.MAX_VALUE + 100; i++) {
			BufferFormatter.append(buffer, i);
			assertEquals(Integer.toString(i), toString(buffer));
			buffer.clear();
		}
		// Spot-check extremes; the trailing space ensures the formatter leaves
		// the buffer position where a subsequent append lands correctly.
		final int[] numbers = new int[]{9123123, Integer.MAX_VALUE, Integer.MIN_VALUE};
		for (int i = 0; i < numbers.length; i++) {
			BufferFormatter.append(buffer, numbers[i]);
			buffer.append(' ');
			assertEquals(Integer.toString(numbers[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
	}

	@Test
	public void testAppendLong() throws Exception {
		final CharBuffer buffer = ByteBuffer.allocateDirect(50).asCharBuffer();
		for (long i = Short.MIN_VALUE - 100; i < Short.MAX_VALUE + 100; i++) {
			BufferFormatter.append(buffer, i);
			assertEquals(Long.toString(i), toString(buffer));
			buffer.clear();
		}
		// NOTE: the just-outside-int-range entries use long arithmetic
		// (+ 249L / - 100L). The original wrote "Integer.MAX_VALUE + 249",
		// which overflows in int arithmetic *before* widening to long, so the
		// intended beyond-int values were never actually tested.
		final long[] numbers = new long[]{7123712398L, 9999999999399L, 99999999999999L,
			10007, 1000000, 123456789, 1234567890L, 987654321, 9876543210L,
			Integer.MAX_VALUE, Integer.MIN_VALUE,
			Integer.MAX_VALUE + 249L, Integer.MIN_VALUE - 100L,
			Long.MAX_VALUE, Long.MIN_VALUE};
		for (int i = 0; i < numbers.length; i++) {
			BufferFormatter.append(buffer, numbers[i]);
			buffer.append(' ');
			assertEquals(Long.toString(numbers[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
	}

	@Test
	public void testAppendDoubleCharBufferWithPrecision() throws Exception {
		final CharBuffer buffer = ByteBuffer.allocateDirect(100).asCharBuffer();
		{
			// Integral doubles with precision 0 must match Double.toString.
			final double[] numbers = new double[]{0, 1, 7, 11, 123, 7895, -100, 101, -10007};
			for (int i = 0; i < numbers.length; i++) {
				BufferFormatter.append(buffer, numbers[i], 0);
				buffer.append(' ');
				assertEquals(Double.toString(numbers[i]) + " ", toString(buffer));
				// check
				buffer.clear();
			}
		}
		// Disabled: fractional values whose %.8f rendering the formatter does
		// not yet reproduce exactly — kept for reference.
		// final double[] numbers = new double[]{
		//     1.025292, 1.0025292, 1.00025292, 1.000025292, 1.0000025292, 1.00000025292,
		//     10.025292, 10.0025292, 10.00025292, 10.000025292,
		//     -1.025292, -1.0025292, -1.00025292, -1.000025292, -1.0000025292, -1.00000025292,
		//     -10.025292, -10.0025292, -10.00025292, -10.000025292,
		//     1.4328, -123.9487, -0.5};
		// for (int i = 0; i < numbers.length; i++) {
		//     final double number = numbers[i];
		//     BufferFormatter.append(buffer, number, 8);
		//     buffer.append(' ');
		//     assertEquals(
		//         String.format(Locale.ENGLISH, "%.8f", number ) + " ",
		//         toString(buffer)
		//     );
		//     // check
		//     buffer.clear();
		// }

		// Large magnitudes: must match %.6f exactly.
		final double[] numbers2 = new double[]{1e10, 1e15, 1e18, 5.074e10};
		for (int i = 0; i < numbers2.length; i++) {
			BufferFormatter.append(buffer, numbers2[i], 6);
			buffer.append(' ');
			assertEquals(String.format(Locale.ENGLISH, "%.6f", numbers2[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}

		// Tiny magnitudes with 20 digits of precision.
		final double[] numbers3 = new double[]{1e-5, 1e-10, 1e-18, 5.074e-10, 0.0035};
		for (int i = 0; i < numbers3.length; i++) {
			BufferFormatter.append(buffer, numbers3[i], 20);
			buffer.append(' ');
			assertEquals(String.format(Locale.ENGLISH, "%.20f", numbers3[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}

		// Out-of-range / special values fall back to Double.toString rendering.
		final double[] numbers4 = new double[]{1e-19, 1e19, Double.NaN,
			Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0.0, -0.0};
		for (int i = 0; i < numbers4.length; i++) {
			BufferFormatter.append(buffer, numbers4[i], 20);
			buffer.append(' ');
			assertEquals(Double.toString(numbers4[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
	}

	@Test
	public void testAppendDoubleByteBufferWithPrecision() throws Exception {
		final ByteBuffer buffer = ByteBuffer.allocateDirect(200);
		{
			final double[] numbers = new double[]{0, 1, 7, 11, 123, 7895, -100, 101, -10007};
			for (int i = 0; i < numbers.length; i++) {
				BufferFormatter.append(buffer, numbers[i], 0);
				buffer.put((byte) ' ');
				assertEquals(Double.toString(numbers[i]) + " ", toString(buffer));
				// check
				buffer.clear();
			}
		}
		final double[] numbers = new double[]{1.4328, -123.9487, -0.5};
		for (int i = 0; i < numbers.length; i++) {
			BufferFormatter.append(buffer, numbers[i], 6);
			buffer.put((byte) ' ');
			assertEquals(String.format(Locale.ENGLISH, "%.6f", numbers[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
		final double[] numbers2 = new double[]{1e10, 1e15, 1e18};
		for (int i = 0; i < numbers2.length; i++) {
			BufferFormatter.append(buffer, numbers2[i], 6);
			buffer.put((byte) ' ');
			assertEquals(String.format(Locale.ENGLISH, "%.6f", numbers2[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
		final double[] numbers3 = new double[]{1e-5, 1e-10, 1e-18, 0.0035};
		for (int i = 0; i < numbers3.length; i++) {
			BufferFormatter.append(buffer, numbers3[i], 20);
			buffer.put((byte) ' ');
			assertEquals(String.format(Locale.ENGLISH, "%.20f", numbers3[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
		final double[] numbers4 = new double[]{1e-19, 1e19, Double.NaN,
			Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0.0, -0.0};
		for (int i = 0; i < numbers4.length; i++) {
			BufferFormatter.append(buffer, numbers4[i], 20);
			buffer.put((byte) ' ');
			assertEquals(Double.toString(numbers4[i]) + " ", toString(buffer));
			// check
			buffer.clear();
		}
	}

	@Test
	public void testAppendDoubleByteBuffer() throws Exception {
		final ByteBuffer buffer = ByteBuffer.allocateDirect(200);
		// Default (no explicit precision) rendering: fixed 15 decimal places
		// for in-range fractions, Double.toString for specials and integrals.
		final double[] numbers = new double[]{
			0.0035, 1e-19, 1e19, Double.NaN, Double.POSITIVE_INFINITY,
			Double.NEGATIVE_INFINITY, -0.0, 0.0, 1235, -1235, 0.005, -0.5};
		final String[] strings = new String[]{
			"0.003500000000000", Double.toString(1e-19), Double.toString(1e19),
			Double.toString(Double.NaN),
			Double.toString(Double.POSITIVE_INFINITY), Double.toString(Double.NEGATIVE_INFINITY),
			Double.toString(-0.0), Double.toString(0.0),
			"1235.0", "-1235.0", "0.005000000000000", "-0.500000000000000"};
		for (int i = 0; i < numbers.length; i++) {
			BufferFormatter.append(buffer, numbers[i]);
			buffer.put((byte) ' ');
			assertEquals(strings[i] + " ", toString(buffer));
			// check
			buffer.clear();
		}
	}

	@Test
	public void testAppendDoubleCharBuffer() throws Exception {
		final CharBuffer buffer = ByteBuffer.allocateDirect(100).asCharBuffer();
		final double[] numbers = new double[]{
			0.0035, 1e-19, 1e19, Double.NaN, Double.POSITIVE_INFINITY,
			Double.NEGATIVE_INFINITY, -0.0, 0.0, 1235, -1235, 0.005, -0.5};
		final String[] strings = new String[]{
			"0.003500000000000", Double.toString(1e-19), Double.toString(1e19),
			Double.toString(Double.NaN),
			Double.toString(Double.POSITIVE_INFINITY), Double.toString(Double.NEGATIVE_INFINITY),
			Double.toString(-0.0), Double.toString(0.0),
			"1235.0", "-1235.0", "0.005000000000000", "-0.500000000000000"};
		for (int i = 0; i < numbers.length; i++) {
			BufferFormatter.append(buffer, numbers[i]);
			buffer.put(' ');
			assertEquals(strings[i] + " ", toString(buffer));
			// check
			buffer.clear();
		}
	}

	/** Flips the buffer and drains it into a String (consumes the content). */
	static String toString(final CharBuffer buffer) {
		buffer.flip();
		final char[] chs = new char[buffer.limit()];
		buffer.get(chs);
		return new String(chs);
	}

	/** Flips the buffer and drains it into a String (consumes the content). */
	static String toString(final ByteBuffer buffer) {
		buffer.flip();
		final byte[] chs = new byte[buffer.limit()];
		buffer.get(chs);
		return new String(chs);
	}
}
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.util.cache.impl;

import com.facebook.buck.event.AbstractBuckEvent;
import com.facebook.buck.hashing.PathHashing;
import com.facebook.buck.io.ArchiveMemberPath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.ProjectFilesystemFactory;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.cache.FileHashCacheEngine;
import com.facebook.buck.util.cache.FileHashCacheMode;
import com.facebook.buck.util.cache.FileHashCacheVerificationResult;
import com.facebook.buck.util.cache.HashCodeAndFileType;
import com.facebook.buck.util.cache.ProjectFileHashCache;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;

/**
 * A {@link ProjectFileHashCache} over a single {@link ProjectFilesystem} that delegates the actual
 * caching strategy to a {@link FileHashCacheEngine} chosen from {@link FileHashCacheMode}. Paths
 * accepted by {@code ignoredPredicate} are excluded from caching.
 */
public class DefaultFileHashCache implements ProjectFileHashCache {

  // Opt-in debug flag: when set, every lookup also asserts the path is not ignored.
  private static final boolean SHOULD_CHECK_IGNORED_PATHS =
      Boolean.getBoolean("buck.DefaultFileHashCache.check_ignored_paths");

  private final ProjectFilesystem projectFilesystem;
  private final Predicate<Path> ignoredPredicate;

  @VisibleForTesting FileHashCacheEngine fileHashCacheEngine;

  /**
   * @param projectFilesystem filesystem all relative paths are resolved against
   * @param ignoredPredicate paths matching this predicate are never cached
   * @param fileHashCacheMode selects which {@link FileHashCacheEngine} implementation to build
   */
  protected DefaultFileHashCache(
      ProjectFilesystem projectFilesystem,
      Predicate<Path> ignoredPredicate,
      FileHashCacheMode fileHashCacheMode) {
    this.projectFilesystem = projectFilesystem;
    this.ignoredPredicate = ignoredPredicate;
    // The engines pull values lazily through these loaders; checked IOExceptions are
    // tunnelled out as RuntimeException because the loader interface cannot throw.
    FileHashCacheEngine.ValueLoader<HashCodeAndFileType> hashLoader =
        path -> {
          try {
            return getHashCodeAndFileType(path);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        };
    FileHashCacheEngine.ValueLoader<Long> sizeLoader =
        path -> {
          try {
            return getPathSize(path);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        };
    // Separate file vs. directory loaders used only by the "limited" engine variants.
    final FileHashCacheEngine.ValueLoader<HashCode> fileHashLoader =
        (path) -> {
          try {
            return getFileHashCode(path);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        };
    final FileHashCacheEngine.ValueLoader<HashCodeAndFileType> dirHashLoader =
        (path) -> {
          try {
            return getDirHashCode(path);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        };
    switch (fileHashCacheMode) {
      case PARALLEL_COMPARISON:
        fileHashCacheEngine = new ComboFileHashCache(hashLoader, sizeLoader);
        break;
      case LOADING_CACHE:
        fileHashCacheEngine = LoadingCacheFileHashCache.createWithStats(hashLoader, sizeLoader);
        break;
      case PREFIX_TREE:
        fileHashCacheEngine = FileSystemMapFileHashCache.createWithStats(hashLoader, sizeLoader);
        break;
      case LIMITED_PREFIX_TREE:
        fileHashCacheEngine =
            new StatsTrackingFileHashCacheEngine(
                new LimitedFileHashCacheEngine(
                    projectFilesystem, fileHashLoader, dirHashLoader, sizeLoader),
                "limited");
        break;
      case LIMITED_PREFIX_TREE_PARALLEL:
        fileHashCacheEngine =
            new ComboFileHashCache(
                LoadingCacheFileHashCache.createWithStats(hashLoader, sizeLoader),
                new StatsTrackingFileHashCacheEngine(
                    new LimitedFileHashCacheEngine(
                        projectFilesystem, fileHashLoader, dirHashLoader, sizeLoader),
                    "limited"));
        break;
      default:
        throw new RuntimeException(
            "Unsupported file hash cache engine: " + fileHashCacheMode.toString());
    }
  }

  /** Creates a cache that covers ONLY paths under this filesystem's buck-out directory. */
  public static DefaultFileHashCache createBuckOutFileHashCache(
      ProjectFilesystem projectFilesystem, FileHashCacheMode fileHashCacheMode) {
    return new DefaultFileHashCache(
        projectFilesystem,
        (path) -> !(path.startsWith(projectFilesystem.getBuckPaths().getBuckOut())),
        fileHashCacheMode);
  }

  /** Creates a cache that excludes buck-out and all filesystem-ignored paths. */
  public static DefaultFileHashCache createDefaultFileHashCache(
      ProjectFilesystem projectFilesystem, FileHashCacheMode fileHashCacheMode) {
    return new DefaultFileHashCache(
        projectFilesystem, getDefaultPathPredicate(projectFilesystem), fileHashCacheMode);
  }

  /** Default ignore rule: anything under buck-out, plus paths the filesystem itself ignores. */
  protected static Predicate<Path> getDefaultPathPredicate(ProjectFilesystem projectFilesystem) {
    return path ->
        path.startsWith(projectFilesystem.getBuckPaths().getBuckOut())
            || projectFilesystem.isIgnored(path);
  }

  /**
   * Builds one default cache per OS filesystem root (e.g. each drive on Windows), skipping roots
   * that do not actually exist.
   */
  public static ImmutableList<? extends ProjectFileHashCache> createOsRootDirectoriesCaches(
      ProjectFilesystemFactory projectFilesystemFactory, FileHashCacheMode fileHashCacheMode)
      throws InterruptedException {
    ImmutableList.Builder<ProjectFileHashCache> allCaches = ImmutableList.builder();
    for (Path root : FileSystems.getDefault().getRootDirectories()) {
      if (!root.toFile().exists()) {
        // On Windows, it is possible that the system will have a
        // drive for something that does not exist such as a floppy
        // disk or SD card.  The drive exists, but it does not
        // contain anything useful, so Buck should not consider it
        // as a cacheable location.
        continue;
      }

      ProjectFilesystem projectFilesystem = projectFilesystemFactory.createOrThrow(root);
      // A cache which caches hashes of absolute paths which may be accessed by certain
      // rules (e.g. /usr/bin/gcc), and only serves to prevent rehashing the same file
      // multiple times in a single run.
      allCaches.add(
          DefaultFileHashCache.createDefaultFileHashCache(projectFilesystem, fileHashCacheMode));
    }

    return allCaches.build();
  }

  // Debug-only assertion; no-op unless SHOULD_CHECK_IGNORED_PATHS is set.
  private void checkNotIgnored(Path relativePath) {
    if (SHOULD_CHECK_IGNORED_PATHS) {
      Preconditions.checkArgument(!projectFilesystem.isIgnored(relativePath));
    }
  }

  /**
   * Computes hash + file-type for a path: directories get a recursive directory hash, ".jar"
   * files get an archive entry (with member-level hashing support), everything else a plain
   * file hash.
   */
  private HashCodeAndFileType getHashCodeAndFileType(Path path) throws IOException {
    if (projectFilesystem.isDirectory(path)) {
      return getDirHashCode(path);
    } else if (path.toString().endsWith(".jar")) {
      return HashCodeAndFileType.ofArchive(
          getFileHashCode(path), new DefaultJarContentHasher(projectFilesystem, path));
    }

    return HashCodeAndFileType.ofFile(getFileHashCode(path));
  }

  /** SHA-1 of the file's contents. */
  private HashCode getFileHashCode(Path path) throws IOException {
    return projectFilesystem.computeSha1(path).asHashCode();
  }

  /** Total size in bytes of all files under {@code path}. */
  private long getPathSize(Path path) throws IOException {
    long size = 0;
    for (Path child : projectFilesystem.getFilesUnderPath(path)) {
      size += projectFilesystem.getFileSize(child);
    }
    return size;
  }

  /** Recursive SHA-1 over the directory tree plus the set of children that were hashed. */
  private HashCodeAndFileType getDirHashCode(Path path) throws IOException {
    Hasher hasher = Hashing.sha1().newHasher();
    ImmutableSet<Path> children = PathHashing.hashPath(hasher, this, projectFilesystem, path);
    return HashCodeAndFileType.ofDirectory(hasher.hash(), children);
  }

  @Override
  public boolean willGet(Path relativePath) {
    Preconditions.checkState(!relativePath.isAbsolute());
    checkNotIgnored(relativePath);
    // Serve from cache if present; otherwise only if the file exists and is not ignored.
    return fileHashCacheEngine.getIfPresent(relativePath) != null
        || (projectFilesystem.exists(relativePath) && !isIgnored(relativePath));
  }

  @Override
  public boolean isIgnored(Path path) {
    return ignoredPredicate.test(path);
  }

  @Override
  public boolean willGet(ArchiveMemberPath archiveMemberPath) {
    Preconditions.checkState(!archiveMemberPath.getArchivePath().isAbsolute());
    checkNotIgnored(archiveMemberPath.getArchivePath());
    // An archive member is obtainable iff its enclosing archive is.
    return willGet(archiveMemberPath.getArchivePath());
  }

  @Override
  public void invalidate(Path relativePath) {
    fileHashCacheEngine.invalidate(relativePath);
  }

  @Override
  public void invalidateAll() {
    fileHashCacheEngine.invalidateAll();
  }

  /** @return The {@link com.google.common.hash.HashCode} of the contents of path. */
  @Override
  public HashCode get(Path relativePath) throws IOException {
    Preconditions.checkArgument(!relativePath.isAbsolute());
    checkNotIgnored(relativePath);
    return fileHashCacheEngine.get(relativePath);
  }

  @Override
  public long getSize(Path relativePath) throws IOException {
    Preconditions.checkArgument(!relativePath.isAbsolute());
    checkNotIgnored(relativePath);
    return fileHashCacheEngine.getSize(relativePath);
  }

  @Override
  public Optional<HashCode> getIfPresent(Path relativePath) {
    Preconditions.checkArgument(!relativePath.isAbsolute());
    checkNotIgnored(relativePath);
    return Optional.ofNullable(fileHashCacheEngine.getIfPresent(relativePath))
        .map(HashCodeAndFileType::getHashCode);
  }

  @Override
  public HashCode get(ArchiveMemberPath archiveMemberPath) throws IOException {
    Preconditions.checkArgument(!archiveMemberPath.isAbsolute());
    checkNotIgnored(archiveMemberPath.getArchivePath());
    return fileHashCacheEngine.get(archiveMemberPath);
  }

  @Override
  public ProjectFilesystem getFilesystem() {
    return projectFilesystem;
  }

  /**
   * Pre-seeds the cache with an externally computed hash, wrapping it in the appropriate
   * directory/archive/file value type based on what the path refers to on disk.
   */
  @Override
  public void set(Path relativePath, HashCode hashCode) throws IOException {
    Preconditions.checkArgument(!relativePath.isAbsolute());
    checkNotIgnored(relativePath);
    HashCodeAndFileType value;

    if (projectFilesystem.isDirectory(relativePath)) {
      value =
          HashCodeAndFileType.ofDirectory(
              hashCode,
              projectFilesystem
                  .getFilesUnderPath(relativePath)
                  .stream()
                  .map(relativePath::relativize)
                  .collect(MoreCollectors.toImmutableSet()));
    } else if (relativePath.toString().endsWith(".jar")) {
      value =
          HashCodeAndFileType.ofArchive(
              hashCode,
              new DefaultJarContentHasher(
                  projectFilesystem,
                  projectFilesystem.getPathRelativeToProjectRoot(relativePath).get()));
    } else {
      value = HashCodeAndFileType.ofFile(hashCode);
    }

    fileHashCacheEngine.put(relativePath, value);
  }

  /**
   * Re-hashes every cached entry from disk and reports the paths whose cached value no longer
   * matches. Intended for cache-consistency diagnostics.
   */
  @Override
  public FileHashCacheVerificationResult verify() throws IOException {
    List<String> errors = new ArrayList<>();
    Map<Path, HashCodeAndFileType> cacheMap = fileHashCacheEngine.asMap();
    for (Map.Entry<Path, HashCodeAndFileType> entry : cacheMap.entrySet()) {
      Path path = entry.getKey();
      HashCodeAndFileType cached = entry.getValue();
      HashCodeAndFileType current = getHashCodeAndFileType(path);
      if (!cached.equals(current)) {
        errors.add(path.toString());
      }
    }
    return FileHashCacheVerificationResult.builder()
        .setCachesExamined(1)
        .setFilesExamined(cacheMap.size())
        .addAllVerificationErrors(errors)
        .build();
  }

  public List<AbstractBuckEvent> getStatsEvents() {
    return fileHashCacheEngine.getStatsEvents();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.servlets.compat.impl;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingException;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceUtil;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.io.JSONWriter;
import org.apache.sling.commons.metrics.Meter;
import org.apache.sling.commons.metrics.MetricsService;
import org.apache.sling.servlets.get.impl.helpers.JsonResourceWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A SlingSafeMethodsServlet that renders the search results as JSON data
 *
 * Use this as the default query servlet for json get requests for Sling
 */
@Component
@Service(value=javax.servlet.Servlet.class)
@Properties({
    @Property(name="service.description", value="Default Query Servlet"),
    @Property(name="service.vendor",value="The Apache Software Foundation"),
    @Property(name="sling.servlet.resourceTypes", value="sling/servlet/default"),
    @Property(name="sling.servlet.extensions", value="json"),
    @Property(name="sling.servlet.selectors", value="query"),
    @Property(name="sling.servlet.prefix", intValue=-1)
})
public class JsonQueryServlet extends SlingSafeMethodsServlet {

    private static final long serialVersionUID = 1L;

    private final Logger log = LoggerFactory.getLogger(JsonQueryServlet.class);

    /** Search clause */
    public static final String STATEMENT = "statement";

    /** Query type */
    public static final String QUERY_TYPE = "queryType";

    /** Result set offset */
    public static final String OFFSET = "offset";

    /** Number of rows requested */
    public static final String ROWS = "rows";

    /** property to append to the result */
    public static final String PROPERTY = "property";

    /** exerpt lookup path */
    public static final String EXCERPT_PATH = "excerptPath";

    /** rep:exerpt */
    private static final String REP_EXCERPT = "rep:excerpt()";

    public static final String TIDY = "tidy";

    private final JsonResourceWriter itemWriter;

    @Reference
    private MetricsService metricsService;

    private Meter meter;

    public JsonQueryServlet() {
        itemWriter = new JsonResourceWriter(null);
    }

    /** True if our request wants the "tidy" pretty-printed format */
    protected boolean isTidy(SlingHttpServletRequest req) {
        for(String selector : req.getRequestPathInfo().getSelectors()) {
            if(TIDY.equals(selector)) {
                return true;
            }
        }
        return false;
    }

    @Activate
    private void activate(){
        meter = metricsService.meter("data_flow_jsonQueryServletTEST");
    }

    @Override
    protected void doGet(SlingHttpServletRequest req, SlingHttpServletResponse resp)
            throws IOException {
        // Count every GET before delegating, so failed renders are metered too.
        meter.mark();
        dumpResult(req, resp);
    }

    /**
     * Retrieve the query type from the request.
     *
     * @param req request
     * @return the query type.
     */
    protected String getQueryType(SlingHttpServletRequest req) {
        return req.getParameter(QUERY_TYPE);
    }

    /**
     * Retrieve the query statement from the request.
     *
     * @param req request
     * @param queryType the query type, as previously determined
     * @return the query statement.
     */
    protected String getStatement(SlingHttpServletRequest req, String queryType) {
        return req.getParameter(STATEMENT);
    }

    /**
     * Dumps the result as JSON object.
     *
     * @param req request
     * @param resp response
     * @throws IOException in case the search will unexpectedly fail
     */
    private void dumpResult(SlingHttpServletRequest req, SlingHttpServletResponse resp)
            throws IOException {
        try {
            ResourceResolver resolver = req.getResourceResolver();

            String queryType = getQueryType(req);
            String statement = getStatement(req, queryType);
            Iterator<Map<String, Object>> result = resolver.queryResources(statement,
                queryType);

            // Apply the requested offset by consuming rows before rendering.
            if (req.getParameter(OFFSET) != null) {
                long skip = Long.parseLong(req.getParameter(OFFSET));
                while (skip > 0 && result.hasNext()) {
                    result.next();
                    skip--;
                }
            }

            resp.setContentType(req.getResponseContentType());
            resp.setCharacterEncoding("UTF-8");

            final JSONWriter w = new JSONWriter(resp.getWriter());
            w.setTidy(isTidy(req));

            w.array();

            // count == -1 means "no row limit"; it never reaches 0 in the loop below.
            long count = -1;
            if (req.getParameter(ROWS) != null) {
                count = Long.parseLong(req.getParameter(ROWS));
            }

            List<String> properties = new ArrayList<>();
            if (req.getParameterValues(PROPERTY) != null) {
                for (String property : req.getParameterValues(PROPERTY)) {
                    properties.add(property);
                }
            }

            String excerptPath = "";
            if (req.getParameter(EXCERPT_PATH) != null) {
                excerptPath = req.getParameter(EXCERPT_PATH);
            }

            // iterate through the result set and build the "json result"
            while (result.hasNext() && count != 0) {
                Map<String, Object> row = result.next();

                w.object();
                // NOTE(review): assumes every row contains a non-null "jcr:path"
                // column — confirm against the query implementations used here.
                String path = row.get("jcr:path").toString();

                w.key("name");
                w.value(ResourceUtil.getName(path));

                // dump columns
                for (String colName : row.keySet()) {
                    w.key(colName);
                    if (colName.equals(REP_EXCERPT)) {
                        // The excerpt column is keyed by the (possibly empty) excerpt path.
                        Object ev = row.get("rep:excerpt(" + excerptPath + ")");
                        w.value((ev == null) ? "" : ev.toString());
                    } else {
                        itemWriter.dumpValue(w, row.get(colName));
                    }
                }

                // load properties and add it to the result set
                if (!properties.isEmpty()) {
                    Resource nodeRes = resolver.getResource(path);
                    dumpProperties(w, nodeRes, properties);
                }

                w.endObject();
                count--;
            }
            w.endArray();
        } catch (JSONException je) {
            throw wrapException(je);
        }
    }

    /**
     * Writes the requested extra properties of the given resource to the JSON
     * writer; silently does nothing if the resource could not be resolved.
     */
    private void dumpProperties(JSONWriter w, Resource nodeRes,
            List<String> properties) throws JSONException {

        // nothing to do if there is no resource
        if (nodeRes == null) {
            return;
        }
        itemWriter.dumpProperties(nodeRes, w, properties);
    }

    /**
     * Logs the given exception and wraps it in a {@link SlingException} for the
     * caller to throw.
     *
     * @param e the exception to wrap
     * @return a {@link org.apache.sling.api.SlingException} wrapping the given exception
     */
    private SlingException wrapException(Exception e) {
        log.warn("Error in QueryServlet: " + e.toString(), e);
        return new SlingException(e.toString(), e);
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/webhook.proto

package com.google.cloud.dialogflow.cx.v3;

/**
 * The request message for
 * [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook].
 *
 * <p>Protobuf type {@code google.cloud.dialogflow.cx.v3.CreateWebhookRequest}
 *
 * <p>NOTE(review): machine-generated from webhook.proto — regenerate with protoc
 * rather than hand-editing. Carries two fields: {@code parent} (string, field 1)
 * and {@code webhook} (message, field 2).
 */
public final class CreateWebhookRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.CreateWebhookRequest)
    CreateWebhookRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateWebhookRequest.newBuilder() to construct.
  private CreateWebhookRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private CreateWebhookRequest() {
    parent_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateWebhookRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: consumes tag/value pairs until end of
  // stream (tag 0). Unrecognized tags are preserved in unknownFields so
  // round-tripping does not drop data.
  private CreateWebhookRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (parent), wire type 2 (length-delimited string)
            {
              java.lang.String s = input.readStringRequireUtf8();

              parent_ = s;
              break;
            }
          case 18: // field 2 (webhook), wire type 2 (embedded message)
            {
              com.google.cloud.dialogflow.cx.v3.Webhook.Builder subBuilder = null;
              if (webhook_ != null) {
                // Field seen before: merge the new occurrence into the old one,
                // per proto semantics for repeated occurrences of a message field.
                subBuilder = webhook_.toBuilder();
              }
              webhook_ =
                  input.readMessage(
                      com.google.cloud.dialogflow.cx.v3.Webhook.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(webhook_);
                webhook_ = subBuilder.buildPartial();
              }

              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3.WebhookProto
        .internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3.WebhookProto
        .internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.class,
            com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached in
  // whichever form was last requested (see getParent/getParentBytes).
  private volatile java.lang.Object parent_;
  /**
   * Required. The agent to create a webhook for. Format:
   * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip UTF-8 decoding.
      parent_ = s;
      return s;
    }
  }
  /**
   * Required. The agent to create a webhook for. Format:
   * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form.
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int WEBHOOK_FIELD_NUMBER = 2;
  private com.google.cloud.dialogflow.cx.v3.Webhook webhook_;
  /**
   * Required. The webhook to create.
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the webhook field is set.
   */
  @java.lang.Override
  public boolean hasWebhook() {
    return webhook_ != null;
  }
  /**
   * Required. The webhook to create.
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The webhook, or the default instance when unset (never null).
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.Webhook getWebhook() {
    return webhook_ == null
        ? com.google.cloud.dialogflow.cx.v3.Webhook.getDefaultInstance()
        : webhook_;
  }
  /**
   * Required. The webhook to create.
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.WebhookOrBuilder getWebhookOrBuilder() {
    return getWebhook();
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Default-valued fields are skipped on the wire (proto3 semantics).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (webhook_ != null) {
      output.writeMessage(2, getWebhook());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (webhook_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWebhook());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest other =
        (com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (hasWebhook() != other.hasWebhook()) return false;
    if (hasWebhook()) {
      if (!getWebhook().equals(other.getWebhook())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasWebhook()) {
      hash = (37 * hash) + WEBHOOK_FIELD_NUMBER;
      hash = (53 * hash) + getWebhook().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * The request message for
   * [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook].
   *
   * <p>Protobuf type {@code google.cloud.dialogflow.cx.v3.CreateWebhookRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.CreateWebhookRequest)
      com.google.cloud.dialogflow.cx.v3.CreateWebhookRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3.WebhookProto
          .internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3.WebhookProto
          .internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.class,
              com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";

      if (webhookBuilder_ == null) {
        webhook_ = null;
      } else {
        webhook_ = null;
        webhookBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3.WebhookProto
          .internal_static_google_cloud_dialogflow_cx_v3_CreateWebhookRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest build() {
      com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest buildPartial() {
      com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest result =
          new com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest(this);
      result.parent_ = parent_;
      // Webhook comes either from the plain field or from the nested builder,
      // depending on which representation is active.
      if (webhookBuilder_ == null) {
        result.webhook_ = webhook_;
      } else {
        result.webhook_ = webhookBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest other) {
      if (other == com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.hasWebhook()) {
        mergeWebhook(other.getWebhook());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure so partial data is merged
        // in the finally block, then surface the underlying IOException.
        parsedMessage =
            (com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object parent_ = "";
    /**
     * Required. The agent to create a webhook for. Format:
     * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Required. The agent to create a webhook for. Format:
     * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Required. The agent to create a webhook for. Format:
     * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      parent_ = value;
      onChanged();
      return this;
    }
    /**
     * Required. The agent to create a webhook for. Format:
     * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {

      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     * Required. The agent to create a webhook for. Format:
     * {@code projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>}.
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      parent_ = value;
      onChanged();
      return this;
    }

    private com.google.cloud.dialogflow.cx.v3.Webhook webhook_;
    // Lazily created nested builder; while non-null it owns the field and
    // webhook_ is ignored (see getWebhookFieldBuilder()).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3.Webhook,
            com.google.cloud.dialogflow.cx.v3.Webhook.Builder,
            com.google.cloud.dialogflow.cx.v3.WebhookOrBuilder>
        webhookBuilder_;
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the webhook field is set.
     */
    public boolean hasWebhook() {
      return webhookBuilder_ != null || webhook_ != null;
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The webhook.
     */
    public com.google.cloud.dialogflow.cx.v3.Webhook getWebhook() {
      if (webhookBuilder_ == null) {
        return webhook_ == null
            ? com.google.cloud.dialogflow.cx.v3.Webhook.getDefaultInstance()
            : webhook_;
      } else {
        return webhookBuilder_.getMessage();
      }
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setWebhook(com.google.cloud.dialogflow.cx.v3.Webhook value) {
      if (webhookBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        webhook_ = value;
        onChanged();
      } else {
        webhookBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setWebhook(com.google.cloud.dialogflow.cx.v3.Webhook.Builder builderForValue) {
      if (webhookBuilder_ == null) {
        webhook_ = builderForValue.build();
        onChanged();
      } else {
        webhookBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     * Required. The webhook to create. Merges {@code value} into any existing
     * webhook rather than replacing it.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeWebhook(com.google.cloud.dialogflow.cx.v3.Webhook value) {
      if (webhookBuilder_ == null) {
        if (webhook_ != null) {
          webhook_ =
              com.google.cloud.dialogflow.cx.v3.Webhook.newBuilder(webhook_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          webhook_ = value;
        }
        onChanged();
      } else {
        webhookBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearWebhook() {
      if (webhookBuilder_ == null) {
        webhook_ = null;
        onChanged();
      } else {
        webhook_ = null;
        webhookBuilder_ = null;
      }

      return this;
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dialogflow.cx.v3.Webhook.Builder getWebhookBuilder() {

      onChanged();
      return getWebhookFieldBuilder().getBuilder();
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dialogflow.cx.v3.WebhookOrBuilder getWebhookOrBuilder() {
      if (webhookBuilder_ != null) {
        return webhookBuilder_.getMessageOrBuilder();
      } else {
        return webhook_ == null
            ? com.google.cloud.dialogflow.cx.v3.Webhook.getDefaultInstance()
            : webhook_;
      }
    }
    /**
     * Required. The webhook to create.
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.Webhook webhook = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3.Webhook,
            com.google.cloud.dialogflow.cx.v3.Webhook.Builder,
            com.google.cloud.dialogflow.cx.v3.WebhookOrBuilder>
        getWebhookFieldBuilder() {
      if (webhookBuilder_ == null) {
        webhookBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.cx.v3.Webhook,
                com.google.cloud.dialogflow.cx.v3.Webhook.Builder,
                com.google.cloud.dialogflow.cx.v3.WebhookOrBuilder>(
                getWebhook(), getParentForChildren(), isClean());
        // Ownership transfers to the builder; null out the plain field.
        webhook_ = null;
      }
      return webhookBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.CreateWebhookRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.CreateWebhookRequest)
  private static final com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest();
  }

  public static com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateWebhookRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateWebhookRequest>() {
        @java.lang.Override
        public CreateWebhookRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new CreateWebhookRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<CreateWebhookRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateWebhookRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.CreateWebhookRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/** * This file is part of the JCROM project. * Copyright (C) 2008-2015 - All rights reserved. * Authors: Olafur Gauti Gudmundsson, Nicolas Dos Santos * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jcrom; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.sql.Timestamp; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import javax.jcr.Node; import javax.jcr.NodeIterator; import javax.jcr.Property; import javax.jcr.RepositoryException; import javax.jcr.nodetype.NodeType; import javax.jcr.version.Version; import javax.jcr.version.VersionManager; import net.sf.cglib.proxy.LazyLoader; import org.jcrom.annotations.JcrBaseVersionCreated; import org.jcrom.annotations.JcrBaseVersionName; import org.jcrom.annotations.JcrCheckedout; import org.jcrom.annotations.JcrChildNode; import org.jcrom.annotations.JcrCreated; import org.jcrom.annotations.JcrFileNode; import org.jcrom.annotations.JcrIdentifier; import org.jcrom.annotations.JcrName; import org.jcrom.annotations.JcrNode; import org.jcrom.annotations.JcrParentNode; import org.jcrom.annotations.JcrPath; import org.jcrom.annotations.JcrProperty; import org.jcrom.annotations.JcrProtectedProperty; import org.jcrom.annotations.JcrReference; import 
org.jcrom.annotations.JcrSerializedProperty; import org.jcrom.annotations.JcrUUID; import org.jcrom.annotations.JcrVersionCreated; import org.jcrom.annotations.JcrVersionName; import org.jcrom.callback.DefaultJcromCallback; import org.jcrom.callback.JcromCallback; import org.jcrom.type.TypeHandler; import org.jcrom.util.JcrUtils; import org.jcrom.util.NodeFilter; import org.jcrom.util.PathUtils; import org.jcrom.util.ReflectionUtils; /** * This class handles the heavy lifting of mapping a JCR node to a JCR entity object, and vice versa. * * @author Olafur Gauti Gudmundsson * @author Nicolas Dos Santos */ class Mapper { static final String DEFAULT_FIELDNAME = "fieldName"; /** Set of classes that have been validated for mapping by this mapper */ private final CopyOnWriteArraySet<Class<?>> mappedClasses = new CopyOnWriteArraySet<Class<?>>(); /** Specifies whether to clean up the node names */ private final boolean cleanNames; /** Specifies whether to retrieve mapped class name from node property */ private final boolean dynamicInstantiation; /** */ private final TypeHandler typeHandler; private final PropertyMapper propertyMapper; private final ReferenceMapper referenceMapper; private final FileNodeMapper fileNodeMapper; private final ChildNodeMapper childNodeMapper; private final Jcrom jcrom; private final ThreadLocal<Map<HistoryKey, Object>> history = new ThreadLocal<Map<HistoryKey, Object>>(); /** * Create a Mapper for a specific class. * * @param cleanNames specifies whether to clean names of new nodes, that is, replace illegal characters and spaces automatically * @param dynamicInstantiation if set to true, then Jcrom will try to retrieve the name of the class to instantiate from a node property (see @JcrNode(classNameProperty)). 
* @param typeHandler {@link TypeHandler} * @param jcrom */ Mapper(boolean cleanNames, boolean dynamicInstantiation, TypeHandler typeHandler, Jcrom jcrom) { this.cleanNames = cleanNames; this.dynamicInstantiation = dynamicInstantiation; this.typeHandler = typeHandler; this.jcrom = jcrom; this.propertyMapper = new PropertyMapper(this); this.referenceMapper = new ReferenceMapper(this); this.fileNodeMapper = new FileNodeMapper(this); this.childNodeMapper = new ChildNodeMapper(this); } void clearHistory() { history.remove(); } boolean isMapped(Class<?> c) { return mappedClasses.contains(c); } void addMappedClass(Class<?> c) { mappedClasses.add(c); } CopyOnWriteArraySet<Class<?>> getMappedClasses() { return mappedClasses; } boolean isCleanNames() { return cleanNames; } boolean isDynamicInstantiation() { return dynamicInstantiation; } TypeHandler getTypeHandler() { return typeHandler; } Class<?> getClassForName(String className) { return getClassForName(className, null); } Class<?> getClassForName(String className, Class<?> defaultClass) { for (Class<?> c : mappedClasses) { if (className.equals(c.getCanonicalName())) { return c; } } try { return Class.forName(className, true, Thread.currentThread().getContextClassLoader()); } catch (ClassNotFoundException ex) { return defaultClass; } } String getCleanName(String name) { if (name == null) { throw new JcrMappingException("Node name is null"); } if (cleanNames) { return PathUtils.createValidName(name); } else { return name; } } Object findEntityByPath(List<?> entities, String path) throws IllegalAccessException { for (Object entity : entities) { if (path.equals(getNodePath(entity))) { return entity; } } return null; } private Field findAnnotatedField(Object obj, Class<? 
extends Annotation> annotationClass) { for (Field field : ReflectionUtils.getDeclaredAndInheritedFields(obj.getClass(), false)) { if (jcrom.getAnnotationReader().isAnnotationPresent(field, annotationClass)) { field.setAccessible(true); return field; } } return null; } Field findPathField(Object obj) { return findAnnotatedField(obj, JcrPath.class); } Field findParentField(Object obj) { return findAnnotatedField(obj, JcrParentNode.class); } Field findNameField(Object obj) { return findAnnotatedField(obj, JcrName.class); } /** * @deprecated This method is now deprecated because {@link JcrUUID} annotation is deprecated.<br/> * {@link #findIdField(Object)} with {@link JcrIdentifier} annotation should be used instead. */ @Deprecated Field findUUIDField(Object obj) { return findAnnotatedField(obj, JcrUUID.class); } Field findIdField(Object obj) { return findAnnotatedField(obj, JcrIdentifier.class); } String getNodeName(Object object) throws IllegalAccessException { Field field = findNameField(object); return (String) typeHandler.getObject(field, object); } String getNodePath(Object object) throws IllegalAccessException { Field field = findPathField(object); return (String) typeHandler.getObject(field, object); } Object getParentObject(Object childObject) throws IllegalAccessException { Field parentField = findParentField(childObject); return parentField != null ? typeHandler.getObject(parentField, childObject) : null; } String getChildContainerNodePath(Object childObject, Object parentObject, Node parentNode) throws IllegalAccessException, RepositoryException { return childNodeMapper.getChildContainerNodePath(childObject, parentObject, parentNode); } /** * @deprecated This method is now deprecated because {@link #findUUIDField(Object)} annotation is deprecated.<br/> * {@link #getNodeId(Object)} should be used instead. 
*/ @Deprecated String getNodeUUID(Object object) throws IllegalAccessException { return (String) findUUIDField(object).get(object); } String getNodeId(Object object) throws IllegalAccessException { Field idField = findIdField(object); return idField != null ? (String) typeHandler.getObject(idField, object) : getNodeUUID(object); } static boolean hasMixinType(Node node, String mixinType) throws RepositoryException { for (NodeType nodeType : node.getMixinNodeTypes()) { if (nodeType.getName().equals(mixinType)) { return true; } } return false; } void setBaseVersionInfo(Object object, String name, Calendar created) throws IllegalAccessException { Field baseName = findAnnotatedField(object, JcrBaseVersionName.class); if (baseName != null) { baseName.set(object, name); } Field baseCreated = findAnnotatedField(object, JcrBaseVersionCreated.class); if (baseCreated != null) { if (baseCreated.getType() == Date.class) { baseCreated.set(object, created.getTime()); } else if (baseCreated.getType() == Timestamp.class) { baseCreated.set(object, new Timestamp(created.getTimeInMillis())); } else if (baseCreated.getType() == Calendar.class) { baseCreated.set(object, created); } } } void setNodeName(Object object, String name) throws IllegalAccessException { Field field = findNameField(object); typeHandler.setObject(field, object, name); } void setNodePath(Object object, String path) throws IllegalAccessException { Field field = findPathField(object); typeHandler.setObject(field, object, path); } /** * @deprecated This method is now deprecated because {@link #findUUIDField(Object)} annotation is deprecated.<br/> * {@link #setId(Object, String)} should be used instead. 
 */
@Deprecated
void setUUID(Object object, String uuid) throws IllegalAccessException {
    Field uuidField = findUUIDField(object);
    if (uuidField != null) {
        typeHandler.setObject(uuidField, object, uuid);
    }
}

/** Writes the node identifier into the entity's {@link JcrIdentifier} field, if present. */
void setId(Object object, String id) throws IllegalAccessException {
    Field idField = findIdField(object);
    if (idField != null) {
        typeHandler.setObject(idField, object, id);
    }
}

/**
 * Check if this node has a child version history reference. If so, then return the referenced node, else return the
 * node supplied.
 *
 * @param node the node to inspect
 * @return the frozen node of the matching version, or the node supplied
 * @throws javax.jcr.RepositoryException
 */
Node checkIfVersionedChild(Node node) throws RepositoryException {
    if (node.hasProperty(Property.JCR_CHILD_VERSION_HISTORY)) {
        //Node versionNode = node.getSession().getNodeByUUID(node.getProperty("jcr:childVersionHistory").getString());
        Node versionNode = getNodeById(node, node.getProperty(Property.JCR_CHILD_VERSION_HISTORY).getString());
        NodeIterator it = versionNode.getNodes();
        while (it.hasNext()) {
            Node n = it.nextNode();
            // pick the version whose name appears in this node's path (skipping the root version)
            if ((!n.getName().equals("jcr:rootVersion") && !n.getName().equals(Node.JCR_ROOT_VERSION)) && n.isNodeType(NodeType.NT_VERSION) && n.hasNode(Node.JCR_FROZEN_NODE) && node.getPath().indexOf("/" + n.getName() + "/") != -1) {
                return n.getNode(Node.JCR_FROZEN_NODE);
            }
        }
        return node;
    } else {
        return node;
    }
}

/**
 * Walks up from the given node and maps the nearest ancestor that resolves to a mapped class
 * (children excluded via a zero-depth filter), or returns null if none is found.
 */
Object findParentObjectFromNode(Node node) throws RepositoryException, IllegalAccessException, ClassNotFoundException, InstantiationException, IOException {
    Object parentObj = null;
    Node parentNode = node.getParent();
    while (parentNode != null) {
        Class<?> parentClass = findClassFromNode(Object.class, parentNode);
        if (parentClass != null && !parentClass.equals(Object.class)) {
            // Gets parent object without children
            parentObj = fromNode(parentClass, parentNode, new NodeFilter(NodeFilter.INCLUDE_ALL, 0));
            break;
        }
        try {
            parentNode = parentNode.getParent();
        } catch (Exception ignore) {
            // reached the repository root (or the parent is inaccessible): stop climbing
            parentNode = null;
        }
    }
    return parentObj;
}

// Resolves the concrete entity class for a node (signature continues on the next chunk line).
Class<?> findClassFromNode(Class<?>
defaultClass, Node node) throws RepositoryException, IllegalAccessException, ClassNotFoundException, InstantiationException {
    // With dynamic instantiation enabled, the class-name property stored on the node
    // (default "className", overridable via @JcrNode.classNameProperty) decides the class.
    if (dynamicInstantiation) {
        // first we try to locate the class name from node property
        String classNameProperty = "className";
        JcrNode jcrNode = ReflectionUtils.getJcrNodeAnnotation(defaultClass);
        if (jcrNode != null && !jcrNode.classNameProperty().equals("none")) {
            classNameProperty = jcrNode.classNameProperty();
        }
        if (node.hasProperty(classNameProperty)) {
            String className = node.getProperty(classNameProperty).getString();
            Class<?> c = getClassForName(className, defaultClass);
            if (isMapped(c)) {
                return c;
            } else {
                throw new JcrMappingException("Trying to instantiate unmapped class: " + c.getName());
            }
        } else {
            // use default class
            return defaultClass;
        }
    } else {
        // use default class
        return defaultClass;
    }
}

/** Instantiates (via no-arg constructor) the class resolved for the given node. */
Object createInstanceForNode(Class<?> objClass, Node node) throws RepositoryException, IllegalAccessException, ClassNotFoundException, InstantiationException {
    return findClassFromNode(objClass, node).newInstance();
}

/**
 * Transforms the node supplied to an instance of the entity class that this Mapper was created for.
 *
 * @param node
 *            the JCR node from which to create the object
 * @param nodeFilter
 *            the NodeFilter to be applied
 * @param action
 *            callback object that specifies the Jcr action
 * @return an instance of the JCR entity class, mapped from the node
 * @throws java.lang.Exception
 */
Object fromNodeWithParent(Class<?> entityClass, Node node, NodeFilter nodeFilter) throws ClassNotFoundException, InstantiationException, RepositoryException, IllegalAccessException, IOException {
    // fresh mapping history for this traversal (guards against mapping cycles)
    history.set(new HashMap<HistoryKey, Object>());
    Object obj = createInstanceForNode(entityClass, node);
    Object parentObj = findParentObjectFromNode(node);
    if (nodeFilter == null) {
        nodeFilter = new NodeFilter(NodeFilter.INCLUDE_ALL, NodeFilter.DEPTH_INFINITE);
    }
    if (JcrFile.class.isAssignableFrom(obj.getClass())) {
        // special handling of JcrFile objects
        fileNodeMapper.mapSingleFile((JcrFile) obj, node, parentObj, 0, nodeFilter, this);
    }
    mapNodeToClass(obj, node, nodeFilter, parentObj, 0);
    history.remove();
    return obj;
}

/**
 * Transforms the node supplied to an instance of the entity class that this Mapper was created for.
 *
 * @param node
 *            the JCR node from which to create the object
 * @param nodeFilter
 *            the NodeFilter to be applied
 * @return an instance of the JCR entity class, mapped from the node
 * @throws java.lang.Exception
 */
Object fromNode(Class<?> entityClass, Node node, NodeFilter nodeFilter) throws ClassNotFoundException, InstantiationException, RepositoryException, IllegalAccessException, IOException {
    // fresh mapping history for this traversal (guards against mapping cycles)
    history.set(new HashMap<HistoryKey, Object>());
    Object obj = createInstanceForNode(entityClass, node);
    if (nodeFilter == null) {
        nodeFilter = new NodeFilter(NodeFilter.INCLUDE_ALL, NodeFilter.DEPTH_INFINITE);
    }
    if (JcrFile.class.isAssignableFrom(obj.getClass())) {
        // special handling of JcrFile objects
        fileNodeMapper.mapSingleFile((JcrFile) obj, node, null, 0, nodeFilter, this);
    }
    mapNodeToClass(obj, node, nodeFilter, null, 0);
    history.remove();
    return obj;
}

/**
 * Transforms the entity supplied to a JCR node, and adds that node as a child to the parent node supplied.
 *
 * @param parentNode
 *            the parent node to which the entity node will be added
 * @param entity
 *            the entity to be mapped to the JCR node
 * @param mixinTypes
 *            an array of mixin type that will be added to the new node
 * @param action
 *            callback object that specifies the Jcrom actions:
 *            <ul>
 *            <li>{@link JcromCallback#doAddNode(Node, String, JcrNode, Object)},</li>
 *            <li>{@link JcromCallback#doAddMixinTypes(Node, String[], JcrNode, Object)},</li>
 *            <li>{@link JcromCallback#doComplete(Object, Node)},</li>
 *            </ul>
 * @return the newly created JCR node
 * @throws java.lang.Exception
 */
Node addNode(Node parentNode, Object entity, String[] mixinTypes, JcromCallback action) throws IllegalAccessException, RepositoryException, IOException {
    return addNode(parentNode, entity, mixinTypes, true, action);
}

// When createNode is false the entity is mapped onto parentNode itself instead of a new child node.
Node addNode(Node parentNode, Object entity, String[] mixinTypes, boolean createNode, JcromCallback action) throws IllegalAccessException, RepositoryException, IOException {
    entity =
typeHandler.resolveAddEntity(entity);
    entity = clearCglib(entity); // unwrap any CGLIB lazy-loading proxy
    if (action == null) {
        action = new DefaultJcromCallback(jcrom);
    }

    // create the child node
    Node node;
    JcrNode jcrNode = typeHandler.getJcrNodeAnnotation(entity.getClass(), entity.getClass().getGenericSuperclass(), entity);
    if (createNode) {
        // add node
        String nodeName = getCleanName(getNodeName(entity));
        node = action.doAddNode(parentNode, nodeName, jcrNode, entity);

        // add mixin types
        action.doAddMixinTypes(node, mixinTypes, jcrNode, entity);

        // update the object id, name and path
        setId(entity, node.getIdentifier());
        setNodeName(entity, node.getName());
        setNodePath(entity, node.getPath());
        if (node.hasProperty(Property.JCR_UUID)) {
            // setUUID(entity, node.getUUID());
            setUUID(entity, node.getIdentifier());
        }
    } else {
        node = parentNode;
    }

    // add class name to property
    if (jcrNode != null && !jcrNode.classNameProperty().equals("none")) {
        action.doAddClassNameToProperty(node, jcrNode, entity);
    }

    // special handling of JcrFile objects
    if (JcrFile.class.isAssignableFrom(entity.getClass())) {
        fileNodeMapper.addFileNode(node, (JcrFile) entity, this);
    }

    // map each annotated field of the entity onto the node
    for (Field field : ReflectionUtils.getDeclaredAndInheritedFields(entity.getClass(), true)) {
        field.setAccessible(true);
        if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrProperty.class)) {
            propertyMapper.addProperty(field, entity, node, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrSerializedProperty.class)) {
            propertyMapper.addSerializedProperty(field, entity, node);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrChildNode.class)) {
            childNodeMapper.addChildren(field, entity, node, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrReference.class)) {
            referenceMapper.addReferences(field, entity, node);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrFileNode.class)) {
            fileNodeMapper.addFiles(field, entity, node, this);
        }
    }

    // complete the addition of
// the new node
    action.doComplete(entity, node);

    return node;
}

/**
 * Update an existing JCR node with the entity supplied.
 *
 * @param node
 *            the JCR node to be updated
 * @param entity
 *            the entity that will be mapped to the existing node
 * @param nodeFilter
 *            the NodeFilter to apply when updating child nodes and references
 * @param action
 *            callback object that specifies the Jcrom actions
 * @return the updated node
 * @throws java.lang.Exception
 */
Node updateNode(Node node, Object entity, NodeFilter nodeFilter, JcromCallback action) throws RepositoryException, IllegalAccessException, IOException {
    return updateNode(node, entity, entity.getClass(), nodeFilter, 0, action);
}

Node updateNode(Node node, Object entity, Class<?> entityClass, NodeFilter nodeFilter, int depth, JcromCallback action) throws RepositoryException, IllegalAccessException, IOException {
    entity = clearCglib(entity); // unwrap any CGLIB lazy-loading proxy
    if (nodeFilter == null) {
        nodeFilter = new NodeFilter(NodeFilter.INCLUDE_ALL, NodeFilter.DEPTH_INFINITE);
    }
    if (action == null) {
        action = new DefaultJcromCallback(jcrom);
    }

    // map the class name to a property
    JcrNode jcrNode = ReflectionUtils.getJcrNodeAnnotation(entityClass);
    if (jcrNode != null && !jcrNode.classNameProperty().equals("none")) {
        // check if the class of the object has changed
        if (node.hasProperty(jcrNode.classNameProperty())) {
            String oldClassName = node.getProperty(jcrNode.classNameProperty()).getString();
            if (!oldClassName.equals(entity.getClass().getCanonicalName())) {
                // different class, so we should remove the properties of the old class
                Class<?> oldClass = getClassForName(oldClassName);
                if (oldClass != null) {
                    Class<?> newClass = entity.getClass();

                    // fields declared on the old class but absent from the new one
                    Set<Field> oldFields = new HashSet<Field>();
                    oldFields.addAll(Arrays.asList(ReflectionUtils.getDeclaredAndInheritedFields(oldClass, true)));
                    oldFields.removeAll(Arrays.asList(ReflectionUtils.getDeclaredAndInheritedFields(newClass, true)));

                    // remove the old fields
                    for (Field field : oldFields) {
                        if
(node.hasProperty(field.getName())) {
                            node.getProperty(field.getName()).remove();
                        }
                    }
                }
            }
        }
        action.doUpdateClassNameToProperty(node, jcrNode, entity);
    }

    // special handling of JcrFile objects
    if (JcrFile.class.isAssignableFrom(entity.getClass()) && depth == 0) {
        fileNodeMapper.addFileNode(node, (JcrFile) entity, this);
    }

    // update each annotated field, honouring the node filter's depth limits
    for (Field field : ReflectionUtils.getDeclaredAndInheritedFields(entityClass, true)) {
        field.setAccessible(true);
        if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrProperty.class) && nodeFilter.isDepthPropertyIncluded(depth)) {
            propertyMapper.updateProperty(field, entity, node, depth, nodeFilter, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrSerializedProperty.class) && nodeFilter.isDepthPropertyIncluded(depth)) {
            propertyMapper.updateSerializedProperty(field, entity, node, depth, nodeFilter);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrChildNode.class) && nodeFilter.isDepthIncluded(depth)) {
            // child nodes
            childNodeMapper.updateChildren(field, entity, node, depth, nodeFilter, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrReference.class)) {
            // references
            referenceMapper.updateReferences(field, entity, node, nodeFilter);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrFileNode.class) && nodeFilter.isDepthIncluded(depth)) {
            // file nodes
            fileNodeMapper.updateFiles(field, entity, node, this, depth, nodeFilter);
        }
    }

    // if name is different, then we move the node
    if (!node.getName().equals(getCleanName(getNodeName(entity)))) {
        boolean isVersionable = JcrUtils.hasMixinType(node, "mix:versionable") || JcrUtils.hasMixinType(node, NodeType.MIX_VERSIONABLE);
        Node parentNode = node.getParent();
        if (isVersionable) {
            // a versionable parent must be checked out before its child can be moved
            if (JcrUtils.hasMixinType(parentNode, "mix:versionable") || JcrUtils.hasMixinType(parentNode, NodeType.MIX_VERSIONABLE)) {
                JcrUtils.checkout(parentNode);
            }
        }

        // move node
        String nodeName = getCleanName(getNodeName(entity));
action.doMoveNode(parentNode, node, nodeName, jcrNode, entity);

        if (isVersionable) {
            if ((JcrUtils.hasMixinType(parentNode, "mix:versionable") || JcrUtils.hasMixinType(parentNode, NodeType.MIX_VERSIONABLE)) && parentNode.isCheckedOut()) {
                // Save session changes before checking-in the parent node
                node.getSession().save();
                JcrUtils.checkin(parentNode);
            }
        }

        // update the object name and path
        setNodeName(entity, node.getName());
        setNodePath(entity, node.getPath());
    }

    // complete the update of the node
    action.doComplete(entity, node);

    return node;
}

/** Returns true if the node carries the mix:versionable mixin. */
private boolean isVersionable(Node node) throws RepositoryException {
    for (NodeType mixinType : node.getMixinNodeTypes()) {
        if (mixinType.getName().equals("mix:versionable") || mixinType.getName().equals(NodeType.MIX_VERSIONABLE)) {
            return true;
        }
    }
    return false;
}

/**
 * Populates the entity's annotated fields from the given node, dispatching each field
 * to the appropriate sub-mapper (continues across the following chunk lines).
 */
Object mapNodeToClass(Object obj, Node node, NodeFilter nodeFilter, Object parentObject, int depth) throws ClassNotFoundException, InstantiationException, RepositoryException, IllegalAccessException, IOException {

    if (!JcrFile.class.isAssignableFrom(obj.getClass())) {
        // this does not apply for JcrFile extensions
        setNodeName(obj, node.getName());
    }

    // construct history key
    HistoryKey key = new HistoryKey();
    key.path = node.getPath();
    if (nodeFilter.getMaxDepth() == NodeFilter.DEPTH_INFINITE) {
        // then use infinite depth as key depth
        key.depth = NodeFilter.DEPTH_INFINITE;
    } else {
        // calculate key depth from max depth - current depth
        key.depth = nodeFilter.getMaxDepth() - depth;
    }

    // now check the history key
    if (history.get() == null) {
        history.set(new HashMap<HistoryKey, Object>());
    }
    if (history.get().containsKey(key)) {
        // already mapped at this depth in the current traversal: reuse to break cycles
        return history.get().get(key);
    } else {
        history.get().put(key, obj);
    }

    for (Field field : ReflectionUtils.getDeclaredAndInheritedFields(obj.getClass(), false)) {
        field.setAccessible(true);
        if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrProperty.class) && nodeFilter.isDepthPropertyIncluded(depth)) {
propertyMapper.mapPropertyToField(obj, field, node, depth, nodeFilter);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrSerializedProperty.class) && nodeFilter.isDepthPropertyIncluded(depth)) {
            propertyMapper.mapSerializedPropertyToField(obj, field, node, depth, nodeFilter);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrProtectedProperty.class)) {
            propertyMapper.mapProtectedPropertyToField(obj, field, node);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrUUID.class)) {
            if (node.hasProperty(Property.JCR_UUID)) {
                // field.set(obj, node.getUUID());
                typeHandler.setObject(field, obj, node.getIdentifier());
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrIdentifier.class)) {
            typeHandler.setObject(field, obj, node.getIdentifier());
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrBaseVersionName.class)) {
            if (isVersionable(node)) {
                // Version baseVersion = node.getBaseVersion();
                Version baseVersion = getVersionManager(node).getBaseVersion(node.getPath());
                typeHandler.setObject(field, obj, baseVersion.getName());
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrBaseVersionCreated.class)) {
            if (isVersionable(node)) {
                // Version baseVersion = node.getBaseVersion();
                Version baseVersion = getVersionManager(node).getBaseVersion(node.getPath());
                typeHandler.setObject(field, obj, typeHandler.getValue(field.getType(), null, typeHandler.createValue(Calendar.class, baseVersion.getCreated(), node.getSession().getValueFactory()), null));
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrVersionName.class)) {
            if (node.getParent() != null && node.getParent().isNodeType(NodeType.NT_VERSION)) {
                typeHandler.setObject(field, obj, node.getParent().getName());
            } else if (isVersionable(node)) {
                // if we're not browsing version history, then this must be the base version
                //Version baseVersion = node.getBaseVersion();
                Version baseVersion =
getVersionManager(node).getBaseVersion(node.getPath());
                typeHandler.setObject(field, obj, baseVersion.getName());
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrVersionCreated.class)) {
            if (node.getParent() != null && node.getParent().isNodeType(NodeType.NT_VERSION)) {
                Version version = (Version) node.getParent();
                typeHandler.setObject(field, obj, typeHandler.getValue(field.getType(), null, typeHandler.createValue(Calendar.class, version.getCreated(), node.getSession().getValueFactory()), null));
            } else if (isVersionable(node)) {
                // if we're not browsing version history, then this must be the base version
                //Version baseVersion = node.getBaseVersion();
                Version baseVersion = getVersionManager(node).getBaseVersion(node.getPath());
                typeHandler.setObject(field, obj, typeHandler.getValue(field.getType(), null, typeHandler.createValue(Calendar.class, baseVersion.getCreated(), node.getSession().getValueFactory()), null));
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrCheckedout.class)) {
            typeHandler.setObject(field, obj, node.isCheckedOut());
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrCreated.class)) {
            if (node.hasProperty(Property.JCR_CREATED)) {
                typeHandler.setObject(field, obj, typeHandler.getValue(field.getType(), null, node.getProperty(Property.JCR_CREATED).getValue(), null));
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrParentNode.class)) {
            // only inject the parent when it is type-compatible with the field
            if (parentObject != null && typeHandler.getType(field.getType(), field.getGenericType(), obj).isInstance(parentObject)) {
                typeHandler.setObject(field, obj, parentObject);
            }
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrChildNode.class) && nodeFilter.isDepthIncluded(depth)) {
            childNodeMapper.getChildrenFromNode(field, node, obj, depth, nodeFilter, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrReference.class)) {
            referenceMapper.getReferencesFromNode(field, node, obj, depth,
nodeFilter, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrFileNode.class) && nodeFilter.isDepthIncluded(depth)) {
            fileNodeMapper.getFilesFromNode(field, node, obj, depth, nodeFilter, this);
        } else if (jcrom.getAnnotationReader().isAnnotationPresent(field, JcrPath.class)) {
            typeHandler.setObject(field, obj, node.getPath());
        }
    }
    return obj;
}

/** Returns the VersionManager of the node's session workspace. */
static VersionManager getVersionManager(Node node) throws RepositoryException {
    VersionManager versionMgr = node.getSession().getWorkspace().getVersionManager();
    return versionMgr;
}

/** Looks a node up by identifier within the same session. */
static Node getNodeById(Node node, String id) throws RepositoryException {
    // return node.getSession().getNodeByUUID(uuid);
    return node.getSession().getNodeByIdentifier(id);
}

/**
 * This is a temporary solution to enable lazy loading of single child nodes and single references. The problem is
 * that Jcrom uses direct field modification, but CGLIB fails to cascade field changes between the enhanced class
 * and the lazy object.
 *
 * @param obj a possibly CGLIB-enhanced entity
 * @return the underlying (unproxied) entity
 * @throws java.lang.IllegalAccessException
 */
Object clearCglib(Object obj) throws IllegalAccessException {
    for (Field field : ReflectionUtils.getDeclaredAndInheritedFields(obj.getClass(), true)) {
        field.setAccessible(true);
        if (field.getName().equals("CGLIB$LAZY_LOADER_0")) {
            Object object = typeHandler.getObject(field, obj);
            if (object != null) {
                return object;
            } else {
                // lazy loading has not been triggered yet, so
                // we do it manually
                return triggerLazyLoading(obj);
            }
        }
    }
    return obj;
}

/** Forces a CGLIB lazy loader to resolve and returns the loaded object. */
Object triggerLazyLoading(Object obj) throws IllegalAccessException {
    for (Field field : ReflectionUtils.getDeclaredAndInheritedFields(obj.getClass(), false)) {
        field.setAccessible(true);
        if (field.getName().equals("CGLIB$CALLBACK_0")) {
            try {
                return ((LazyLoader) typeHandler.getObject(field, obj)).loadObject();
            } catch (Exception e) {
                throw new JcrMappingException("Could not trigger lazy loading", e);
            }
        }
    }
    return obj;
}

PropertyMapper getPropertyMapper() {
    return propertyMapper;
}
ReferenceMapper getReferenceMapper() {
    return referenceMapper;
}

FileNodeMapper getFileNodeMapper() {
    return fileNodeMapper;
}

ChildNodeMapper getChildNodeMapper() {
    return childNodeMapper;
}

Jcrom getJcrom() {
    return jcrom;
}

/**
 * Class for the history key. Contains the node path and the depth.
 * Thanks to Leander for supplying this fix.
 */
private static class HistoryKey {

    // absolute JCR path of the mapped node
    private String path;
    // remaining mapping depth at the time the node was mapped
    private int depth;

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + depth;
        result = prime * result + ((path == null) ? 0 : path.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        HistoryKey other = (HistoryKey) obj;
        if (depth != other.depth) {
            return false;
        }
        if (path == null) {
            if (other.path != null) {
                return false;
            }
        } else if (!path.equals(other.path)) {
            return false;
        }
        return true;
    }
}
}
package mods.ocminecart.common.minecart; import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.Loader; import cpw.mods.fml.common.Optional; import mods.ocminecart.Settings; import mods.ocminecart.common.util.BitUtil; import mods.railcraft.api.carts.IEnergyTransfer; import mods.railcraft.api.electricity.IElectricMinecart; import mods.railcraft.client.emblems.EmblemToolsClient; import mods.railcraft.common.emblems.EmblemToolsServer; import net.minecraft.block.Block; import net.minecraft.block.BlockRailBase; import net.minecraft.entity.item.EntityMinecart; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.DamageSource; import net.minecraft.util.MathHelper; import net.minecraft.util.ResourceLocation; import net.minecraft.world.World; //Is the Base for a solid, self powered cart with a brake. //Later I will add the Railcraft integration here @Optional.InterfaceList({ @Optional.Interface(iface = "mods.railcraft.api.carts.IEnergyTransfer", modid = "Railcraft") , @Optional.Interface(iface = "mods.railcraft.api.electricity.IElectricMinecart", modid = "Railcraft") }) public abstract class AdvCart extends EntityMinecart implements IEnergyTransfer, IElectricMinecart { private ChargeHandler charge; public AdvCart(World p_i1713_1_, double p_i1713_2_, double p_i1713_4_, double p_i1713_6_) { super(p_i1713_1_, p_i1713_2_, p_i1713_4_, p_i1713_6_); this.setDisplayTileData(0); } public AdvCart(World p_i1713_1) { super(p_i1713_1); this.setDisplayTileData(0); } protected void entityInit() { super.entityInit(); if(Loader.isModLoaded("Railcraft") && FMLCommonHandler.instance().getEffectiveSide().isServer()) charge = new ChargeHandler(this, ChargeHandler.Type.USER, Settings.ComputerCartETrackBuf, Settings.ComputerCartETrackLoss); this.dataWatcher.addObject(3, (byte)0); // Booleans (is Locked, Brake enabled) this.dataWatcher.addObject(4, 0.0F); //Engine speed 
this.dataWatcher.addObject(5, ""); //Emblem id [Railcraft] // Free DataWatcher 6-16, 23-32 } protected final void setBrake(boolean b){ this.dataWatcher.updateObject(3, BitUtil.setBit(b, this.dataWatcher.getWatchableObjectByte(3), 0)); } protected final boolean getBrake(){ return BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 0); } protected final void setEngine(double d){ this.dataWatcher.updateObject(4, (float)d); } protected final double getEngine(){ return this.dataWatcher.getWatchableObjectFloat(4); } public final boolean isLocked(){ return BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 1); } public final boolean isEngineActive(){ return this.getEngine()!=0 && !this.isLocked() && !this.getBrake() && this.onRail(); } public void writeEntityToNBT(NBTTagCompound nbt){ super.writeEntityToNBT(nbt); NBTTagCompound tag = new NBTTagCompound(); tag.setDouble("enginespeed", this.dataWatcher.getWatchableObjectFloat(4)); tag.setBoolean("brake", BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 0)); if(Loader.isModLoaded("Railcraft")){ NBTTagCompound rctag = new NBTTagCompound(); rctag.setBoolean("locked", BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 1)); if(this.charge!=null) this.charge.writeToNBT(rctag); String emblem = this.dataWatcher.getWatchableObjectString(5); if(emblem!=null && emblem!="") rctag.setString("emblem_id", emblem); else rctag.removeTag("emblem_id"); tag.setTag("railcraft", rctag); } nbt.setTag("advcart", tag); } public void readEntityFromNBT(NBTTagCompound nbt){ super.readEntityFromNBT(nbt); if(nbt.hasKey("advcart")){ NBTTagCompound tag = (NBTTagCompound) nbt.getTag("advcart"); if(tag.hasKey("enginespeed")) this.dataWatcher.updateObject(4, (float)tag.getDouble("enginespeed")); if(tag.hasKey("brake")) this.dataWatcher.updateObject(3, BitUtil.setBit(tag.getBoolean("brake"), this.dataWatcher.getWatchableObjectByte(3), 0)); if(tag.hasKey("railcraft") && Loader.isModLoaded("Railcraft")){ NBTTagCompound rctag = 
tag.getCompoundTag("railcraft"); this.dataWatcher.updateObject(3, BitUtil.setBit(rctag.getBoolean("locked"), this.dataWatcher.getWatchableObjectByte(3), 1)); if(this.charge!=null) this.charge.readFromNBT(rctag); if(rctag.hasKey("emblem_id")){ String id= rctag.getString("emblem_id"); this.dataWatcher.updateObject(5, (id==null)?"":id); } } } } @Override public void killMinecart(DamageSource p_94095_1_) { this.setDead(); ItemStack itemstack = this.getCartItem(); if (this.func_95999_t() != null) { itemstack.setStackDisplayName(this.func_95999_t()); } this.entityDropItem(itemstack, 0.0F); } @Override public int getMinecartType() { return -1; } public boolean onRail() { int x = MathHelper.floor_double(this.posX); int y = MathHelper.floor_double(this.posY); int z = MathHelper.floor_double(this.posZ); return BlockRailBase.func_150049_b_(this.worldObj, x, y, z); } public void onUpdate() { super.onUpdate(); if (this.worldObj.isRemote) return; if(charge!=null && Loader.isModLoaded("Railcraft")){ this.charge.tick(); double mv = this.addEnergy(this.charge.getCharge() * Settings.OC_IC2PWR, true); //Get max. energy we can load to the node mv = Math.min(mv, Settings.ComputerCartETrackLoad * Settings.OC_IC2PWR); //Check if the movable energy is higher than the limit. 
mv = this.charge.removeCharge(mv / Settings.OC_IC2PWR) * Settings.OC_IC2PWR; //Remove the charge from the buffer this.addEnergy(mv , false); //Add the removed energy to the node network } } @Override protected void func_145821_a(int trackX, int trackY, int trackZ, double maxSpeed, double slopeAdjustement, Block trackBlock, int trackMeta) { super.func_145821_a(trackX, trackY, trackZ, maxSpeed, slopeAdjustement, trackBlock, trackMeta); if (this.worldObj.isRemote) return; if(charge!=null && Loader.isModLoaded("Railcraft")){ this.charge.tickOnTrack(trackX, trackY, trackZ); } } protected void applyDrag() { if(!(BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 0) || BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 1))){ this.motionX *= 0.9699999785423279D; this.motionY *= 0.0D; this.motionZ *= 0.9699999785423279D; if(this.dataWatcher.getWatchableObjectFloat(4)!=0){ double yaw = this.rotationYaw * Math.PI / 180.0; this.motionX += Math.cos(yaw) * 10; this.motionZ += Math.sin(yaw) * 10; double nMotionX = Math.min( Math.abs(this.motionX) , this.dataWatcher.getWatchableObjectFloat(4)); double nMotionZ = Math.min( Math.abs(this.motionZ) , this.dataWatcher.getWatchableObjectFloat(4)); if(this.motionX < 0) this.motionX = - nMotionX; else this.motionX = nMotionX; if(this.motionZ < 0) this.motionZ = - nMotionZ; else this.motionZ = nMotionZ; } //Stop the cart if there is no speed. 
(below 0.0001 there are only sounds and no movement) if(Math.sqrt(this.motionX * this.motionX + this.motionZ * this.motionZ) < 0.0001){ this.motionX = 0; this.motionZ = 0; } } else if(!BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 1)){ this.motionX = 0; this.motionZ = 0; this.setPosition(this.lastTickPosX, this.posY, this.lastTickPosZ); // Fix: Bug on Booster Tracks (Reset Position) } } public double getSpeed(){ return Math.sqrt(this.motionX * this.motionX + this.motionZ * this.motionZ); } public AxisAlignedBB getBoundingBox() { if(Loader.isModLoaded("Railcraft") && Settings.GeneralFixCartBox) //The Railcraft collision handler breaks some things return super.getBoundingBox(); return this.getCollisionBox(this); } public boolean canBePushed() { return (!BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3),0) || !onRail()); } protected abstract double addEnergy(double amount, boolean simulate); /*-------Railcraft-------*/ public void lockdown(boolean lock){ if(lock != BitUtil.getBit(this.dataWatcher.getWatchableObjectByte(3), 1)) this.dataWatcher.updateObject(3, BitUtil.setBit(lock, this.dataWatcher.getWatchableObjectByte(3), 1)); } @Override public boolean canExtractEnergy() { return false; } @Override public boolean canInjectEnergy() { return false; } @Override public double extractEnergy(Object arg0, double arg1, int arg2, boolean arg3, boolean arg4, boolean arg5) { return 0; } @Override public int getCapacity() { return (int) this.charge.getCapacity(); } @Override public double getEnergy() { return charge.getCharge(); } @Override public int getTier() { return 1; } @Override public int getTransferLimit() { return (int)(Settings.ComputerCartETrackLoad * 1.1); } @Override public double injectEnergy(Object arg0, double arg1, int arg2, boolean arg3, boolean arg4, boolean arg5) { return 0; } @Override public ChargeHandler getChargeHandler() { return this.charge; } public boolean setEmblem(ItemStack stack){ if(!Loader.isModLoaded("Railcraft")) return 
false; return setEmblem(EmblemToolsServer.getEmblemIdentifier(stack)); } public boolean setEmblem(String emblem){ if(!Loader.isModLoaded("Railcraft")) return false; if(emblem==this.dataWatcher.getWatchableObjectString(5)) return false; if(emblem==null) emblem=""; this.dataWatcher.updateObject(5, emblem); return true; } public String getEmblem(){ if(!Loader.isModLoaded("Railcraft")) return null; return this.dataWatcher.getWatchableObjectString(5); } @Optional.Method(modid="Railcraft") public ResourceLocation getEmblemIcon(){ String id = this.dataWatcher.getWatchableObjectString(5); if(id==null || id.length()<1) return null; return EmblemToolsClient.packageManager.getEmblemTextureLocation(id); } }